Posted to commits@carbondata.apache.org by ra...@apache.org on 2018/06/05 10:41:46 UTC

[01/26] carbondata git commit: [CARBONDATA-2566] Optimize CarbonReaderExample

Repository: carbondata
Updated Branches:
  refs/heads/branch-1.4 b401a9f97 -> 5afc2b74c


[CARBONDATA-2566] Optimize CarbonReaderExample

Optimize CarbonReaderExample:
1. Add different data types, including date and timestamp
2. Update the doc
3. Invoke
Schema schema = CarbonSchemaReader
    .readSchemaInIndexFile(dataFiles[0].getAbsolutePath())
    .asOriginOrder();

This closes #2356
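
In outline, the optimized example lists the carbonindex files under the writer output path, reads the schema back from the first index file in its original column order, and projects every column. A minimal sketch of that flow follows, assuming the SDK APIs used in the diff below (the class name and path are illustrative):

import java.io.File;
import java.io.FilenameFilter;

import org.apache.carbondata.sdk.file.CarbonReader;
import org.apache.carbondata.sdk.file.CarbonSchemaReader;
import org.apache.carbondata.sdk.file.Schema;

public class SchemaReadSketch {
    public static void main(String[] args) throws Exception {
        String path = "./testWriteFiles";
        // List the carbonindex files written alongside the data files.
        File[] indexFiles = new File(path).listFiles(new FilenameFilter() {
            @Override public boolean accept(File dir, String name) {
                return name != null && name.endsWith("carbonindex");
            }
        });
        // Read the schema from the first index file, restoring the
        // original column order.
        Schema schema = CarbonSchemaReader
            .readSchemaInIndexFile(indexFiles[0].getAbsolutePath())
            .asOriginOrder();
        // Project every column of the recovered schema.
        String[] projection = new String[schema.getFields().length];
        for (int i = 0; i < schema.getFields().length; i++) {
            projection[i] = schema.getFields()[i].getFieldName();
        }
        CarbonReader reader = CarbonReader.builder(path, "_temp")
            .projection(projection)
            .build();
        while (reader.hasNext()) {
            Object[] row = (Object[]) reader.readNextRow();
            System.out.println(row[0]);
        }
        reader.close();
    }
}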


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/ef47070a
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/ef47070a
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/ef47070a

Branch: refs/heads/branch-1.4
Commit: ef47070a6890355206360d76b264f1ad2ec21310
Parents: 75f638e
Author: xubo245 <xu...@huawei.com>
Authored: Thu May 31 15:52:57 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               | 15 ++--
 .../examples/sdk/CarbonReaderExample.java       | 92 +++++++++++++++++---
 2 files changed, 89 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/ef47070a/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index ec70919..2371b33 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -415,17 +415,22 @@ External client can make use of this reader to read CarbonData files without Car
     String path = "./testWriteFiles";
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
-        .projection(new String[]{"name", "age"})
+        .projection(new String[]{"stringField", "shortField", "intField", "longField", 
+                "doubleField", "boolField", "dateField", "timeField", "decimalField"})
         .build();
 
     // 2. Read data
+    long day = 24L * 3600 * 1000;
     int i = 0;
     while (reader.hasNext()) {
-      Object[] row = (Object[]) reader.readNextRow();
-      System.out.println(row[0] + "\t" + row[1]);
-      i++;
+        Object[] row = (Object[]) reader.readNextRow();
+        System.out.println(String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t",
+            i, row[0], row[1], row[2], row[3], row[4], row[5],
+            new Date((day * ((int) row[6]))), new Timestamp((long) row[7] / 1000), row[8]
+        ));
+        i++;
     }
-    
+
     // 3. Close this reader
     reader.close();
 ```
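
A note on the decoding above: the reader returns a DATE column as an int count of days since the Unix epoch and a TIMESTAMP column as a long in microseconds, which is why the snippet multiplies by 24L * 3600 * 1000 and divides by 1000. A minimal sketch of just that conversion (helper and class names are illustrative):

import java.sql.Date;
import java.sql.Timestamp;

public class DecodeSketch {
    static final long MILLIS_PER_DAY = 24L * 3600 * 1000;

    // DATE comes back as an int number of days since the Unix epoch.
    static Date decodeDate(Object raw) {
        return new Date(MILLIS_PER_DAY * ((int) raw));
    }

    // TIMESTAMP comes back as a long in microseconds.
    static Timestamp decodeTimestamp(Object raw) {
        return new Timestamp(((long) raw) / 1000);
    }
}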

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ef47070a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
index d7886c0..8d3ff0d 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
@@ -18,16 +18,19 @@
 package org.apache.carbondata.examples.sdk;
 
 import java.io.File;
+import java.io.FilenameFilter;
+import java.sql.Date;
+import java.sql.Timestamp;
 
 import org.apache.commons.io.FileUtils;
 
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.sdk.file.CarbonReader;
+import org.apache.carbondata.sdk.file.CarbonSchemaReader;
 import org.apache.carbondata.sdk.file.CarbonWriter;
 import org.apache.carbondata.sdk.file.Field;
 import org.apache.carbondata.sdk.file.Schema;
 
-
 /**
  * Example fo CarbonReader with close method
  * After readNextRow of CarbonReader, User should close the reader,
@@ -39,36 +42,99 @@ public class CarbonReaderExample {
         try {
             FileUtils.deleteDirectory(new File(path));
 
-            Field[] fields = new Field[2];
-            fields[0] = new Field("name", DataTypes.STRING);
-            fields[1] = new Field("age", DataTypes.INT);
+            Field[] fields = new Field[9];
+            fields[0] = new Field("stringField", DataTypes.STRING);
+            fields[1] = new Field("shortField", DataTypes.SHORT);
+            fields[2] = new Field("intField", DataTypes.INT);
+            fields[3] = new Field("longField", DataTypes.LONG);
+            fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+            fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+            fields[6] = new Field("dateField", DataTypes.DATE);
+            fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+            fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
 
             CarbonWriter writer = CarbonWriter.builder()
-                    .outputPath(path)
-                    .persistSchemaFile(true)
-                    .buildWriterForCSVInput(new Schema(fields));
+                .outputPath(path)
+                .buildWriterForCSVInput(new Schema(fields));
 
             for (int i = 0; i < 10; i++) {
-                writer.write(new String[]{"robot" + (i % 10), String.valueOf(i)});
+                String[] row2 = new String[]{
+                    "robot" + (i % 10),
+                    String.valueOf(i),
+                    String.valueOf(i),
+                    String.valueOf(Long.MAX_VALUE - i),
+                    String.valueOf((double) i / 2),
+                    String.valueOf(true),
+                    "2019-03-02",
+                    "2019-02-12 03:03:34",
+                    "12.345"
+                };
+                writer.write(row2);
             }
             writer.close();
 
+            File[] dataFiles = new File(path).listFiles(new FilenameFilter() {
+                @Override
+                public boolean accept(File dir, String name) {
+                    if (name == null) {
+                        return false;
+                    }
+                    return name.endsWith("carbonindex");
+                }
+            });
+            if (dataFiles == null || dataFiles.length < 1) {
+                throw new RuntimeException("Carbon index file not exists.");
+            }
+            Schema schema = CarbonSchemaReader
+                .readSchemaInIndexFile(dataFiles[0].getAbsolutePath())
+                .asOriginOrder();
+            // Transform the schema
+            String[] strings = new String[schema.getFields().length];
+            for (int i = 0; i < schema.getFields().length; i++) {
+                strings[i] = (schema.getFields())[i].getFieldName();
+            }
+
             // Read data
             CarbonReader reader = CarbonReader
-                    .builder(path, "_temp")
-                    .projection(new String[]{"name", "age"})
-                    .build();
+                .builder(path, "_temp")
+                .projection(strings)
+                .build();
 
             System.out.println("\nData:");
+            long day = 24L * 3600 * 1000;
+            int i = 0;
             while (reader.hasNext()) {
                 Object[] row = (Object[]) reader.readNextRow();
-                System.out.println(row[0] + " " + row[1]);
+                System.out.println(String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t",
+                    i, row[0], row[1], row[2], row[3], row[4], row[5],
+                    new Date((day * ((int) row[6]))), new Timestamp((long) row[7] / 1000), row[8]
+                ));
+                i++;
+            }
+            System.out.println("\nFinished");
+
+            // Read data
+            CarbonReader reader2 = CarbonReader
+                .builder(path, "_temp")
+                .projectAllColumns()
+                .build();
+
+            System.out.println("\nData:");
+            i = 0;
+            while (reader2.hasNext()) {
+              Object[] row = (Object[]) reader2.readNextRow();
+              System.out.println(String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t",
+                  i, row[0], new Date((day * ((int) row[1]))), new Timestamp((long) row[2] / 1000),
+                  row[3], row[4], row[5], row[6], row[7], row[8]
+              ));
+              i++;
             }
             System.out.println("\nFinished");
             reader.close();
             FileUtils.deleteDirectory(new File(path));
-        } catch (Exception e) {
+        } catch (Throwable e) {
             e.printStackTrace();
+            System.out.println(e.getMessage());
         }
     }
 }


[16/26] carbondata git commit: [CARBONDATA-2491] Fix the error when the reader reads twice with the SDK CarbonReader

Posted by ra...@apache.org.
[CARBONDATA-2491] Fix the error when the reader reads twice with the SDK CarbonReader

This PR includes:
1. Fix the out-of-bounds error when the reader reads twice with the SDK CarbonReader
2. Fix the java.lang.NegativeArraySizeException
3. Add timestamp and bad-record test cases
4. Support parallel read with two readers

This closes #2318
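
Concretely, the change means a closed reader now fails fast instead of returning garbage, and two readers over the same path can be iterated side by side. A minimal sketch of both behaviors, based on the test cases in the diff below (the class name and path are illustrative):

import org.apache.carbondata.sdk.file.CarbonReader;

public class ReadTwiceSketch {
    public static void main(String[] args) throws Exception {
        String path = "./testWriteFiles";
        // Two readers over the same files can be iterated side by side.
        CarbonReader r1 = CarbonReader.builder(path, "_temp")
            .projection(new String[]{"name", "age"}).build();
        CarbonReader r2 = CarbonReader.builder(path, "_temp")
            .projection(new String[]{"name", "age"}).build();
        while (r1.hasNext()) {
            Object[] a = (Object[]) r1.readNextRow();
            Object[] b = (Object[]) r2.readNextRow();
            System.out.println(a[0] + " == " + b[0]);
        }
        r1.close();
        r2.close();
        // A closed reader now fails fast instead of reading out of bounds.
        try {
            r1.hasNext();
        } catch (RuntimeException e) {
            // "CarbonReader not initialise, please create it first."
        }
    }
}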


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/8b735851
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/8b735851
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/8b735851

Branch: refs/heads/branch-1.4
Commit: 8b735851940c4480b09b8324ecb0befa6bfa3e9f
Parents: 8ef6bd1
Author: xubo245 <xu...@huawei.com>
Authored: Fri May 18 15:40:16 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../core/datamap/DataMapStoreManager.java       |   2 +-
 .../scan/result/iterator/ChunkRowIterator.java  |  18 +-
 .../carbondata/hadoop/CarbonRecordReader.java   |   3 +
 .../carbondata/sdk/file/CarbonReader.java       |  17 ++
 .../carbondata/sdk/file/CarbonReaderTest.java   | 234 ++++++++++++++++++-
 5 files changed, 263 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/8b735851/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index 1359e85..0fcf4cd 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -411,7 +411,7 @@ public final class DataMapStoreManager {
   }
 
   /**
-   * this methos clears the datamap of table from memory
+   * this methods clears the datamap of table from memory
    */
   public void clearDataMaps(String tableUniqName) {
     List<TableDataMap> tableIndices = allDataMaps.get(tableUniqName);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8b735851/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java
index 1235789..0866395 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java
@@ -31,14 +31,14 @@ public class ChunkRowIterator extends CarbonIterator<Object[]> {
   private CarbonIterator<RowBatch> iterator;
 
   /**
-   * currect chunk
+   * current chunk
    */
-  private RowBatch currentchunk;
+  private RowBatch currentChunk;
 
   public ChunkRowIterator(CarbonIterator<RowBatch> iterator) {
     this.iterator = iterator;
     if (iterator.hasNext()) {
-      currentchunk = iterator.next();
+      currentChunk = iterator.next();
     }
   }
 
@@ -50,13 +50,13 @@ public class ChunkRowIterator extends CarbonIterator<Object[]> {
    * @return {@code true} if the iteration has more elements
    */
   @Override public boolean hasNext() {
-    if (null != currentchunk) {
-      if ((currentchunk.hasNext())) {
+    if (null != currentChunk) {
+      if ((currentChunk.hasNext())) {
         return true;
-      } else if (!currentchunk.hasNext()) {
+      } else if (!currentChunk.hasNext()) {
         while (iterator.hasNext()) {
-          currentchunk = iterator.next();
-          if (currentchunk != null && currentchunk.hasNext()) {
+          currentChunk = iterator.next();
+          if (currentChunk != null && currentChunk.hasNext()) {
             return true;
           }
         }
@@ -71,7 +71,7 @@ public class ChunkRowIterator extends CarbonIterator<Object[]> {
    * @return the next element in the iteration
    */
   @Override public Object[] next() {
-    return currentchunk.next();
+    return currentChunk.next();
   }
 
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8b735851/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
index 1191a38..d4b091c 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
@@ -23,6 +23,7 @@ import java.util.Map;
 
 import org.apache.carbondata.common.CarbonIterator;
 import org.apache.carbondata.core.cache.dictionary.Dictionary;
+import org.apache.carbondata.core.datamap.DataMapStoreManager;
 import org.apache.carbondata.core.datastore.block.TableBlockInfo;
 import org.apache.carbondata.core.scan.executor.QueryExecutor;
 import org.apache.carbondata.core.scan.executor.QueryExecutorFactory;
@@ -118,6 +119,8 @@ public class CarbonRecordReader<T> extends AbstractRecordReader<T> {
         CarbonUtil.clearDictionaryCache(entry.getValue());
       }
     }
+    // Clear the datamap cache
+    DataMapStoreManager.getInstance().getDefaultDataMap(queryModel.getTable()).clear();
     // close read support
     readSupport.close();
     try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8b735851/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index c9cd8f5..6517e89 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -44,6 +44,8 @@ public class CarbonReader<T> {
 
   private int index;
 
+  private boolean initialise;
+
   /**
    * Call {@link #builder(String)} to construct an instance
    */
@@ -51,6 +53,7 @@ public class CarbonReader<T> {
     if (readers.size() == 0) {
       throw new IllegalArgumentException("no reader");
     }
+    this.initialise = true;
     this.readers = readers;
     this.index = 0;
     this.currentReader = readers.get(0);
@@ -60,6 +63,7 @@ public class CarbonReader<T> {
    * Return true if has next row
    */
   public boolean hasNext() throws IOException, InterruptedException {
+    validateReader();
     if (currentReader.nextKeyValue()) {
       return true;
     } else {
@@ -78,6 +82,7 @@ public class CarbonReader<T> {
    * Read and return next row object
    */
   public T readNextRow() throws IOException, InterruptedException {
+    validateReader();
     return currentReader.getCurrentValue();
   }
 
@@ -111,6 +116,18 @@ public class CarbonReader<T> {
    * @throws IOException
    */
   public void close() throws IOException {
+    validateReader();
     this.currentReader.close();
+    this.initialise = false;
+  }
+
+  /**
+   * Validate the reader
+   */
+  private void validateReader() {
+    if (!this.initialise) {
+      throw new RuntimeException(this.getClass().getSimpleName() +
+          " not initialise, please create it first.");
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8b735851/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 937dde8..0d2c84e 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -18,21 +18,30 @@
 package org.apache.carbondata.sdk.file;
 
 import java.io.File;
+import java.io.FileFilter;
 import java.io.FilenameFilter;
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.List;
 
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
 
+import junit.framework.TestCase;
 import org.apache.commons.io.FileUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-public class CarbonReaderTest {
+public class CarbonReaderTest extends TestCase {
 
   @Before
   public void cleanFile() {
@@ -77,6 +86,99 @@ public class CarbonReaderTest {
     Assert.assertEquals(i, 100);
 
     reader.close();
+
+    // Read again
+    CarbonReader reader2 = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "age"})
+        .build();
+
+    i = 0;
+    while (reader2.hasNext()) {
+      Object[] row = (Object[]) reader2.readNextRow();
+      // Default sort column is applied for dimensions. So, need  to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+    reader2.close();
+
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
+  public void testReadFilesParallel() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "age"})
+        .build();
+    // Reader 2
+    CarbonReader reader2 = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "age"})
+        .build();
+
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      Object[] row2 = (Object[]) reader2.readNextRow();
+      // parallel compare
+      Assert.assertEquals(row[0], row2[0]);
+      Assert.assertEquals(row[1], row2[1]);
+    }
+
+    reader.close();
+    reader2.close();
+
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
+  public void testReadAfterClose() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader.builder(path, "_temp")
+        .projection(new String[]{"name", "age"}).build();
+
+    reader.close();
+    String msg = "CarbonReader not initialise, please create it first.";
+    try {
+      reader.hasNext();
+      assert (false);
+    } catch (RuntimeException e) {
+      assert (e.getMessage().equals(msg));
+    }
+
+    try {
+      reader.readNextRow();
+      assert (false);
+    } catch (RuntimeException e) {
+      assert (e.getMessage().equals(msg));
+    }
+
+    try {
+      reader.close();
+      assert (false);
+    } catch (RuntimeException e) {
+      assert (e.getMessage().equals(msg));
+    }
+
     FileUtils.deleteDirectory(new File(path));
   }
 
@@ -177,4 +279,134 @@ public class CarbonReaderTest {
     reader.close();
     FileUtils.deleteDirectory(new File(path));
   }
+
+  CarbonProperties carbonProperties;
+
+  @Override
+  public void setUp() {
+    carbonProperties = CarbonProperties.getInstance();
+  }
+
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(CarbonReaderTest.class.getName());
+
+  @Test
+  public void testTimeStampAndBadRecord() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("intField", DataTypes.INT);
+    fields[2] = new Field("shortField", DataTypes.SHORT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2018-05-12",
+            "2018-05-12",
+            "12.345"
+        };
+        writer.write(row);
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+    LOGGER.audit("Bad record location:" + storeLocation);
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    File[] dataFiles = segmentFolder.listFiles(new FileFilter() {
+      @Override
+      public boolean accept(File pathname) {
+        return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
+      }
+    });
+    Assert.assertNotNull(dataFiles);
+    Assert.assertTrue(dataFiles.length > 0);
+
+    CarbonReader reader = CarbonReader.builder(path, "_temp")
+        .projection(new String[]{
+            "stringField"
+            , "shortField"
+            , "intField"
+            , "longField"
+            , "doubleField"
+            , "boolField"
+            , "dateField"
+            , "timeField"
+            , "decimalField"}).build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
 }


[13/26] carbondata git commit: [CARBONDATA-2538] Added filter while listing files from writer path

Posted by ra...@apache.org.
[CARBONDATA-2538] Added filter while listing files from writer path

1. Added a filter to list only index and carbondata files, so that a proper exception can be thrown even if lock files are present
2. Updated the complex type docs

This closes #2344
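
The listing change only admits index and carbondata files, so stale lock files in the writer path no longer break the snapshot. A minimal sketch of the filter, mirroring the CarbonFileFilter callback in the diff below (the class and method names are illustrative):

import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
import org.apache.carbondata.core.util.path.CarbonTablePath;

public class ListDataFilesSketch {
    // Keep only .carbonindex and .carbondata entries; lock files and
    // any other stray entries in the writer path are ignored.
    static CarbonFile[] listDataFiles(CarbonFile dir) {
        return dir.listFiles(new CarbonFileFilter() {
            @Override public boolean accept(CarbonFile file) {
                return file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)
                    || file.getName().endsWith(CarbonTablePath.CARBON_DATA_EXT);
            }
        });
    }
}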


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/2ebd8b1d
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/2ebd8b1d
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/2ebd8b1d

Branch: refs/heads/branch-1.4
Commit: 2ebd8b1d59875130f16b9d2e1701af5b0bacc1c7
Parents: 60b6569
Author: kunal642 <ku...@gmail.com>
Authored: Fri May 25 16:21:45 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../core/metadata/schema/table/CarbonTable.java          |  2 +-
 .../readcommitter/LatestFilesReadCommittedScope.java     |  9 ++++++++-
 .../java/org/apache/carbondata/core/util/CarbonUtil.java | 11 ++++++++---
 docs/supported-data-types-in-carbondata.md               |  2 ++
 .../command/table/CarbonDescribeFormattedCommand.scala   |  2 +-
 5 files changed, 20 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/2ebd8b1d/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index 8528d6f..b1ed981 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -892,7 +892,7 @@ public class CarbonTable implements Serializable {
 
 
   public long size() throws IOException {
-    Map<String, Long> dataIndexSize = CarbonUtil.calculateDataIndexSize(this);
+    Map<String, Long> dataIndexSize = CarbonUtil.calculateDataIndexSize(this, true);
     Long dataSize = dataIndexSize.get(CarbonCommonConstants.CARBON_TOTAL_DATA_SIZE);
     if (dataSize == null) {
       dataSize = 0L;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2ebd8b1d/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
index 6106174..14bba65 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
@@ -26,6 +26,7 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.indexstore.blockletindex.SegmentIndexFileStore;
 import org.apache.carbondata.core.mutate.UpdateVO;
@@ -138,7 +139,13 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
   @Override public void takeCarbonIndexFileSnapShot() throws IOException {
     // Read the current file Path get the list of indexes from the path.
     CarbonFile file = FileFactory.getCarbonFile(carbonFilePath);
-    if (file.listFiles().length == 0) {
+    CarbonFile[] files = file.listFiles(new CarbonFileFilter() {
+      @Override public boolean accept(CarbonFile file) {
+        return file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT) || file.getName()
+            .endsWith(CarbonTablePath.CARBON_DATA_EXT);
+      }
+    });
+    if (files.length == 0) {
       // For nonTransactional table, files can be removed at any point of time.
       // So cannot assume files will be present
       throw new IOException("No files are present in the table location :" + carbonFilePath);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2ebd8b1d/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 23d02ef..9ccd772 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2550,7 +2550,8 @@ public final class CarbonUtil {
   /**
    * This method will calculate the data size and index size for carbon table
    */
-  public static Map<String, Long> calculateDataIndexSize(CarbonTable carbonTable)
+  public static Map<String, Long> calculateDataIndexSize(CarbonTable carbonTable,
+      Boolean updateSize)
       throws IOException {
     Map<String, Long> dataIndexSizeMap = new HashMap<String, Long>();
     long dataSize = 0L;
@@ -2565,7 +2566,11 @@ public final class CarbonUtil {
       SegmentStatusManager segmentStatusManager = new SegmentStatusManager(identifier);
       ICarbonLock carbonLock = segmentStatusManager.getTableStatusLock();
       try {
-        if (carbonLock.lockWithRetries()) {
+        boolean lockAcquired = true;
+        if (updateSize) {
+          lockAcquired = carbonLock.lockWithRetries();
+        }
+        if (lockAcquired) {
           LOGGER.info("Acquired lock for table for table status updation");
           String metadataPath = carbonTable.getMetadataPath();
           LoadMetadataDetails[] loadMetadataDetails =
@@ -2593,7 +2598,7 @@ public final class CarbonUtil {
             }
           }
           // If it contains old segment, write new load details
-          if (needUpdate) {
+          if (needUpdate && updateSize) {
             SegmentStatusManager.writeLoadDetailsIntoFile(
                 CarbonTablePath.getTableStatusFilePath(identifier.getTablePath()),
                 loadMetadataDetails);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2ebd8b1d/docs/supported-data-types-in-carbondata.md
----------------------------------------------------------------------
diff --git a/docs/supported-data-types-in-carbondata.md b/docs/supported-data-types-in-carbondata.md
index 6c21508..7260afe 100644
--- a/docs/supported-data-types-in-carbondata.md
+++ b/docs/supported-data-types-in-carbondata.md
@@ -38,6 +38,8 @@
   * Complex Types
     * arrays: ARRAY``<data_type>``
     * structs: STRUCT``<col_name : data_type COMMENT col_comment, ...>``
+    
+    **NOTE**: Only 2 level complex type schema is supported for now.
 
   * Other Types
     * BOOLEAN
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2ebd8b1d/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
index 69bb91f..7d15cc1 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
@@ -96,7 +96,7 @@ private[sql] case class CarbonDescribeFormattedCommand(
     val tableComment = tblProps.asScala.getOrElse(CarbonCommonConstants.TABLE_COMMENT, "")
     results ++= Seq(("Comment", tableComment, ""))
     results ++= Seq(("Table Block Size ", carbonTable.getBlockSizeInMB + " MB", ""))
-    val dataIndexSize = CarbonUtil.calculateDataIndexSize(carbonTable)
+    val dataIndexSize = CarbonUtil.calculateDataIndexSize(carbonTable, false)
     if (!dataIndexSize.isEmpty) {
       results ++= Seq((CarbonCommonConstants.TABLE_DATA_SIZE,
         dataIndexSize.get(CarbonCommonConstants.CARBON_TOTAL_DATA_SIZE).toString, ""))


[02/26] carbondata git commit: [CARBONDATA-2529] Fixed S3 Issue for Hadoop 2.8.3

Posted by ra...@apache.org.
[CARBONDATA-2529] Fixed S3 Issue for Hadoop 2.8.3

This fixes an issue that occurred while loading data with S3 as the backend

This closes #2340
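
The rename path now treats s3a URIs the same way as s3n: since S3 offers no atomic overwrite-rename, the target is deleted before renaming. A minimal sketch of that strategy, with the surrounding HDFSCarbonFile context simplified (the class and helper names are illustrative):

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class S3RenameSketch {
    // S3 has no atomic overwrite-rename, so for s3n:// and s3a:// paths
    // the target is deleted first and a plain rename is issued.
    static boolean renameOverwrite(FileSystem fs, Path from, String changetoName)
        throws IOException {
        String path = from.toString();
        if (path.startsWith("s3n") || path.startsWith("s3a")) {
            fs.delete(new Path(changetoName), true);
        }
        return fs.rename(from, new Path(changetoName));
    }
}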


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/d0dc8220
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/d0dc8220
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/d0dc8220

Branch: refs/heads/branch-1.4
Commit: d0dc822078bce30d42b43153e1bd19361261b585
Parents: 8f3ecaf
Author: Bhavya <bh...@knoldus.com>
Authored: Thu May 24 21:17:58 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../carbondata/core/datastore/filesystem/HDFSCarbonFile.java      | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/d0dc8220/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
index 4663ac5..fc5420d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
@@ -120,7 +120,8 @@ public class HDFSCarbonFile extends AbstractDFSCarbonFile {
         ((DistributedFileSystem) fs).rename(fileStatus.getPath(), new Path(changetoName),
             org.apache.hadoop.fs.Options.Rename.OVERWRITE);
         return true;
-      } else if (fileStatus.getPath().toString().startsWith("s3n")) {
+      } else if (fileStatus.getPath().toString().startsWith("s3n")
+          || fileStatus.getPath().toString().startsWith("s3a")) {
         fs.delete(new Path(changetoName), true);
         return fs.rename(fileStatus.getPath(), new Path(changetoName));
       } else {


[19/26] carbondata git commit: [CARBONDATA-2499][Test] Validate the visible/invisible status of datamap

Posted by ra...@apache.org.
[CARBONDATA-2499][Test] Validate the visible/invisible status of datamap

This closes #2325
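
The visibility toggled in these tests is a session property whose key concatenates the datamap-visible prefix with the database, table, and datamap names. A minimal sketch of how that key is composed, mirroring the String.format call in DataMapStoreManager (the class and object names are illustrative):

import org.apache.carbondata.core.constants.CarbonCommonConstants;

public class DataMapVisibilityKeySketch {
    public static void main(String[] args) {
        // SET <key> = false in the session hides the datamap from queries;
        // setting it back to true restores it.
        String key = String.format("%s%s.%s.%s",
            CarbonCommonConstants.CARBON_DATAMAP_VISIBLE,
            "default", "datamap_test", "datamap1");
        System.out.println(key);
    }
}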


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/009ccaf5
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/009ccaf5
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/009ccaf5

Branch: refs/heads/branch-1.4
Commit: 009ccaf58a4778ecfd436360aeafada927d78763
Parents: 9a90e17
Author: xubo245 <xu...@huawei.com>
Authored: Mon May 21 15:47:24 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../testsuite/datamap/CGDataMapTestCase.scala   | 71 ++++++++++++++----
 .../testsuite/datamap/FGDataMapTestCase.scala   | 78 +++++++++++++++++++-
 .../DataLoadFailAllTypeSortTest.scala           |  4 -
 3 files changed, 130 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/009ccaf5/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
index 848acde..b5c3df1 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
@@ -22,7 +22,6 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 import com.sun.xml.internal.messaging.saaj.util.ByteOutputStream
-import org.apache.hadoop.fs.Path
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
@@ -401,30 +400,70 @@ class CGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"DROP TABLE IF EXISTS $tableName")
     sql(
       s"""
-        | CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
-        | STORED BY 'org.apache.carbondata.format'
-        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+         | CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
+         | STORED BY 'org.apache.carbondata.format'
+         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
       """.stripMargin)
     // register datamap writer
-    sql(s"create datamap $dataMapName1 on table $tableName using '${classOf[CGDataMapFactory].getName}' DMPROPERTIES('index_columns'='name')")
-    sql(s"create datamap $dataMapName2 on table $tableName using '${classOf[CGDataMapFactory].getName}' DMPROPERTIES('index_columns'='city')")
+    sql(
+      s"""
+         | CREATE DATAMAP $dataMapName1
+         | ON TABLE $tableName
+         | USING '${classOf[CGDataMapFactory].getName}'
+         | DMPROPERTIES('index_columns'='name')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP $dataMapName2
+         | ON TABLE $tableName
+         | USING '${classOf[CGDataMapFactory].getName}'
+         | DMPROPERTIES('index_columns'='city')
+       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE $tableName OPTIONS('header'='false')")
+    val df1 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    assert(df1(0).getString(0).contains("CG DataMap"))
+    assert(df1(0).getString(0).contains(dataMapName1))
+    val e11 = intercept[Exception] {
+      assert(df1(0).getString(0).contains(dataMapName2))
+    }
+    assert(e11.getMessage.contains("did not contain \"" + dataMapName2))
 
     // make datamap1 invisible
-    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
-    checkAnswer(sql(s"select * from $tableName where name='n502670' and city='c2670'"),
-      sql("select * from normal_test where name='n502670' and city='c2670'"))
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
+    val df2 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    val e = intercept[Exception] {
+      assert(df2(0).getString(0).contains(dataMapName1))
+    }
+    assert(e.getMessage.contains("did not contain \"" + dataMapName1))
+    assert(df2(0).getString(0).contains(dataMapName2))
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
 
     // also make datamap2 invisible
-    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = false")
-    checkAnswer(sql(s"select * from $tableName where name='n502670' and city='c2670'"),
-      sql("select * from normal_test where name='n502670' and city='c2670'"))
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = false")
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+    val df3 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    val e31 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName1))
+    }
+    assert(e31.getMessage.contains("did not contain \"" + dataMapName1))
+    val e32 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName2))
+    }
+    assert(e32.getMessage.contains("did not contain \"" + dataMapName2))
 
     // make datamap1,datamap2 visible
-    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
-    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
-    checkAnswer(sql(s"select * from $tableName where name='n502670' and city='c2670'"),
-      sql("select * from normal_test where name='n502670' and city='c2670'"))
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+    val df4 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    assert(df4(0).getString(0).contains(dataMapName1))
+    val e41 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName2))
+    }
+    assert(e41.getMessage.contains("did not contain \"" + dataMapName2))
   }
 
   test("test datamap storage in system folder") {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/009ccaf5/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
index e2642ff..2d666c3 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
@@ -22,12 +22,10 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 import com.sun.xml.internal.messaging.saaj.util.ByteOutputStream
+import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.carbondata.core.datamap.{DataMapDistributable, DataMapMeta}
-import org.apache.carbondata.core.datamap.Segment
-import org.apache.carbondata.core.datamap.dev.{DataMapModel, DataMapBuilder, DataMapWriter}
 import org.apache.carbondata.core.datamap.{DataMapDistributable, DataMapMeta, Segment}
 import org.apache.carbondata.core.datamap.dev.{DataMapModel, DataMapBuilder, DataMapWriter}
 import org.apache.carbondata.core.datamap.dev.fgdatamap.{FineGrainBlocklet, FineGrainDataMap, FineGrainDataMapFactory}
@@ -488,9 +486,83 @@ class FGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
       sql("select * from normal_test where name='n502670' and city='c2670'"))
   }
 
+  test("test invisible datamap during query") {
+    val tableName = "datamap_testFG"
+    val dataMapName1 = "datamap1"
+    val dataMapName2 = "datamap2"
+    sql(s"DROP TABLE IF EXISTS $tableName")
+    sql(
+      s"""
+         | CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
+         | STORED BY 'org.apache.carbondata.format'
+         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    // register datamap writer
+    sql(
+      s"""
+         | CREATE DATAMAP $dataMapName1
+         | ON TABLE $tableName
+         | USING '${classOf[FGDataMapFactory].getName}'
+         | DMPROPERTIES('index_columns'='name')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP $dataMapName2
+         | ON TABLE $tableName
+         | USING '${classOf[FGDataMapFactory].getName}'
+         | DMPROPERTIES('index_columns'='city')
+       """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE $tableName OPTIONS('header'='false')")
+    val df1 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    assert(df1(0).getString(0).contains("FG DataMap"))
+    assert(df1(0).getString(0).contains(dataMapName1))
+    val e11 = intercept[Exception] {
+      assert(df1(0).getString(0).contains(dataMapName2))
+    }
+    assert(e11.getMessage.contains("did not contain \"" + dataMapName2))
+
+    // make datamap1 invisible
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
+    val df2 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    val e = intercept[Exception] {
+      assert(df2(0).getString(0).contains(dataMapName1))
+    }
+    assert(e.getMessage.contains("did not contain \"" + dataMapName1))
+    assert(df2(0).getString(0).contains(dataMapName2))
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+
+    // also make datamap2 invisible
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = false")
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+    val df3 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    val e31 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName1))
+    }
+    assert(e31.getMessage.contains("did not contain \"" + dataMapName1))
+    val e32 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName2))
+    }
+    assert(e32.getMessage.contains("did not contain \"" + dataMapName2))
+
+    // make datamap1,datamap2 visible
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+    val df4 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    assert(df4(0).getString(0).contains(dataMapName1))
+    val e41 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName2))
+    }
+    assert(e41.getMessage.contains("did not contain \"" + dataMapName2))
+  }
+
   override protected def afterAll(): Unit = {
     CompactionSupportGlobalSortBigFileTest.deleteFile(file2)
     sql("DROP TABLE IF EXISTS normal_test")
     sql("DROP TABLE IF EXISTS datamap_test")
+    sql("DROP TABLE IF EXISTS datamap_testFG")
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/009ccaf5/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
index 121150c..a7dceb4 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.carbondata
 
-import java.io.File
-
 import org.apache.spark.sql.common.util.Spark2QueryTest
 import org.apache.spark.sql.hive.HiveContext
 import org.scalatest.BeforeAndAfterAll
@@ -28,8 +26,6 @@ import org.apache.carbondata.core.util.CarbonProperties
 
 /**
  * Test Class for detailed query on timestamp datatypes
- *
- *
  */
 class DataLoadFailAllTypeSortTest extends Spark2QueryTest with BeforeAndAfterAll {
   var hiveContext: HiveContext = _


[03/26] carbondata git commit: [Documentation] Editorial review comments fixed

Posted by ra...@apache.org.
[Documentation] Editorial review comments fixed

Editorial review comments fixed

This closes #2320


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/8f3ecaf4
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/8f3ecaf4
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/8f3ecaf4

Branch: refs/heads/branch-1.4
Commit: 8f3ecaf4b2e855983bf280b40c5077da409f9e64
Parents: dc0ec1e
Author: sgururajshetty <sg...@gmail.com>
Authored: Thu May 31 17:36:26 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 docs/data-management-on-carbondata.md    | 4 ++--
 docs/datamap/timeseries-datamap-guide.md | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/8f3ecaf4/docs/data-management-on-carbondata.md
----------------------------------------------------------------------
diff --git a/docs/data-management-on-carbondata.md b/docs/data-management-on-carbondata.md
index 51e98ab..706209c 100644
--- a/docs/data-management-on-carbondata.md
+++ b/docs/data-management-on-carbondata.md
@@ -35,11 +35,11 @@ This tutorial is going to introduce all commands and data operations on CarbonDa
   
   ```
   CREATE TABLE [IF NOT EXISTS] [db_name.]table_name[(col_name data_type , ...)]
-  STORED BY 'carbondata'
+  STORED AS carbondata
   [TBLPROPERTIES (property_name=property_value, ...)]
   [LOCATION 'path']
   ```
-  **NOTE:** CarbonData also supports "STORED AS carbondata". Find example code at [CarbonSessionExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala) in the CarbonData repo.
+  **NOTE:** CarbonData also supports "STORED AS carbondata" and "USING carbondata". Find example code at [CarbonSessionExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala) in the CarbonData repo.
 ### Usage Guidelines
 
   Following are the guidelines for TBLPROPERTIES, CarbonData's additional table options can be set via carbon.properties.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8f3ecaf4/docs/datamap/timeseries-datamap-guide.md
----------------------------------------------------------------------
diff --git a/docs/datamap/timeseries-datamap-guide.md b/docs/datamap/timeseries-datamap-guide.md
index 7847312..bea5286 100644
--- a/docs/datamap/timeseries-datamap-guide.md
+++ b/docs/datamap/timeseries-datamap-guide.md
@@ -1,12 +1,12 @@
 # CarbonData Timeseries DataMap
 
-* [Timeseries DataMap](#timeseries-datamap-intoduction-(alpha-feature-in-1.3.0))
+* [Timeseries DataMap Introduction](#timeseries-datamap-intoduction)
 * [Compaction](#compacting-pre-aggregate-tables)
 * [Data Management](#data-management-with-pre-aggregate-tables)
 
-## Timeseries DataMap Intoduction (Alpha feature in 1.3.0)
-Timeseries DataMap a pre-aggregate table implementation based on 'preaggregate' DataMap. 
-Difference is that Timerseries DataMap has built-in understanding of time hierarchy and 
+## Timeseries DataMap Introduction (Alpha feature in 1.3.0)
+Timeseries DataMap a pre-aggregate table implementation based on 'pre-aggregate' DataMap.
+Difference is that Timeseries DataMap has built-in understanding of time hierarchy and
 levels: year, month, day, hour, minute, so that it supports automatic roll-up in time dimension 
 for query.
 


[18/26] carbondata git commit: [CARBONDATA-2389] Support FG datamap in search mode

Posted by ra...@apache.org.
[CARBONDATA-2389] Support FG datamap in search mode

Search mode now supports FG (fine-grained) datamaps.

This closes #2290
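
Besides FG datamap support, the change adds a timeout for search-mode queries, defaulting to 10 seconds. A minimal sketch of overriding it through CarbonProperties, assuming the constant introduced in the diff below (the class name and value are illustrative):

import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;

public class SearchTimeoutSketch {
    public static void main(String[] args) {
        // Raise the search-mode query timeout from the 10s default to 30s.
        CarbonProperties.getInstance().addProperty(
            CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT, "30s");
    }
}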


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/cb71ffe1
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/cb71ffe1
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/cb71ffe1

Branch: refs/heads/branch-1.4
Commit: cb71ffe1ac1a39ef34df43457e704232a4c1444e
Parents: 67766ab
Author: xubo245 <60...@qq.com>
Authored: Wed May 9 21:20:59 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   |  12 +
 .../core/datamap/DataMapStoreManager.java       |  20 +-
 .../apache/carbondata/core/datamap/Segment.java |   2 +-
 .../datamap/dev/expr/AndDataMapExprWrapper.java |  16 +
 .../datamap/dev/expr/DataMapExprWrapper.java    |  13 +
 .../dev/expr/DataMapExprWrapperImpl.java        |   8 +
 .../datamap/dev/expr/OrDataMapExprWrapper.java  |  13 +
 .../LatestFilesReadCommittedScope.java          |  43 ++-
 .../core/readcommitter/ReadCommittedScope.java  |   2 +-
 .../TableStatusReadCommittedScope.java          |   2 +-
 .../lucene/LuceneDataMapFactoryBase.java        |   4 +-
 .../examples/LuceneDataMapExample.scala         |   2 -
 .../carbondata/hadoop/CarbonRecordReader.java   |   8 +-
 .../hadoop/api/CarbonInputFormat.java           |   6 +-
 .../lucene/LuceneFineGrainDataMapSuite.scala    |   1 +
 ...eneFineGrainDataMapWithSearchModeSuite.scala | 328 +++++++++++++++++++
 .../detailquery/SearchModeTestCase.scala        |  27 ++
 .../execution/command/CarbonHiveCommands.scala  |   4 +-
 .../spark/sql/optimizer/CarbonFilters.scala     |   2 +
 .../store/worker/SearchRequestHandler.java      |  37 ++-
 .../scala/org/apache/spark/rpc/Master.scala     |  13 +-
 21 files changed, 521 insertions(+), 42 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 8ebce9e..08aa704 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1658,6 +1658,18 @@ public final class CarbonCommonConstants {
   public static final String CARBON_SEARCH_MODE_ENABLE_DEFAULT = "false";
 
   /**
+   * It's the timeout threshold of a carbon search query
+   */
+  @CarbonProperty
+  @InterfaceStability.Unstable
+  public static final String CARBON_SEARCH_QUERY_TIMEOUT = "carbon.search.query.timeout";
+
+  /**
+   * Default value is 10 seconds
+   */
+  public static final String CARBON_SEARCH_QUERY_TIMEOUT_DEFAULT = "10s";
+
+  /**
   * The size of the thread pool used for reading files in the Worker for search mode. By default,
   * it is the number of cores in the Worker
    */
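
The new timeout can be raised through CarbonProperties, as the search-mode test below does; a minimal sketch:

    import org.apache.carbondata.core.constants.CarbonCommonConstants
    import org.apache.carbondata.core.util.CarbonProperties

    // raise the search query timeout from the 10s default to 100s
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT, "100s")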

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index 0fcf4cd..96d2b1c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -96,13 +96,19 @@ public final class DataMapStoreManager {
       String dbName = carbonTable.getDatabaseName();
       String tableName = carbonTable.getTableName();
       String dmName = dataMap.getDataMapSchema().getDataMapName();
-      boolean isDmVisible = sessionInfo.getSessionParams().getProperty(
-          String.format("%s%s.%s.%s", CarbonCommonConstants.CARBON_DATAMAP_VISIBLE,
-              dbName, tableName, dmName), "true").trim().equalsIgnoreCase("true");
-      if (!isDmVisible) {
-        LOGGER.warn(String.format("Ignore invisible datamap %s on table %s.%s",
-            dmName, dbName, tableName));
-        dataMapIterator.remove();
+      // TODO: need to support getting the visible status of a datamap without sessionInfo in the future
+      if (sessionInfo != null) {
+        boolean isDmVisible = sessionInfo.getSessionParams().getProperty(
+            String.format("%s%s.%s.%s", CarbonCommonConstants.CARBON_DATAMAP_VISIBLE,
+                dbName, tableName, dmName), "true").trim().equalsIgnoreCase("true");
+        if (!isDmVisible) {
+          LOGGER.warn(String.format("Ignore invisible datamap %s on table %s.%s",
+              dmName, dbName, tableName));
+          dataMapIterator.remove();
+        }
+      } else {
+        String message = "Carbon session info is null";
+        LOGGER.info(message);
       }
     }
     return allDataMaps;
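
The visibility flag checked above is a plain session property keyed as <prefix><db>.<table>.<datamap>, so a datamap can be hidden from pruning with a SET command; a minimal sketch (database, table and datamap names are hypothetical):

    // hide datamap dm1 of default.datamap_test from query pruning
    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.datamap_test.dm1 = false")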

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java b/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java
index 85c7176..7b63b84 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java
@@ -115,7 +115,7 @@ public class Segment implements Serializable {
 
   public SegmentRefreshInfo getSegmentRefreshInfo(UpdateVO updateVo)
       throws IOException {
-    return readCommittedScope.getCommitedSegmentRefreshInfo(this, updateVo);
+    return readCommittedScope.getCommittedSegmentRefreshInfo(this, updateVo);
   }
 
   public String getSegmentNo() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java
index 1de16bc..ec674de 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java
@@ -20,6 +20,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
@@ -59,6 +60,21 @@ public class AndDataMapExprWrapper implements DataMapExprWrapper {
     return andBlocklets;
   }
 
+  @Override
+  public List<ExtendedBlocklet> prune(DataMapDistributable distributable,
+      List<PartitionSpec> partitionsToPrune)
+          throws IOException {
+    List<ExtendedBlocklet> leftPrune = left.prune(distributable, partitionsToPrune);
+    List<ExtendedBlocklet> rightPrune = right.prune(distributable, partitionsToPrune);
+    List<ExtendedBlocklet> andBlocklets = new ArrayList<>();
+    for (ExtendedBlocklet blocklet : leftPrune) {
+      if (rightPrune.contains(blocklet)) {
+        andBlocklets.add(blocklet);
+      }
+    }
+    return andBlocklets;
+  }
+
   @Override public List<ExtendedBlocklet> pruneBlocklets(List<ExtendedBlocklet> blocklets)
       throws IOException {
     List<ExtendedBlocklet> leftPrune = left.pruneBlocklets(blocklets);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java
index 5a04529..901cfc7 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java
@@ -20,6 +20,7 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.util.List;
 
+import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
@@ -41,6 +42,18 @@ public interface DataMapExprWrapper extends Serializable {
       throws IOException;
 
   /**
+   * prune blocklets according to the given distributable
+   *
+   * @param distributable     the distributable unit of a datamap
+   * @param partitionsToPrune partitions to prune
+   * @return the pruned ExtendedBlocklet list
+   * @throws IOException
+   */
+  List<ExtendedBlocklet> prune(DataMapDistributable distributable,
+      List<PartitionSpec> partitionsToPrune)
+          throws IOException;
+
+  /**
   * It is used in case of a distributable datamap. First, using a job, it gets all blocklets from all
   * related datamaps. These blocklets are passed to this method to apply the expression.
    * @param blocklets

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java
index 38f2336..6537976 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java
@@ -25,6 +25,7 @@ import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datamap.TableDataMap;
+import org.apache.carbondata.core.datamap.dev.DataMap;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
 import org.apache.carbondata.core.indexstore.PartitionSpec;
 import org.apache.carbondata.core.metadata.schema.table.DataMapSchema;
@@ -52,6 +53,13 @@ public class DataMapExprWrapperImpl implements DataMapExprWrapper {
     return dataMap.prune(segments, expression, partitionsToPrune);
   }
 
+  @Override
+  public List<ExtendedBlocklet> prune(DataMapDistributable distributable,
+      List<PartitionSpec> partitionsToPrune)
+      throws IOException {
+    List<DataMap> dataMaps = dataMap.getTableDataMaps(distributable);
+    return dataMap.prune(dataMaps, distributable, expression, partitionsToPrune);
+  }
+
   @Override public List<ExtendedBlocklet> pruneBlocklets(List<ExtendedBlocklet> blocklets)
       throws IOException {
     List<ExtendedBlocklet> blockletList = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java
index 4988903..bb98535 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java
@@ -22,6 +22,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
@@ -58,6 +59,18 @@ public class OrDataMapExprWrapper implements DataMapExprWrapper {
     return new ArrayList<>(andBlocklets);
   }
 
+  @Override
+  public List<ExtendedBlocklet> prune(DataMapDistributable distributable,
+      List<PartitionSpec> partitionsToPrune)
+          throws IOException {
+    List<ExtendedBlocklet> leftPrune = left.prune(distributable, partitionsToPrune);
+    List<ExtendedBlocklet> rightPrune = right.prune(distributable, partitionsToPrune);
+    Set<ExtendedBlocklet> andBlocklets = new HashSet<>();
+    andBlocklets.addAll(leftPrune);
+    andBlocklets.addAll(rightPrune);
+    return new ArrayList<>(andBlocklets);
+  }
+
   @Override public List<ExtendedBlocklet> pruneBlocklets(List<ExtendedBlocklet> blocklets)
       throws IOException {
     List<ExtendedBlocklet> leftPrune = left.pruneBlocklets(blocklets);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
index 14bba65..6a1234e 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
@@ -17,10 +17,7 @@
 package org.apache.carbondata.core.readcommitter;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
@@ -43,11 +40,20 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
 public class LatestFilesReadCommittedScope implements ReadCommittedScope {
 
   private String carbonFilePath;
+  private String segmentId;
   private ReadCommittedIndexFileSnapShot readCommittedIndexFileSnapShot;
   private LoadMetadataDetails[] loadMetadataDetails;
 
-  public LatestFilesReadCommittedScope(String path)  {
+  /**
+   * Constructor scoped to a single segment of the table
+   *
+   * @param path      carbon file path
+   * @param segmentId segment id
+   */
+  public LatestFilesReadCommittedScope(String path, String segmentId) {
+    Objects.requireNonNull(path);
     this.carbonFilePath = path;
+    this.segmentId = segmentId;
     try {
       takeCarbonIndexFileSnapShot();
     } catch (IOException ex) {
@@ -55,6 +61,15 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
     }
   }
 
+  /**
+   * Constructor scoped to the whole table path
+   *
+   * @param path carbon file path
+   */
+  public LatestFilesReadCommittedScope(String path) {
+    this(path, null);
+  }
+
   private void prepareLoadMetadata() {
     int loadCount = 0;
     Map<String, List<String>> snapshotMap =
@@ -101,13 +116,16 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
       segName = segment.getSegmentFileName();
     }
     List<String> index = snapShot.get(segName);
+    if (null == index) {
+      index = new LinkedList<>();
+    }
     for (String indexPath : index) {
       indexFileStore.put(indexPath, null);
     }
     return indexFileStore;
   }
 
-  @Override public SegmentRefreshInfo getCommitedSegmentRefreshInfo(
+  @Override public SegmentRefreshInfo getCommittedSegmentRefreshInfo(
       Segment segment, UpdateVO updateVo) throws IOException {
     Map<String, SegmentRefreshInfo> snapShot =
         readCommittedIndexFileSnapShot.getSegmentTimestampUpdaterMap();
@@ -140,9 +158,10 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
     // Read the current file Path get the list of indexes from the path.
     CarbonFile file = FileFactory.getCarbonFile(carbonFilePath);
     CarbonFile[] files = file.listFiles(new CarbonFileFilter() {
-      @Override public boolean accept(CarbonFile file) {
+      @Override
+      public boolean accept(CarbonFile file) {
         return file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT) || file.getName()
-            .endsWith(CarbonTablePath.CARBON_DATA_EXT);
+            .endsWith(CarbonTablePath.CARBON_DATA_EXT) || file.getName().endsWith("Fact");
       }
     });
     if (files.length == 0) {
@@ -152,8 +171,14 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
     }
     Map<String, List<String>> indexFileStore = new HashMap<>();
     Map<String, SegmentRefreshInfo> segmentTimestampUpdaterMap = new HashMap<>();
+    CarbonFile[] carbonIndexFiles = null;
     if (file.isDirectory()) {
-      CarbonFile[] carbonIndexFiles = SegmentIndexFileStore.getCarbonIndexFiles(carbonFilePath);
+      if (segmentId == null) {
+        carbonIndexFiles = SegmentIndexFileStore.getCarbonIndexFiles(carbonFilePath);
+      } else {
+        String segmentPath = CarbonTablePath.getSegmentPath(carbonFilePath, segmentId);
+        carbonIndexFiles = SegmentIndexFileStore.getCarbonIndexFiles(segmentPath);
+      }
       for (int i = 0; i < carbonIndexFiles.length; i++) {
         // TODO. If Required to support merge index, then this code has to be modified.
         // TODO. Nested File Paths.
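
The segment-scoped constructor narrows the index snapshot to one segment. A hedged sketch of how a caller builds a per-segment scope, mirroring the SearchRequestHandler change later in this commit:

    // one read-committed scope per unique segment id
    val scope = new LatestFilesReadCommittedScope(table.getTablePath, segmentId)
    val segment = Segment.toSegment(segmentId, scope)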

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java
index 6ff4b89..d177a00 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java
@@ -45,7 +45,7 @@ public interface ReadCommittedScope extends Serializable {
    */
   public Map<String, String> getCommittedIndexFile(Segment segment) throws IOException ;
 
-  public SegmentRefreshInfo getCommitedSegmentRefreshInfo(
+  public SegmentRefreshInfo getCommittedSegmentRefreshInfo(
       Segment segment, UpdateVO updateVo) throws IOException;
 
   public void takeCarbonIndexFileSnapShot() throws IOException;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
index 91ebd41..1f61aab 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
@@ -79,7 +79,7 @@ public class TableStatusReadCommittedScope implements ReadCommittedScope {
     return indexFiles;
   }
 
-  public SegmentRefreshInfo getCommitedSegmentRefreshInfo(Segment segment, UpdateVO updateVo)
+  public SegmentRefreshInfo getCommittedSegmentRefreshInfo(Segment segment, UpdateVO updateVo)
       throws IOException {
     SegmentRefreshInfo segmentRefreshInfo;
     if (updateVo != null) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
index fab0565..1da8edd 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
@@ -29,6 +29,7 @@ import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandExcept
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datamap.DataMapDistributable;
+import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.DataMapMeta;
 import org.apache.carbondata.core.datamap.DataMapStoreManager;
 import org.apache.carbondata.core.datamap.Segment;
@@ -235,7 +236,8 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
     }
     for (CarbonFile indexDir : indexDirs) {
       // Filter out the tasks which are filtered through CG datamap.
-      if (!segment.getFilteredIndexShardNames().contains(indexDir.getName())) {
+      if (getDataMapLevel() != DataMapLevel.FG &&
+          !segment.getFilteredIndexShardNames().contains(indexDir.getName())) {
         continue;
       }
       DataMapDistributable luceneDataMapDistributable = new LuceneDataMapDistributable(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
index efe2a63..fe94f54 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
@@ -61,8 +61,6 @@ object LuceneDataMapExample {
          | DMProperties('INDEX_COLUMNS'='id , name')
       """.stripMargin)
 
-    spark.sql("refresh datamap dm ON TABLE personTable")
-
     // 1. Compare the performance:
 
     def time(code: => Unit): Double = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
index cad20fc..da84c00 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
@@ -78,8 +78,12 @@ public class CarbonRecordReader<T> extends AbstractRecordReader<T> {
     } else {
       throw new RuntimeException("unsupported input split type: " + inputSplit);
     }
-    List<TableBlockInfo> tableBlockInfoList = CarbonInputSplit.createBlocks(splitList);
-    queryModel.setTableBlockInfos(tableBlockInfoList);
+    // It should use the existing tableBlockInfos if tableBlockInfos of queryModel is not empty,
+    // otherwise the pruning done before this method is of no use
+    if (queryModel.getTableBlockInfos().isEmpty()) {
+      List<TableBlockInfo> tableBlockInfoList = CarbonInputSplit.createBlocks(splitList);
+      queryModel.setTableBlockInfos(tableBlockInfoList);
+    }
     readSupport.initialize(queryModel.getProjectionColumns(), queryModel.getTable());
     try {
       carbonIterator = new ChunkRowIterator(queryExecutor.execute(queryModel));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
index cf51162..05c70f8 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
@@ -372,7 +372,7 @@ m filterExpression
     List<ExtendedBlocklet> prunedBlocklets =
         getPrunedBlocklets(job, carbonTable, resolver, segmentIds);
 
-    List<CarbonInputSplit> resultFilterredBlocks = new ArrayList<>();
+    List<CarbonInputSplit> resultFilteredBlocks = new ArrayList<>();
     int partitionIndex = 0;
     List<Integer> partitionIdList = new ArrayList<>();
     if (partitionInfo != null && partitionInfo.getPartitionType() != PartitionType.NATIVE_HIVE) {
@@ -401,7 +401,7 @@ m filterExpression
         if (matchedPartitions == null || matchedPartitions.get(partitionIndex)) {
           CarbonInputSplit inputSplit = convertToCarbonInputSplit(blocklet);
           if (inputSplit != null) {
-            resultFilterredBlocks.add(inputSplit);
+            resultFilteredBlocks.add(inputSplit);
           }
         }
       }
@@ -409,7 +409,7 @@ m filterExpression
     statistic
         .addStatistics(QueryStatisticsConstants.LOAD_BLOCKS_DRIVER, System.currentTimeMillis());
     recorder.recordStatisticsForDriver(statistic, job.getConfiguration().get("query.id"));
-    return resultFilterredBlocks;
+    return resultFilteredBlocks;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
index 638d24d..f64a349 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
@@ -438,6 +438,7 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
       .contains("Unsupported alter operation on hive table"))
     sql("drop datamap if exists dm2 on table datamap_test_table")
   }
+
   test("test Clean Files and check Lucene DataMap") {
     sql("DROP TABLE IF EXISTS datamap_test_table")
     sql(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapWithSearchModeSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapWithSearchModeSuite.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapWithSearchModeSuite.scala
new file mode 100644
index 0000000..0ceead8
--- /dev/null
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapWithSearchModeSuite.scala
@@ -0,0 +1,328 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.datamap.lucene
+
+import java.io.{File, PrintWriter}
+
+import scala.util.Random
+
+import org.apache.spark.sql.{CarbonEnv, CarbonSession, Row}
+import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.core.datamap.status.DataMapStatusManager
+
+/**
+  * Test lucene fine grain datamap with search mode
+  */
+class LuceneFineGrainDataMapWithSearchModeSuite extends QueryTest with BeforeAndAfterAll {
+
+  val file2 = resourcesPath + "/datamap_input.csv"
+
+  override protected def beforeAll(): Unit = {
+    // n should be reset to about 5000000 if size is the default 1024
+    val n = 500000
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].startSearchMode()
+    CarbonProperties
+      .getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT, "100s")
+    LuceneFineGrainDataMapSuite.createFile(file2, n)
+    sql("create database if not exists lucene")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_SYSTEM_FOLDER_LOCATION,
+        CarbonEnv.getDatabaseLocation("lucene", sqlContext.sparkSession))
+    sql("use lucene")
+    sql("DROP TABLE IF EXISTS datamap_test")
+    sql(
+      """
+        | CREATE TABLE datamap_test(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+  }
+
+  test("test lucene fine grain data map with search mode") {
+
+    sqlContext.sparkSession.sparkContext.setLogLevel("WARN")
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='Name')
+      """.stripMargin)
+
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test WHERE TEXT_MATCH('name:n10')"),
+      sql(s"select * from datamap_test where name='n10'"))
+
+    sql("drop datamap dm on table datamap_test")
+  }
+
+  // TODO: optimize performance
+  ignore("test lucene fine grain data map with TEXT_MATCH 'AND' Filter") {
+    sql("drop datamap if exists dm on table datamap_test")
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='name')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test OPTIONS('header'='false')")
+    checkAnswer(
+      sql("SELECT count(*) FROM datamap_test WHERE TEXT_MATCH('name:n2*') " +
+        "AND age=28 and id=200149"),
+      sql("SELECT count(*) FROM datamap_test WHERE name like 'n2%' " +
+        "AND age=28 and id=200149"))
+    sql("drop datamap if exists dm on table datamap_test")
+  }
+
+  // TODO: optimize performance
+  ignore("test lucene fine grain data map with TEXT_MATCH 'AND' and 'OR' Filter ") {
+    sql("drop datamap if exists dm on table datamap_test")
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='name , city')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test WHERE TEXT_MATCH('name:n1*') OR TEXT_MATCH ('city:c01*') " +
+      "AND TEXT_MATCH('city:C02*')"),
+      sql("select * from datamap_test where name like 'n1%' OR city like 'c01%' and city like" +
+        " 'c02%'"))
+    sql("drop datamap if exists dm on table datamap_test")
+  }
+
+  test("test lucene fine grain data map with compaction-Major ") {
+    sql("DROP TABLE IF EXISTS datamap_test_table")
+    sql(
+      """
+        | CREATE TABLE datamap_test_table(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test_table
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='name , city')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test_table OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test_table WHERE TEXT_MATCH('name:n10')"),
+      sql("select * from datamap_test_table where name='n10'"))
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test_table OPTIONS('header'='false')")
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test_table OPTIONS('header'='false')")
+    sql("alter table datamap_test_table compact 'major'")
+    checkAnswer(sql("SELECT COUNT(*) FROM datamap_test_table WHERE TEXT_MATCH('name:n10')"),
+      sql("select COUNT(*) from datamap_test_table where name='n10'"))
+    sql("drop datamap if exists dm on table datamap_test_table")
+    sql("DROP TABLE IF EXISTS datamap_test_table")
+  }
+
+  test("test lucene fine grain datamap rebuild") {
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql(
+      """
+        | CREATE TABLE datamap_test5(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test5
+         | USING 'lucene'
+         | WITH DEFERRED REBUILD
+         | DMProperties('INDEX_COLUMNS'='city')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    val map = DataMapStatusManager.readDataMapStatusMap()
+    assert(!map.get("dm").isEnabled)
+    sql("REBUILD DATAMAP dm ON TABLE datamap_test5")
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c020'"))
+    sql("DROP TABLE IF EXISTS datamap_test5")
+  }
+
+  test("test lucene fine grain datamap rebuild with table block size") {
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql(
+      """
+        | CREATE TABLE datamap_test5(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'TABLE_BLOCKSIZE'='1')
+      """.stripMargin)
+    sql(
+    s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test5
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='Name , cIty')
+      """.stripMargin)
+
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c00')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c00'"))
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c020'"))
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c0100085')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c0100085'"))
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c09560')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c09560'"))
+    sql("DROP TABLE IF EXISTS datamap_test5")
+  }
+
+  test("test lucene fine grain multiple data map on table") {
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql(
+      """
+        | CREATE TABLE datamap_test5(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm2 ON TABLE datamap_test5
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='city')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm1 ON TABLE datamap_test5
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='Name')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('name:n10')"),
+      sql(s"select * from datamap_test5 where name='n10'"))
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c020'"))
+    sql("DROP TABLE IF EXISTS datamap_test5")
+  }
+
+  // TODO: support it  in the future
+  ignore("test lucene datamap and validate the visible and invisible status of datamap ") {
+    val tableName = "datamap_test2"
+    val dataMapName1 = "ggdatamap1"
+    sql(s"DROP TABLE IF EXISTS $tableName")
+    sql(
+      s"""
+         | CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
+         | STORED BY 'org.apache.carbondata.format'
+         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    // register datamap writer
+    sql(
+      s"""
+         | CREATE DATAMAP ggdatamap1 ON TABLE $tableName
+         | USING 'lucene'
+         | DMPROPERTIES('index_columns'='name')
+       """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE $tableName OPTIONS('header'='false')")
+
+    val df1 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE TEXT_MATCH('name:n502670')").collect()
+    sql(s"SELECT * FROM $tableName WHERE TEXT_MATCH('name:n502670')").show()
+    println(df1(0).getString(0))
+    assertResult(
+      s"""== CarbonData Profiler ==
+         |Table Scan on datamap_test2
+         | - total blocklets: 1
+         | - filter: TEXT_MATCH('name:n502670')
+         | - pruned by Main DataMap
+         |    - skipped blocklets: 0
+         | - pruned by FG DataMap
+         |    - name: ggdatamap1
+         |    - provider: lucene
+         |    - skipped blocklets: 1
+         |""".stripMargin)(df1(0).getString(0))
+
+    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
+
+    val df2 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670'").collect()
+    println(df2(0).getString(0))
+    assertResult(
+      s"""== CarbonData Profiler ==
+         |Table Scan on $tableName
+         | - total blocklets: 1
+         | - filter: (name <> null and name = n502670)
+         | - pruned by Main DataMap
+         |    - skipped blocklets: 0
+         |""".stripMargin)(df2(0).getString(0))
+
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"))
+    sql(s"DROP TABLE IF EXISTS $tableName")
+  }
+
+  ignore("test lucene fine grain datamap rebuild with table block size, rebuild") {
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql(
+      """
+        | CREATE TABLE datamap_test5(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'TABLE_BLOCKSIZE'='1')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test5
+         | USING 'lucene'
+         | WITH DEFERRED REBUILD
+         | DMProperties('INDEX_COLUMNS'='Name , cIty')
+      """.stripMargin)
+
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    sql("REBUILD DATAMAP dm ON TABLE datamap_test5")
+
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].stopSearchMode()
+    sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')").show()
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].startSearchMode()
+    sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')").show()
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c020'"))
+    sql("DROP TABLE IF EXISTS datamap_test5")
+  }
+
+  override protected def afterAll(): Unit = {
+    LuceneFineGrainDataMapSuite.deleteFile(file2)
+    sql("DROP TABLE IF EXISTS datamap_test")
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql("use default")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_SYSTEM_FOLDER_LOCATION,
+        CarbonProperties.getStorePath)
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].stopSearchMode()
+  }
+
+  def createFile(fileName: String, line: Int = 10000, start: Int = 0) = {
+    val write = new PrintWriter(new File(fileName))
+    for (i <- start until (start + line)) {
+      write.println(i + "," + "n" + i + "," + "c0" + i + "," + Random.nextInt(80))
+    }
+    write.close()
+  }
+
+  def deleteFile(fileName: String): Unit = {
+    val file = new File(fileName)
+    if (file.exists()) {
+      file.delete()
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
index 2c94dab..d278fc5 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
@@ -109,4 +109,31 @@ class SearchModeTestCase extends QueryTest with BeforeAndAfterAll {
     sql("set carbon.search.enabled = false")
     assert(!sqlContext.sparkSession.asInstanceOf[CarbonSession].isSearchModeEnabled)
   }
+
+  test("test lucene datamap with search mode") {
+    sql("DROP DATAMAP IF EXISTS dm ON TABLE main")
+    sql("CREATE DATAMAP dm ON TABLE main USING 'lucene' DMProperties('INDEX_COLUMNS'='id') ")
+    checkAnswer(sql("SELECT * FROM main WHERE TEXT_MATCH('id:100000')"),
+      sql(s"SELECT * FROM main WHERE id='100000'"))
+    sql("DROP DATAMAP if exists dm ON TABLE main")
+  }
+
+  test("test lucene datamap with search mode 2") {
+    sql("drop datamap if exists dm3 ON TABLE main")
+    sql("CREATE DATAMAP dm3 ON TABLE main USING 'lucene' DMProperties('INDEX_COLUMNS'='city') ")
+    checkAnswer(sql("SELECT * FROM main WHERE TEXT_MATCH('city:city6')"),
+      sql("SELECT * FROM main WHERE city='city6'"))
+    sql("DROP DATAMAP if exists dm3 ON TABLE main")
+  }
+
+  test("test lucene datamap with search mode, two column") {
+    sql("drop datamap if exists dm3 ON TABLE main")
+    sql("CREATE DATAMAP dm3 ON TABLE main USING 'lucene' DMProperties('INDEX_COLUMNS'='city , id') ")
+    checkAnswer(sql("SELECT * FROM main WHERE TEXT_MATCH('city:city6')"),
+      sql("SELECT * FROM main WHERE city='city6'"))
+    checkAnswer(sql("SELECT * FROM main WHERE TEXT_MATCH('id:100000')"),
+      sql(s"SELECT * FROM main WHERE id='100000'"))
+    sql("DROP DATAMAP if exists dm3 ON TABLE main")
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
index 29dcec9..186e39e 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
@@ -68,10 +68,10 @@ case class CarbonSetCommand(command: SetCommand)
   override val output: Seq[Attribute] = command.output
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
-    val sessionParms = CarbonEnv.getInstance(sparkSession).carbonSessionInfo.getSessionParams
+    val sessionParams = CarbonEnv.getInstance(sparkSession).carbonSessionInfo.getSessionParams
     command.kv match {
       case Some((key, Some(value))) =>
-        CarbonSetCommand.validateAndSetValue(sessionParms, key, value)
+        CarbonSetCommand.validateAndSetValue(sessionParams, key, value)
 
         // handle search mode start/stop for ThriftServer usage
         if (key.equalsIgnoreCase(CarbonCommonConstants.CARBON_SEARCH_MODE_ENABLE)) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
index 07a444f..c052cd7 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
@@ -382,6 +382,8 @@ object CarbonFilters {
             CarbonScalaUtil.convertSparkToCarbonDataType(dataType)))
         new AndExpression(l, r)
       case StringTrim(child) => transformExpression(child)
+      case s: ScalaUDF =>
+        new MatchExpression(s.children.head.toString())
       case _ =>
         new SparkUnknownExpression(expr.transform {
           case AttributeReference(name, dataType, _, _) =>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
----------------------------------------------------------------------
diff --git a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
index 9727352..f6406c7 100644
--- a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
+++ b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
@@ -18,6 +18,7 @@
 package org.apache.carbondata.store.worker;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
@@ -27,7 +28,9 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datamap.DataMapChooser;
+import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.Segment;
+import org.apache.carbondata.core.datamap.dev.expr.DataMapDistributableWrapper;
 import org.apache.carbondata.core.datamap.dev.expr.DataMapExprWrapper;
 import org.apache.carbondata.core.datastore.block.TableBlockInfo;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
@@ -112,6 +115,8 @@ public class SearchRequestHandler {
     queryModel.setVectorReader(false);
 
     CarbonMultiBlockSplit mbSplit = request.split().value();
+    List<TableBlockInfo> list = CarbonInputSplit.createBlocks(mbSplit.getAllSplits());
+    queryModel.setTableBlockInfos(list);
     long limit = request.limit();
     long rowCount = 0;
 
@@ -158,22 +163,38 @@ public class SearchRequestHandler {
       CarbonMultiBlockSplit mbSplit, DataMapExprWrapper datamap) throws IOException {
     Objects.requireNonNull(datamap);
     List<Segment> segments = new LinkedList<>();
+    HashMap<String, Integer> uniqueSegments = new HashMap<>();
     for (CarbonInputSplit split : mbSplit.getAllSplits()) {
-      segments.add(
-          Segment.toSegment(split.getSegmentId(),
-              new LatestFilesReadCommittedScope(table.getTablePath())));
+      String segmentId = split.getSegmentId();
+      if (uniqueSegments.get(segmentId) == null) {
+        segments.add(Segment.toSegment(
+                segmentId,
+                new LatestFilesReadCommittedScope(table.getTablePath(), segmentId)));
+        uniqueSegments.put(segmentId, 1);
+      } else {
+        uniqueSegments.put(segmentId, uniqueSegments.get(segmentId) + 1);
+      }
+    }
+
+    List<DataMapDistributableWrapper> distributables = datamap.toDistributable(segments);
+    List<ExtendedBlocklet> prunedBlocklets = new LinkedList<ExtendedBlocklet>();
+    for (int i = 0; i < distributables.size(); i++) {
+      DataMapDistributable dataMapDistributable = distributables.get(i).getDistributable();
+      prunedBlocklets.addAll(datamap.prune(dataMapDistributable, null));
     }
-    List<ExtendedBlocklet> prunnedBlocklets = datamap.prune(segments, null);
 
-    List<String> pathToRead = new LinkedList<>();
-    for (ExtendedBlocklet prunnedBlocklet : prunnedBlocklets) {
-      pathToRead.add(prunnedBlocklet.getPath());
+    HashMap<String, ExtendedBlocklet> pathToRead = new HashMap<>();
+    for (ExtendedBlocklet prunedBlocklet : prunedBlocklets) {
+      pathToRead.put(prunedBlocklet.getFilePath(), prunedBlocklet);
     }
 
     List<TableBlockInfo> blocks = queryModel.getTableBlockInfos();
     List<TableBlockInfo> blockToRead = new LinkedList<>();
     for (TableBlockInfo block : blocks) {
-      if (pathToRead.contains(block.getFilePath())) {
+      if (pathToRead.containsKey(block.getFilePath())) {
+        // If this is not set, the FineGrainBlocklet object cannot be created in
+        // org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode.getIndexedData
+        block.setDataMapWriterPath(pathToRead.get(block.getFilePath()).getDataMapWriterPath());
         blockToRead.add(block);
       }
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cb71ffe1/store/search/src/main/scala/org/apache/spark/rpc/Master.scala
----------------------------------------------------------------------
diff --git a/store/search/src/main/scala/org/apache/spark/rpc/Master.scala b/store/search/src/main/scala/org/apache/spark/rpc/Master.scala
index 26de74c..f48f5e4 100644
--- a/store/search/src/main/scala/org/apache/spark/rpc/Master.scala
+++ b/store/search/src/main/scala/org/apache/spark/rpc/Master.scala
@@ -38,8 +38,7 @@ import org.apache.spark.util.ThreadUtils
 
 import org.apache.carbondata.common.annotations.InterfaceAudience
 import org.apache.carbondata.common.logging.LogServiceFactory
-import org.apache.carbondata.core.datamap.DataMapChooser
-import org.apache.carbondata.core.datamap.dev.expr.DataMapExprWrapper
+import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.block.Distributable
 import org.apache.carbondata.core.datastore.row.CarbonRow
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
@@ -232,10 +231,14 @@ class Master(sparkConf: SparkConf) {
 
       // if we have enough data already, we do not need to collect more result
       if (rowCount < globalLimit) {
-        // wait for worker for 10s
-        ThreadUtils.awaitResult(future, Duration.apply("10s"))
+        // wait for worker
+        val timeout = CarbonProperties
+          .getInstance()
+          .getProperty(CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT,
+            CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT_DEFAULT)
+        ThreadUtils.awaitResult(future, Duration.apply(timeout))
         LOG.info(s"[SearchId:$queryId] receive search response from worker " +
-                 s"${worker.address}:${worker.port}")
+          s"${worker.address}:${worker.port}")
         try {
           future.value match {
             case Some(response: Try[SearchResult]) =>


[26/26] carbondata git commit: [CARBONDATA-2554] Added support for logical type

Posted by ra...@apache.org.
[CARBONDATA-2554] Added support for logical type

Added support for date and timestamp logical types in AvroCarbonWriter.

This closes #2347
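
A minimal sketch of feeding the new logical types through the SDK writer (the output path and field names are hypothetical; the builder calls mirror the test below):

    val avroSchema =
      """{
        |  "type": "record",
        |  "name": "StudentActivity",
        |  "fields": [
        |    {"name": "enrol_date", "type": {"type": "int", "logicalType": "date"}},
        |    {"name": "updated_at", "type": {"type": "long", "logicalType": "timestamp-millis"}}
        |  ]
        |}""".stripMargin
    val parsed = new org.apache.avro.Schema.Parser().parse(avroSchema)
    val writer = CarbonWriter.builder
      .outputPath("/tmp/sdk_output")
      .isTransactionalTable(false)
      .buildWriterForAvroInput(parsed)
    // write GenericData.Record instances converted from JSON, then close:
    // writer.write(record); writer.close()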


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/5afc2b74
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/5afc2b74
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/5afc2b74

Branch: refs/heads/branch-1.4
Commit: 5afc2b74c793882888ddfd020098dedb3e4a960d
Parents: 62e68ff
Author: kunal642 <ku...@gmail.com>
Authored: Mon May 28 11:41:59 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:07:21 2018 +0530

----------------------------------------------------------------------
 .../DirectDictionaryGenerator.java              |   2 +
 .../DateDirectDictionaryGenerator.java          |   2 +-
 .../TimeStampDirectDictionaryGenerator.java     |   2 +-
 .../TestNonTransactionalCarbonTable.scala       | 145 ++++++++++++++++++-
 .../processing/datatypes/PrimitiveDataType.java |  44 +++++-
 .../loading/dictionary/DirectDictionary.java    |   4 +
 .../InputProcessorStepWithNoConverterImpl.java  |  24 ++-
 .../carbondata/sdk/file/AvroCarbonWriter.java   |  71 ++++++++-
 8 files changed, 279 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/5afc2b74/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java
index 469fe1e..2139f31 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java
@@ -40,6 +40,8 @@ public interface DirectDictionaryGenerator {
    */
   Object getValueFromSurrogate(int key);
 
+  int generateKey(long value);
+
   /**
    * The method generate and returns the dictionary / surrogate key for direct dictionary column
    * This Method is called while executing filter queries for getting direct surrogate members.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5afc2b74/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
index c49af9c..329e260 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
@@ -163,7 +163,7 @@ public class DateDirectDictionaryGenerator implements DirectDictionaryGenerator
     }
   }
 
-  private int generateKey(long timeValue) {
+  public int generateKey(long timeValue) {
     if (timeValue < MIN_VALUE || timeValue > MAX_VALUE) {
       if (LOGGER.isDebugEnabled()) {
         LOGGER.debug("Value for date type column is not in valid range. Value considered as null.");

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5afc2b74/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
index d218e99..c7a4194 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
@@ -206,7 +206,7 @@ public class TimeStampDirectDictionaryGenerator implements DirectDictionaryGener
     }
   }
 
-  private int generateKey(long timeValue) {
+  public int generateKey(long timeValue) {
     long time = (timeValue - cutOffTimeStamp) / granularityFactor;
     int keyValue = -1;
     if (time >= (long) Integer.MIN_VALUE && time <= (long) Integer.MAX_VALUE) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5afc2b74/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 5beb9c4..095d12d 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -17,7 +17,7 @@
 
 package org.apache.carbondata.spark.testsuite.createTable
 
-import java.sql.Timestamp
+import java.sql.{Date, Timestamp}
 import java.io.{File, FileFilter, IOException}
 import java.util
 import java.util.concurrent.TimeUnit
@@ -42,6 +42,7 @@ import scala.concurrent.duration.Duration
 
 import org.apache.avro
 import org.apache.commons.lang.CharEncoding
+import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
 import tech.allegro.schema.json2avro.converter.JsonAvroConverter
 
 import org.apache.carbondata.core.metadata.datatype.{DataTypes, StructField}
@@ -2151,4 +2152,146 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     writer.close()
   }
 
+  test("test logical type date") {
+    sql("drop table if exists sdkOutputTable")
+    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+    val schema1 =
+      """{
+        |	"namespace": "com.apache.schema",
+        |	"type": "record",
+        |	"name": "StudentActivity",
+        |	"fields": [
+        |		{
+        |			"name": "id",
+        |						"type": {"type" : "int", "logicalType": "date"}
+        |		},
+        |		{
+        |			"name": "course_details",
+        |			"type": {
+        |				"name": "course_details",
+        |				"type": "record",
+        |				"fields": [
+        |					{
+        |						"name": "course_struct_course_time",
+        |						"type": {"type" : "int", "logicalType": "date"}
+        |					}
+        |				]
+        |			}
+        |		}
+        |	]
+        |}""".stripMargin
+
+    val json1 =
+      """{"id": 101, "course_details": { "course_struct_course_time":10}}""".stripMargin
+    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
+    val converter = new JsonAvroConverter
+    val record = converter
+      .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
+
+    val writer = CarbonWriter.builder
+      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
+    writer.write(record)
+    writer.close()
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkOutputTable(dateType date, course_details struct<course_struct_course_time: date>) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkOutputTable"), Seq(Row(java.sql.Date.valueOf("1970-04-12"), Row(java.sql.Date.valueOf("1970-01-11")))))
+  }
+
+  test("test logical type timestamp-millis") {
+    sql("drop table if exists sdkOutputTable")
+    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+    val schema1 =
+      """{
+        |	"namespace": "com.apache.schema",
+        |	"type": "record",
+        |	"name": "StudentActivity",
+        |	"fields": [
+        |		{
+        |			"name": "id",
+        |						"type": {"type" : "long", "logicalType": "timestamp-millis"}
+        |		},
+        |		{
+        |			"name": "course_details",
+        |			"type": {
+        |				"name": "course_details",
+        |				"type": "record",
+        |				"fields": [
+        |					{
+        |						"name": "course_struct_course_time",
+        |						"type": {"type" : "long", "logicalType": "timestamp-millis"}
+        |					}
+        |				]
+        |			}
+        |		}
+        |	]
+        |}""".stripMargin
+
+    val json1 =
+      """{"id": 172800000,"course_details": { "course_struct_course_time":172800000}}""".stripMargin
+
+    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
+    val converter = new JsonAvroConverter
+    val record = converter
+      .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
+
+    val writer = CarbonWriter.builder
+      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
+    writer.write(record)
+    writer.close()
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkOutputTable(dateType timestamp, course_details struct<course_struct_course_time: timestamp>) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkOutputTable"), Seq(Row(Timestamp.valueOf("1970-01-02 16:00:00"), Row(Timestamp.valueOf("1970-01-02 16:00:00")))))
+  }
+
+  test("test logical type-micros timestamp") {
+    sql("drop table if exists sdkOutputTable")
+    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+    val schema1 =
+      """{
+        |	"namespace": "com.apache.schema",
+        |	"type": "record",
+        |	"name": "StudentActivity",
+        |	"fields": [
+        |		{
+        |			"name": "id",
+        |						"type": {"type" : "long", "logicalType": "timestamp-micros"}
+        |		},
+        |		{
+        |			"name": "course_details",
+        |			"type": {
+        |				"name": "course_details",
+        |				"type": "record",
+        |				"fields": [
+        |					{
+        |						"name": "course_struct_course_time",
+        |						"type": {"type" : "long", "logicalType": "timestamp-micros"}
+        |					}
+        |				]
+        |			}
+        |		}
+        |	]
+        |}""".stripMargin
+
+    val json1 =
+      """{"id": 172800000000,"course_details": { "course_struct_course_time":172800000000}}""".stripMargin
+
+    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
+    val converter = new JsonAvroConverter
+    val record = converter
+      .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
+
+    val writer = CarbonWriter.builder
+      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
+    writer.write(record)
+    writer.close()
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkOutputTable(dateType timestamp, course_details struct<course_struct_course_time: timestamp>) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkOutputTable"), Seq(Row(Timestamp.valueOf("1970-01-02 16:00:00"), Row(Timestamp.valueOf("1970-01-02 16:00:00")))))
+  }
 }

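The expected rows in these tests follow directly from the epoch arithmetic: an Avro date is a count of days since 1970-01-01, and timestamp-millis/timestamp-micros count from the same epoch. A quick, self-contained sanity check (the 16:00 rendering assumes the GMT-8 JVM default timezone that the assertions imply):

    import java.time.Instant;
    import java.time.LocalDate;

    public class LogicalTypeMath {
      public static void main(String[] args) {
        System.out.println(LocalDate.ofEpochDay(101)); // 1970-04-12
        System.out.println(LocalDate.ofEpochDay(10));  // 1970-01-11
        // 172800000 ms and 172800000000 us are both 1970-01-03T00:00:00Z, which
        // renders as "1970-01-02 16:00:00" in a GMT-8 JVM default timezone.
        System.out.println(Instant.ofEpochMilli(172800000L));
        System.out.println(Instant.ofEpochMilli(172800000000L / 1000));
      }
    }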
http://git-wip-us.apache.org/repos/asf/carbondata/blob/5afc2b74/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java b/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
index 7450b82..3a477ce 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
@@ -288,7 +288,11 @@ public class PrimitiveDataType implements GenericDataType<Object> {
           logHolder.setReason(message);
         }
       } else {
-        surrogateKey = dictionaryGenerator.getOrGenerateKey(parsedValue);
+        if (dictionaryGenerator instanceof DirectDictionary && input instanceof Long) {
+          surrogateKey = ((DirectDictionary) dictionaryGenerator).generateKey((long) input);
+        } else {
+          surrogateKey = dictionaryGenerator.getOrGenerateKey(parsedValue);
+        }
         if (surrogateKey == CarbonCommonConstants.INVALID_SURROGATE_KEY) {
           surrogateKey = CarbonCommonConstants.MEMBER_DEFAULT_VAL_SURROGATE_KEY;
           message = CarbonDataProcessorUtil
@@ -316,15 +320,36 @@ public class PrimitiveDataType implements GenericDataType<Object> {
           if (!this.carbonDimension.getUseActualData()) {
             byte[] value = null;
             if (isDirectDictionary) {
-              int surrogateKey = dictionaryGenerator.getOrGenerateKey(parsedValue);
+              int surrogateKey;
+              // If the input is a long value then this means that logical type was provided by
+              // the user using AvroCarbonWriter. In this case directly generate surrogate key
+              // using dictionaryGenerator.
+              if (dictionaryGenerator instanceof DirectDictionary && input instanceof Long) {
+                surrogateKey = ((DirectDictionary) dictionaryGenerator).generateKey((long) input);
+              } else {
+                surrogateKey = dictionaryGenerator.getOrGenerateKey(parsedValue);
+              }
               if (surrogateKey == CarbonCommonConstants.INVALID_SURROGATE_KEY) {
                 value = new byte[0];
               } else {
                 value = ByteUtil.toBytes(surrogateKey);
               }
             } else {
-              value = DataTypeUtil.getBytesBasedOnDataTypeForNoDictionaryColumn(parsedValue,
-                  this.carbonDimension.getDataType(), dateFormat);
+              // If the input is a long value then this means that logical type was provided by
+              // the user using AvroCarbonWriter. In this case directly generate Bytes from value.
+              if ((this.carbonDimension.getDataType().equals(DataTypes.DATE)
+                  || this.carbonDimension.getDataType().equals(DataTypes.TIMESTAMP))
+                  && input instanceof Long) {
+                if (dictionaryGenerator != null) {
+                  value = ByteUtil.toBytes(((DirectDictionary) dictionaryGenerator)
+                      .generateKey((long) input));
+                } else {
+                  value = ByteUtil.toBytes(Long.parseLong(parsedValue));
+                }
+              } else {
+                value = DataTypeUtil.getBytesBasedOnDataTypeForNoDictionaryColumn(parsedValue,
+                    this.carbonDimension.getDataType(), dateFormat);
+              }
               if (this.carbonDimension.getDataType() == DataTypes.STRING
                   && value.length > CarbonCommonConstants.MAX_CHARS_PER_COLUMN_DEFAULT) {
                 throw new CarbonDataLoadingException("Dataload failed, String size cannot exceed "
@@ -333,8 +358,15 @@ public class PrimitiveDataType implements GenericDataType<Object> {
             }
             updateValueToByteStream(dataOutputStream, value);
           } else {
-            Object value = DataTypeUtil.getDataDataTypeForNoDictionaryColumn(parsedValue,
-                this.carbonDimension.getDataType(), dateFormat);
+            Object value;
+            if (dictionaryGenerator instanceof DirectDictionary
+                && input instanceof Long) {
+              value = ByteUtil.toBytes(
+                  ((DirectDictionary) dictionaryGenerator).generateKey((long) input));
+            } else {
+              value = DataTypeUtil.getDataDataTypeForNoDictionaryColumn(parsedValue,
+                  this.carbonDimension.getDataType(), dateFormat);
+            }
             if (this.carbonDimension.getDataType() == DataTypes.STRING
                 && value.toString().length() > CarbonCommonConstants.MAX_CHARS_PER_COLUMN_DEFAULT) {
               throw new CarbonDataLoadingException("Dataload failed, String size cannot exceed "

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5afc2b74/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java b/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java
index 165e5a4..33dc8e3 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java
@@ -46,6 +46,10 @@ public class DirectDictionary implements BiDictionary<Integer, Object> {
     return dictionaryGenerator.generateDirectSurrogateKey(value.toString());
   }
 
+  public Integer generateKey(long value) {
+    return dictionaryGenerator.generateKey(value);
+  }
+
   @Override
   public Object getValue(Integer key) {
     return dictionaryGenerator.getValueFromSurrogate(key);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5afc2b74/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java
index c99a413..5f7a94c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java
@@ -28,6 +28,8 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.carbondata.common.CarbonIterator;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
+import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
+import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
@@ -215,6 +217,10 @@ public class InputProcessorStepWithNoConverterImpl extends AbstractDataLoadProce
 
     private Map<Integer, GenericDataType> dataFieldsWithComplexDataType;
 
+    private DirectDictionaryGenerator dateDictionaryGenerator;
+
+    private DirectDictionaryGenerator timestampDictionaryGenerator;
+
     public InputProcessorIterator(List<CarbonIterator<Object[]>> inputIterators, int batchSize,
         boolean preFetch, AtomicLong rowCounter, int[] orderOfData, boolean[] noDictionaryMapping,
         DataType[] dataTypes, CarbonDataLoadConfiguration configuration,
@@ -313,7 +319,23 @@ public class InputProcessorStepWithNoConverterImpl extends AbstractDataLoadProce
               throw new CarbonDataLoadingException("Loading Exception", e);
             }
           } else {
-            newData[i] = data[orderOfData[i]];
+            DataType dataType = dataFields[i].getColumn().getDataType();
+            if (dataType == DataTypes.DATE && data[orderOfData[i]] instanceof Long) {
+              if (dateDictionaryGenerator == null) {
+                dateDictionaryGenerator = DirectDictionaryKeyGeneratorFactory
+                    .getDirectDictionaryGenerator(dataType, dataFields[i].getDateFormat());
+              }
+              newData[i] = dateDictionaryGenerator.generateKey((long) data[orderOfData[i]]);
+            } else if (dataType == DataTypes.TIMESTAMP && data[orderOfData[i]] instanceof Long) {
+              if (timestampDictionaryGenerator == null) {
+                timestampDictionaryGenerator =
+                    DirectDictionaryKeyGeneratorFactory
+                        .getDirectDictionaryGenerator(dataType, dataFields[i].getTimestampFormat());
+              }
+              newData[i] = timestampDictionaryGenerator.generateKey((long) data[orderOfData[i]]);
+            } else {
+              newData[i] = data[orderOfData[i]];
+            }
           }
         }
       }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5afc2b74/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
index 8bbf364..edecd6b 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
@@ -24,15 +24,21 @@ import java.util.Random;
 import java.util.UUID;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.keygenerator.directdictionary.timestamp.DateDirectDictionaryGenerator;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.datatype.StructField;
+import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.hadoop.api.CarbonTableOutputFormat;
 import org.apache.carbondata.hadoop.internal.ObjectArrayWritable;
 import org.apache.carbondata.processing.loading.complexobjects.ArrayObject;
 import org.apache.carbondata.processing.loading.complexobjects.StructObject;
 import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
 
+import org.apache.avro.LogicalType;
+import org.apache.avro.LogicalTypes;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericData;
 import org.apache.hadoop.conf.Configuration;
@@ -55,6 +61,8 @@ public class AvroCarbonWriter extends CarbonWriter {
   private TaskAttemptContext context;
   private ObjectArrayWritable writable;
   private Schema avroSchema;
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(CarbonTable.class.getName());
 
   AvroCarbonWriter(CarbonLoadModel loadModel) throws IOException {
     Configuration hadoopConf = new Configuration();
@@ -88,10 +96,35 @@ public class AvroCarbonWriter extends CarbonWriter {
   private Object avroFieldToObject(Schema.Field avroField, Object fieldValue) {
     Object out;
     Schema.Type type = avroField.schema().getType();
+    LogicalType logicalType = avroField.schema().getLogicalType();
     switch (type) {
-      case BOOLEAN:
       case INT:
+        if (logicalType != null) {
+          if (logicalType instanceof LogicalTypes.Date) {
+            int dateIntValue = (int) fieldValue;
+            out = dateIntValue * DateDirectDictionaryGenerator.MILLIS_PER_DAY;
+          } else {
+            LOGGER.warn("Actual type: INT, Logical Type: " + logicalType.getName());
+            out = fieldValue;
+          }
+        } else {
+          out = fieldValue;
+        }
+        break;
+      case BOOLEAN:
       case LONG:
+        if (logicalType != null && !(logicalType instanceof LogicalTypes.TimestampMillis)) {
+          if (logicalType instanceof LogicalTypes.TimestampMicros) {
+            long timestampMicrosValue = (long) fieldValue;
+            out = timestampMicrosValue / 1000L;
+          } else {
+            LOGGER.warn("Actual type: LONG, Logical Type: " + logicalType.getName());
+            out = fieldValue;
+          }
+        } else {
+          out = fieldValue;
+        }
+        break;
       case DOUBLE:
       case STRING:
         out = fieldValue;
@@ -177,13 +210,27 @@ public class AvroCarbonWriter extends CarbonWriter {
     String FieldName = avroField.name();
     Schema childSchema = avroField.schema();
     Schema.Type type = childSchema.getType();
+    LogicalType logicalType = childSchema.getLogicalType();
     switch (type) {
       case BOOLEAN:
         return new Field(FieldName, DataTypes.BOOLEAN);
       case INT:
-        return new Field(FieldName, DataTypes.INT);
+        if (logicalType instanceof LogicalTypes.Date) {
+          return new Field(FieldName, DataTypes.DATE);
+        } else {
+          LOGGER.warn("Unsupported logical type. Considering Data Type as INT for " + childSchema
+              .getName());
+          return new Field(FieldName, DataTypes.INT);
+        }
       case LONG:
-        return new Field(FieldName, DataTypes.LONG);
+        if (logicalType instanceof LogicalTypes.TimestampMillis
+            || logicalType instanceof LogicalTypes.TimestampMicros) {
+          return new Field(FieldName, DataTypes.TIMESTAMP);
+        } else {
+          LOGGER.warn("Unsupported logical type. Considering Data Type as LONG for " + childSchema
+              .getName());
+          return new Field(FieldName, DataTypes.LONG);
+        }
       case DOUBLE:
         return new Field(FieldName, DataTypes.DOUBLE);
       case STRING:
@@ -221,13 +268,27 @@ public class AvroCarbonWriter extends CarbonWriter {
 
   private static StructField prepareSubFields(String FieldName, Schema childSchema) {
     Schema.Type type = childSchema.getType();
+    LogicalType logicalType = childSchema.getLogicalType();
     switch (type) {
       case BOOLEAN:
         return new StructField(FieldName, DataTypes.BOOLEAN);
       case INT:
-        return new StructField(FieldName, DataTypes.INT);
+        if (logicalType instanceof LogicalTypes.Date) {
+          return new StructField(FieldName, DataTypes.DATE);
+        } else {
+          LOGGER.warn("Unsupported logical type. Considering Data Type as INT for " + childSchema
+              .getName());
+          return new StructField(FieldName, DataTypes.INT);
+        }
       case LONG:
-        return new StructField(FieldName, DataTypes.LONG);
+        if (logicalType instanceof LogicalTypes.TimestampMillis
+            || logicalType instanceof LogicalTypes.TimestampMicros) {
+          return new StructField(FieldName, DataTypes.TIMESTAMP);
+        } else {
+          LOGGER.warn("Unsupported logical type. Considering Data Type as LONG for " + childSchema
+              .getName());
+          return new StructField(FieldName, DataTypes.LONG);
+        }
       case DOUBLE:
         return new StructField(FieldName, DataTypes.DOUBLE);
       case STRING:

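In short, the writer now inspects the Avro schema's LogicalType: INT with a date logical type maps to DATE, and LONG with timestamp-millis or timestamp-micros maps to TIMESTAMP, with all temporal values normalized to epoch milliseconds. A condensed sketch of just that normalization (illustrative, not the full writer):

    import org.apache.avro.LogicalType;
    import org.apache.avro.LogicalTypes;

    // Avro "date" counts days since the epoch and is converted to epoch millis;
    // "timestamp-micros" is divided down to millis; "timestamp-millis" and plain
    // primitives pass through unchanged. MILLIS_PER_DAY is inlined here.
    final class AvroLogicalValueSketch {
      static Object normalize(LogicalType logicalType, Object fieldValue) {
        if (logicalType instanceof LogicalTypes.Date) {
          return ((int) fieldValue) * (24L * 3600 * 1000);
        } else if (logicalType instanceof LogicalTypes.TimestampMicros) {
          return ((long) fieldValue) / 1000L;
        }
        return fieldValue;
      }
    }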

[20/26] carbondata git commit: [CARBONDATA-2555] SDK reader set default isTransactional as false

Posted by ra...@apache.org.
[CARBONDATA-2555] SDK reader set default isTransactional as false

The SDK writer already defaults isTransactional to false, but the reader did not match this.
This change makes the reader default to false as well, so the SDK uses the flat folder structure by default.

This closes #2352


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/67766abc
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/67766abc
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/67766abc

Branch: refs/heads/branch-1.4
Commit: 67766abc8d26fb742cb2a2d65775b09eae773024
Parents: df29e4f
Author: ajantha-bhat <aj...@gmail.com>
Authored: Tue May 29 12:03:55 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               |  2 +-
 .../examples/sdk/CarbonReaderExample.java       |  1 -
 .../carbondata/examples/sdk/SDKS3Example.java   | 26 +++-----------------
 .../sdk/file/CarbonReaderBuilder.java           |  4 +--
 .../carbondata/sdk/file/CarbonReaderTest.java   | 20 +++++++++++----
 5 files changed, 21 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/67766abc/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 3c575fe..1d225a9 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -492,7 +492,7 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
    * Configure the transactional status of table
    * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
    * If set to true, then reads the carbondata and carbonindex files from segment folder structure.
-   * Default value is true
+   * Default value is false
    *
    * @param isTransactionalTable whether is transactional table or not
    * @return CarbonReaderBuilder object

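With the reader default flipped to false, output from the (also non-transactional by default) SDK writer can be read without any flag; only stores that still use the segment folder layout need the explicit call, as the updated CarbonReaderTest below shows:

    // Reading a legacy store that still uses segment folders now needs the
    // explicit flag, since the builder default changed to false:
    CarbonReader reader = CarbonReader
        .builder(path, "_temp")
        .isTransactionalTable(true)   // segment folder structure
        .projection(new String[]{"name", "age"})
        .build();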
http://git-wip-us.apache.org/repos/asf/carbondata/blob/67766abc/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
index 937bfa0..d7886c0 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
@@ -44,7 +44,6 @@ public class CarbonReaderExample {
             fields[1] = new Field("age", DataTypes.INT);
 
             CarbonWriter writer = CarbonWriter.builder()
-                    .isTransactionalTable(true)
                     .outputPath(path)
                     .persistSchemaFile(true)
                     .buildWriterForCSVInput(new Schema(fields));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/67766abc/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
index 7fab2cc..80c56fc 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
@@ -29,8 +29,8 @@ public class SDKS3Example {
     public static void main(String[] args) throws Exception {
         LogService logger = LogServiceFactory.getLogService(SDKS3Example.class.getName());
         if (args == null || args.length < 3) {
-            logger.error("Usage: java CarbonS3Example: <access-key> <secret-key>" +
-                    "<s3-endpoint> [table-path-on-s3] [persistSchema] [transactionalTable]");
+            logger.error("Usage: java CarbonS3Example: <access-key> <secret-key>"
+                + " <s3-endpoint> [table-path-on-s3] [rows]");
             System.exit(0);
         }
 
@@ -44,24 +44,6 @@ public class SDKS3Example {
             num = Integer.parseInt(args[4]);
         }
 
-        Boolean persistSchema = true;
-        if (args.length > 5) {
-            if (args[5].equalsIgnoreCase("true")) {
-                persistSchema = true;
-            } else {
-                persistSchema = false;
-            }
-        }
-
-        Boolean transactionalTable = true;
-        if (args.length > 6) {
-            if (args[6].equalsIgnoreCase("true")) {
-                transactionalTable = true;
-            } else {
-                transactionalTable = false;
-            }
-        }
-
         Field[] fields = new Field[2];
         fields[0] = new Field("name", DataTypes.STRING);
         fields[1] = new Field("age", DataTypes.INT);
@@ -69,9 +51,7 @@ public class SDKS3Example {
                 .setAccessKey(args[0])
                 .setSecretKey(args[1])
                 .setEndPoint(args[2])
-                .outputPath(path)
-                .persistSchemaFile(persistSchema)
-                .isTransactionalTable(transactionalTable);
+                .outputPath(path);
 
         CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/67766abc/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index 1e73e8c..e99ff0d 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -48,7 +48,7 @@ public class CarbonReaderBuilder {
   private String[] projectionColumns;
   private Expression filterExpression;
   private String tableName;
-  private boolean isTransactionalTable = true;
+  private boolean isTransactionalTable;
 
   /**
    * It will be true if use the projectAllColumns method,
@@ -84,7 +84,7 @@ public class CarbonReaderBuilder {
    * Configure the transactional status of table
    * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
    * If set to true, then reads the carbondata and carbonindex files from segment folder structure.
-   * Default value is true
+   * Default value is false
    *
    * @param isTransactionalTable whether is transactional table or not
    * @return CarbonReaderBuilder object

http://git-wip-us.apache.org/repos/asf/carbondata/blob/67766abc/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index ee095a1..deb6d06 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -61,7 +61,7 @@ public class CarbonReaderTest extends TestCase {
 
     TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp")
+    CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"}).build();
 
     // expected output after sorting
@@ -87,6 +87,7 @@ public class CarbonReaderTest extends TestCase {
     // Read again
     CarbonReader reader2 = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .projection(new String[]{"name", "age"})
         .build();
 
@@ -118,6 +119,7 @@ public class CarbonReaderTest extends TestCase {
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
         .projection(new String[]{"name", "name", "age", "name"})
+        .isTransactionalTable(true)
         .build();
 
     // expected output after sorting
@@ -159,11 +161,13 @@ public class CarbonReaderTest extends TestCase {
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
         .projection(new String[]{"name", "age"})
+        .isTransactionalTable(true)
         .build();
     // Reader 2
     CarbonReader reader2 = CarbonReader
         .builder(path, "_temp")
         .projection(new String[]{"name", "age"})
+        .isTransactionalTable(true)
         .build();
 
     while (reader.hasNext()) {
@@ -191,7 +195,7 @@ public class CarbonReaderTest extends TestCase {
 
     TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp")
+    CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"}).build();
 
     reader.close();
@@ -305,7 +309,7 @@ public class CarbonReaderTest extends TestCase {
     // Write to a Non Transactional Table
     TestUtil.writeFilesAndVerify(new Schema(fields), path, true, false);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp")
+    CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"})
         .isTransactionalTable(false)
         .build();
@@ -422,8 +426,8 @@ public class CarbonReaderTest extends TestCase {
     Assert.assertNotNull(dataFiles);
     Assert.assertTrue(dataFiles.length > 0);
 
-    CarbonReader reader = CarbonReader
-        .builder(path, "_temp")
+    CarbonReader reader = CarbonReader.builder(path, "_temp")
+        .isTransactionalTable(true)
         .projection(new String[]{
             "stringField"
             , "shortField"
@@ -548,6 +552,7 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .projection(strings)
         .build();
 
@@ -662,6 +667,7 @@ public class CarbonReaderTest extends TestCase {
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
         .projection(strings)
+        .isTransactionalTable(true)
         .build();
 
     int i = 0;
@@ -766,6 +772,7 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .projection(strings)
         .build();
 
@@ -808,6 +815,7 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .projectAllColumns()
         .build();
 
@@ -846,6 +854,7 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .build();
 
     // expected output after sorting
@@ -881,6 +890,7 @@ public class CarbonReaderTest extends TestCase {
       CarbonReader reader = CarbonReader
           .builder(path, "_temp")
           .projection(new String[]{})
+          .isTransactionalTable(true)
           .build();
       assert (false);
     } catch (RuntimeException e) {


[22/26] carbondata git commit: [HOTFIX] Changes in selecting the carbonindex files

Posted by ra...@apache.org.
[HOTFIX] Changes in selecting the carbonindex files

Currently, the query flow checks for either the mergeFileName or the list of index files when collecting the carbonindex files. After this change, both the files list and the mergeFileName are checked.

This closes #2333


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/013448ba
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/013448ba
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/013448ba

Branch: refs/heads/branch-1.4
Commit: 013448ba18750bf95a9d3889170c15df9d3931c5
Parents: 2ebd8b1
Author: dhatchayani <dh...@gmail.com>
Authored: Tue May 22 17:26:37 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../core/metadata/SegmentFileStore.java           | 18 ++++++++++++------
 .../core/writer/CarbonIndexFileMergeWriter.java   |  2 ++
 2 files changed, 14 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/013448ba/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
index d72ded3..acfc145 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
@@ -151,7 +151,8 @@ public class SegmentFileStore {
     CarbonFile segmentFolder = FileFactory.getCarbonFile(segmentPath);
     CarbonFile[] indexFiles = segmentFolder.listFiles(new CarbonFileFilter() {
       @Override public boolean accept(CarbonFile file) {
-        return file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT);
+        return (file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT) || file.getName()
+            .endsWith(CarbonTablePath.MERGE_INDEX_FILE_EXT));
       }
     });
     if (indexFiles != null && indexFiles.length > 0) {
@@ -160,7 +161,11 @@ public class SegmentFileStore {
       folderDetails.setRelative(true);
       folderDetails.setStatus(SegmentStatus.SUCCESS.getMessage());
       for (CarbonFile file : indexFiles) {
-        folderDetails.getFiles().add(file.getName());
+        if (file.getName().endsWith(CarbonTablePath.MERGE_INDEX_FILE_EXT)) {
+          folderDetails.setMergeFileName(file.getName());
+        } else {
+          folderDetails.getFiles().add(file.getName());
+        }
       }
       String segmentRelativePath = segmentPath.substring(tablePath.length(), segmentPath.length());
       segmentFile.addPath(segmentRelativePath, folderDetails);
@@ -508,10 +513,11 @@ public class SegmentFileStore {
           if (null != mergeFileName) {
             indexFiles.put(location + CarbonCommonConstants.FILE_SEPARATOR + mergeFileName,
                 entry.getValue().mergeFileName);
-          } else {
-            for (String indexFile : entry.getValue().getFiles()) {
-              indexFiles.put(location + CarbonCommonConstants.FILE_SEPARATOR + indexFile,
-                  entry.getValue().mergeFileName);
+          }
+          Set<String> files = entry.getValue().getFiles();
+          if (null != files && !files.isEmpty()) {
+            for (String indexFile : files) {
+              indexFiles.put(location + CarbonCommonConstants.FILE_SEPARATOR + indexFile, null);
             }
           }
         }

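In effect the selection logic becomes additive rather than either/or. A condensed paraphrase of the hunk above, with the FolderDetails fields passed in directly for illustration:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Set;

    // Collect both the merge index file (if any) and any remaining unmerged
    // index files, instead of one or the other.
    final class IndexFileSelection {
      static Map<String, String> select(String location, String mergeFileName, Set<String> files) {
        Map<String, String> indexFiles = new HashMap<>();
        if (mergeFileName != null) {
          indexFiles.put(location + "/" + mergeFileName, mergeFileName);
        }
        if (files != null && !files.isEmpty()) {
          for (String f : files) {
            indexFiles.put(location + "/" + f, null); // not covered by a merge file
          }
        }
        return indexFiles;
      }
    }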
http://git-wip-us.apache.org/repos/asf/carbondata/blob/013448ba/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java b/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java
index ceeb431..cb53c0b 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java
@@ -21,6 +21,7 @@ import java.io.Serializable;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 
@@ -140,6 +141,7 @@ public class CarbonIndexFileMergeWriter {
         }
         if (new Path(entry.getKey()).equals(new Path(location))) {
           segentry.getValue().setMergeFileName(mergeIndexFile);
+          segentry.getValue().setFiles(new HashSet<String>());
           break;
         }
       }


[23/26] carbondata git commit: [CARBONDATA-2508] Fix the exception that can't get executorService when start search mode twice

Posted by ra...@apache.org.
[CARBONDATA-2508] Fix the exception that can't get executorService when start search mode twice

This closes #2355


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/dc0ec1e2
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/dc0ec1e2
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/dc0ec1e2

Branch: refs/heads/branch-1.4
Commit: dc0ec1e270f12f80fca60ae825e2ea91237e3787
Parents: 43e0c59
Author: xubo245 <xu...@huawei.com>
Authored: Thu May 31 09:15:16 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../executor/impl/SearchModeDetailQueryExecutor.java   |  6 ++++--
 .../impl/SearchModeVectorDetailQueryExecutor.java      |  6 ++++--
 .../testsuite/detailquery/SearchModeTestCase.scala     | 13 +++++++++++++
 3 files changed, 21 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/dc0ec1e2/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
index aed472c..ae14327 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
@@ -37,8 +37,10 @@ public class SearchModeDetailQueryExecutor extends AbstractQueryExecutor<Object>
           LogServiceFactory.getLogService(SearchModeDetailQueryExecutor.class.getName());
   private static ExecutorService executorService = null;
 
-  static {
-    initThreadPool();
+  public SearchModeDetailQueryExecutor() {
+    if (executorService == null) {
+      initThreadPool();
+    }
   }
 
   private static synchronized void initThreadPool() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/dc0ec1e2/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
index 00fd511..705c451 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
@@ -40,8 +40,10 @@ public class SearchModeVectorDetailQueryExecutor extends AbstractQueryExecutor<O
           LogServiceFactory.getLogService(SearchModeVectorDetailQueryExecutor.class.getName());
   private static ExecutorService executorService = null;
 
-  static {
-    initThreadPool();
+  public SearchModeVectorDetailQueryExecutor() {
+    if (executorService == null) {
+      initThreadPool();
+    }
   }
 
   private static synchronized void initThreadPool() {

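Moving initialization from a static block into the constructor means a new executor can be created after search mode has been stopped and its pool shut down. A sketch of the pattern; the committed code checks for null in the constructor and relies on initThreadPool() being synchronized, whereas this sketch synchronizes the check itself to avoid racing constructors:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    // Lazily (re)create a shared pool on construction instead of at class load.
    abstract class LazyPoolExecutorSketch {
      private static ExecutorService executorService;

      LazyPoolExecutorSketch() {
        synchronized (LazyPoolExecutorSketch.class) {
          if (executorService == null || executorService.isShutdown()) {
            executorService = Executors.newCachedThreadPool();
          }
        }
      }
    }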
http://git-wip-us.apache.org/repos/asf/carbondata/blob/dc0ec1e2/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
index d278fc5..3e6adaf 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
@@ -136,4 +136,17 @@ class SearchModeTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP DATAMAP if exists dm3 ON TABLE main")
   }
 
+  test("start search mode twice") {
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].startSearchMode()
+    assert(sqlContext.sparkSession.asInstanceOf[CarbonSession].isSearchModeEnabled)
+    checkSearchAnswer("select id from main where id = '3' limit 10")
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].stopSearchMode()
+    assert(!sqlContext.sparkSession.asInstanceOf[CarbonSession].isSearchModeEnabled)
+
+    // start twice
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].startSearchMode()
+    assert(sqlContext.sparkSession.asInstanceOf[CarbonSession].isSearchModeEnabled)
+    checkSearchAnswer("select id from main where id = '3' limit 10")
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].stopSearchMode()
+  }
 }


[09/26] carbondata git commit: [CARBONDATA-2507] enable.offheap.sort not validate in CarbonData

Posted by ra...@apache.org.
[CARBONDATA-2507] enable.offheap.sort not validate in CarbonData

In #2274, the value of enable.offheap.sort is coerced to false whenever args[0] is not exactly "true", including "false" but also invalid strings such as "f" or "any".

So the value should be validated and reset to the default when it is not a valid boolean.

This closes #2331


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/df9978a1
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/df9978a1
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/df9978a1

Branch: refs/heads/branch-1.4
Commit: df9978a1587189edfa4d8796da650ad34854e7b9
Parents: 0433253
Author: xubo245 <xu...@huawei.com>
Authored: Wed May 23 16:08:25 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../carbondata/core/util/CarbonProperties.java  | 17 +++++++++++++
 .../carbondata/core/util/SessionParams.java     |  2 ++
 .../core/CarbonPropertiesValidationTest.java    | 25 ++++++++++++++++++++
 3 files changed, 44 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/df9978a1/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index 391096d..4ee5199 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -56,6 +56,7 @@ import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CSV_READ_BUFFER_SIZE;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_AUTO_HANDOFF;
+import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_OFFHEAP_SORT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_UNSAFE_SORT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_VECTOR_READER;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.HANDOFF_SIZE;
@@ -149,6 +150,9 @@ public final class CarbonProperties {
       case ENABLE_UNSAFE_SORT:
         validateEnableUnsafeSort();
         break;
+      case ENABLE_OFFHEAP_SORT:
+        validateEnableOffHeapSort();
+        break;
       case CARBON_CUSTOM_BLOCK_DISTRIBUTION:
         validateCustomBlockDistribution();
         break;
@@ -235,6 +239,7 @@ public final class CarbonProperties {
     validateBlockletGroupSizeInMB();
     validateNumberOfColumnPerIORead();
     validateEnableUnsafeSort();
+    validateEnableOffHeapSort();
     validateCustomBlockDistribution();
     validateEnableVectorReader();
     validateLockType();
@@ -471,6 +476,18 @@ public final class CarbonProperties {
     }
   }
 
+  private void validateEnableOffHeapSort() {
+    String value = carbonProperties.getProperty(ENABLE_OFFHEAP_SORT);
+    boolean isValidBooleanValue = CarbonUtil.validateBoolean(value);
+    if (!isValidBooleanValue) {
+      LOGGER.warn("The enable off heap sort value \"" + value
+          + "\" is invalid. Using the default value \""
+          + CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT);
+      carbonProperties.setProperty(ENABLE_OFFHEAP_SORT,
+          CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT);
+    }
+  }
+
   private void initPropertySet() throws IllegalAccessException {
     Field[] declaredFields = CarbonCommonConstants.class.getDeclaredFields();
     for (Field field : declaredFields) {

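The validation boils down to accepting only a case-insensitive "true" or "false" and resetting anything else to the default, which is what the new unit test below asserts when it stores "xyz" and reads back the default. A stand-in sketch (validateOrDefault is illustrative, not CarbonData's API):

    // Accept only "true"/"false" (case-insensitive); otherwise fall back to the
    // default, mirroring CarbonUtil.validateBoolean plus the warn-and-reset above.
    final class BooleanPropertySketch {
      static String validateOrDefault(String value, String defaultValue) {
        if ("true".equalsIgnoreCase(value) || "false".equalsIgnoreCase(value)) {
          return value;
        }
        return defaultValue; // e.g. validateOrDefault("xyz", "true") -> "true"
      }
    }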
http://git-wip-us.apache.org/repos/asf/carbondata/blob/df9978a1/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
index 3823aef..169c003 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
@@ -33,6 +33,7 @@ import org.apache.carbondata.core.exception.InvalidConfigurationException;
 
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_CUSTOM_BLOCK_DISTRIBUTION;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_SEARCH_MODE_ENABLE;
+import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_OFFHEAP_SORT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_UNSAFE_SORT;
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORDS_ACTION;
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORDS_LOGGER_ENABLE;
@@ -145,6 +146,7 @@ public class SessionParams implements Serializable, Cloneable {
     boolean isValid = false;
     switch (key) {
       case ENABLE_UNSAFE_SORT:
+      case ENABLE_OFFHEAP_SORT:
       case CARBON_CUSTOM_BLOCK_DISTRIBUTION:
       case CARBON_OPTIONS_BAD_RECORDS_LOGGER_ENABLE:
       case CARBON_OPTIONS_IS_EMPTY_DATA_BAD_RECORD:

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df9978a1/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java b/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
index bbfe26c..7cc665e 100644
--- a/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
@@ -24,6 +24,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.util.CarbonProperties;
 
 import junit.framework.TestCase;
+import org.junit.Assert;
 import org.junit.Test;
 
 /**
@@ -65,6 +66,30 @@ public class CarbonPropertiesValidationTest extends TestCase {
         CarbonCommonConstants.ENABLE_UNSAFE_SORT_DEFAULT.equalsIgnoreCase(valueAfterValidation));
   }
 
+  @Test public void testValidateEnableOffHeapSort()
+      throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+    Method validateMethodType =
+        carbonProperties.getClass().getDeclaredMethod("validateEnableOffHeapSort");
+    validateMethodType.setAccessible(true);
+    carbonProperties.addProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT, "True");
+    assert (carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT)
+        .equalsIgnoreCase("true"));
+    carbonProperties.addProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT, "false");
+    assert (carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT)
+        .equalsIgnoreCase("false"));
+    carbonProperties.addProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT, "xyz");
+    assert (carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT)
+        .equalsIgnoreCase("true"));
+    String valueBeforeValidation =
+        carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT);
+    validateMethodType.invoke(carbonProperties);
+    String valueAfterValidation =
+        carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT);
+    assertTrue(valueBeforeValidation.equals(valueAfterValidation));
+    assertTrue(
+        CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT.equalsIgnoreCase(valueAfterValidation));
+  }
+
   @Test public void testValidateCustomBlockDistribution()
       throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
     Method validateMethodType =


[04/26] carbondata git commit: [CARBONDATA-2500] Add new API to read user's schema in SDK

Posted by ra...@apache.org.
[CARBONDATA-2500] Add new API to read user's schema in SDK

The field order in the schema that the SDK returns on read can differ from the order in which the fields were written. This change adds an API to read back the user's original schema in the SDK.

This closes #2341


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/c7fff9e2
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/c7fff9e2
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/c7fff9e2

Branch: refs/heads/branch-1.4
Commit: c7fff9e2fce11e049337e3d041ced0b15060365f
Parents: 013448b
Author: xubo245 <xu...@huawei.com>
Authored: Fri May 25 16:07:55 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../ThriftWrapperSchemaConverterImpl.java       |  12 +-
 .../core/metadata/schema/table/CarbonTable.java |   6 +-
 .../core/metadata/schema/table/TableInfo.java   |   4 +-
 .../core/metadata/schema/table/TableSchema.java |  14 +-
 .../schema/table/TableSchemaBuilder.java        |   2 +-
 .../core/reader/CarbonHeaderReader.java         |   4 +-
 .../util/AbstractDataFileFooterConverter.java   |   6 +-
 .../apache/carbondata/core/util/CarbonUtil.java |   8 +-
 .../core/util/DataFileFooterConverter.java      |   4 +-
 .../core/util/DataFileFooterConverter2.java     |   2 +-
 .../core/util/DataFileFooterConverterV3.java    |   4 +-
 .../ThriftWrapperSchemaConverterImplTest.java   |   4 +-
 .../hadoop/testutil/StoreCreator.java           |   2 +-
 .../presto/util/CarbonDataStoreCreator.scala    |   2 +-
 .../command/carbonTableSchemaCommon.scala       |   2 +-
 .../org/apache/spark/sql/CarbonSource.scala     |   2 +-
 .../datasources/SparkCarbonFileFormat.scala     |   2 +-
 .../spark/sql/hive/CarbonFileMetastore.scala    |   2 +-
 .../spark/sql/hive/CarbonHiveMetaStore.scala    |   2 +-
 .../carbondata/sdk/file/CarbonReader.java       |  63 +++
 .../carbondata/sdk/file/CarbonReaderTest.java   | 454 ++++++++++++++++++-
 21 files changed, 549 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
index 644e6a3..f03b997 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
@@ -269,7 +269,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
       thriftColumnSchema.add(fromWrapperToExternalColumnSchema(wrapperColumnSchema));
     }
     org.apache.carbondata.format.SchemaEvolution schemaEvolution =
-        fromWrapperToExternalSchemaEvolution(wrapperTableSchema.getSchemaEvalution());
+        fromWrapperToExternalSchemaEvolution(wrapperTableSchema.getSchemaEvolution());
     org.apache.carbondata.format.TableSchema externalTableSchema =
         new org.apache.carbondata.format.TableSchema(
             wrapperTableSchema.getTableId(), thriftColumnSchema, schemaEvolution);
@@ -535,7 +535,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
         externalColumnSchema.getParentColumnTableRelations();
     if (null != parentColumnTableRelation) {
       wrapperColumnSchema.setParentColumnTableRelations(
-          fromExtrenalToWrapperParentTableColumnRelations(parentColumnTableRelation));
+          fromExternalToWrapperParentTableColumnRelations(parentColumnTableRelation));
     }
     return wrapperColumnSchema;
   }
@@ -595,11 +595,11 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
       listOfColumns.add(fromExternalToWrapperColumnSchema(externalColumnSchema));
     }
     wrapperTableSchema.setListOfColumns(listOfColumns);
-    wrapperTableSchema.setSchemaEvalution(
+    wrapperTableSchema.setSchemaEvolution(
         fromExternalToWrapperSchemaEvolution(externalTableSchema.getSchema_evolution()));
     if (externalTableSchema.isSetBucketingInfo()) {
       wrapperTableSchema.setBucketingInfo(
-          fromExternalToWarpperBucketingInfo(externalTableSchema.bucketingInfo));
+          fromExternalToWrapperBucketingInfo(externalTableSchema.bucketingInfo));
     }
     if (externalTableSchema.getPartitionInfo() != null) {
       wrapperTableSchema.setPartitionInfo(
@@ -608,7 +608,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
     return wrapperTableSchema;
   }
 
-  private BucketingInfo fromExternalToWarpperBucketingInfo(
+  private BucketingInfo fromExternalToWrapperBucketingInfo(
       org.apache.carbondata.format.BucketingInfo externalBucketInfo) {
     List<ColumnSchema> listOfColumns = new ArrayList<ColumnSchema>();
     for (org.apache.carbondata.format.ColumnSchema externalColumnSchema :
@@ -661,7 +661,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
     return childSchema;
   }
 
-  private List<ParentColumnTableRelation> fromExtrenalToWrapperParentTableColumnRelations(
+  private List<ParentColumnTableRelation> fromExternalToWrapperParentTableColumnRelations(
       List<org.apache.carbondata.format.ParentColumnTableRelation> thirftParentColumnRelation) {
     List<ParentColumnTableRelation> parentColumnTableRelationList = new ArrayList<>();
     for (org.apache.carbondata.format.ParentColumnTableRelation carbonTableRelation :

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index b1ed981..ba051be 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -566,15 +566,15 @@ public class CarbonTable implements Serializable {
     List<CarbonDimension> dimensions = tableDimensionsMap.get(tableName);
     List<CarbonMeasure> measures = tableMeasuresMap.get(tableName);
     List<CarbonColumn> columnList = new ArrayList<>(dimensions.size() + measures.size());
-    List<CarbonColumn> complexdimensionList = new ArrayList<>(dimensions.size());
+    List<CarbonColumn> complexDimensionList = new ArrayList<>(dimensions.size());
     for (CarbonColumn column : dimensions) {
       if (column.isComplex()) {
-        complexdimensionList.add(column);
+        complexDimensionList.add(column);
       } else {
         columnList.add(column);
       }
     }
-    columnList.addAll(complexdimensionList);
+    columnList.addAll(complexDimensionList);
     for (CarbonColumn column : measures) {
       if (!(column.getColName().equals("default_dummy_measure"))) {
         columnList.add(column);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
index c7bcf2e..38145e5 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
@@ -124,13 +124,13 @@ public class TableInfo implements Serializable, Writable {
   }
 
   private void updateIsSchemaModified() {
-    if (null != factTable.getSchemaEvalution()) {
+    if (null != factTable.getSchemaEvolution()) {
       // If schema evolution entry list size is > 1 that means an alter operation is performed
       // which has added the new schema entry in the schema evolution list.
       // Currently apart from create table schema evolution entries
       // are getting added only in the alter operations.
       isSchemaModified =
-          factTable.getSchemaEvalution().getSchemaEvolutionEntryList().size() > 1 ? true : false;
+          factTable.getSchemaEvolution().getSchemaEvolutionEntryList().size() > 1 ? true : false;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
index f008821..3d9e068 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
@@ -61,7 +61,7 @@ public class TableSchema implements Serializable, Writable {
   /**
    * History of schema evolution of this table
    */
-  private SchemaEvolution schemaEvalution;
+  private SchemaEvolution schemaEvolution;
 
   /**
    * contains all key value pairs for table properties set by user in craete DDL
@@ -112,17 +112,17 @@ public class TableSchema implements Serializable, Writable {
   }
 
   /**
-   * @return the schemaEvalution
+   * @return the schemaEvolution
    */
-  public SchemaEvolution getSchemaEvalution() {
-    return schemaEvalution;
+  public SchemaEvolution getSchemaEvolution() {
+    return schemaEvolution;
   }
 
   /**
-   * @param schemaEvalution the schemaEvalution to set
+   * @param schemaEvolution the schemaEvolution to set
    */
-  public void setSchemaEvalution(SchemaEvolution schemaEvalution) {
-    this.schemaEvalution = schemaEvalution;
+  public void setSchemaEvolution(SchemaEvolution schemaEvolution) {
+    this.schemaEvolution = schemaEvolution;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
index 03d03f8..bb7e901 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
@@ -90,7 +90,7 @@ public class TableSchemaBuilder {
     schema.setBucketingInfo(null);
     SchemaEvolution schemaEvol = new SchemaEvolution();
     schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
-    schema.setSchemaEvalution(schemaEvol);
+    schema.setSchemaEvolution(schemaEvol);
     List<ColumnSchema> allColumns = new LinkedList<>(sortColumns);
     allColumns.addAll(dimension);
     allColumns.addAll(complex);

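In client code the renamed accessors now read as follows; this is a minimal sketch assembled from the call sites in this commit (package locations are assumed to match the imports used by those files):

    import java.util.ArrayList;

    import org.apache.carbondata.core.metadata.schema.SchemaEvolution;
    import org.apache.carbondata.core.metadata.schema.SchemaEvolutionEntry;
    import org.apache.carbondata.core.metadata.schema.table.TableSchema;

    // Attach an empty schema-evolution history to a new TableSchema,
    // using the corrected getter/setter spellings.
    TableSchema tableSchema = new TableSchema();
    SchemaEvolution schemaEvol = new SchemaEvolution();
    schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
    tableSchema.setSchemaEvolution(schemaEvol);
    SchemaEvolution history = tableSchema.getSchemaEvolution();
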
http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
index 9bbdca9..dfd5815 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
@@ -23,7 +23,7 @@ import java.util.List;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.format.FileHeader;
 
-import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchmeaToWrapperColumnSchema;
+import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
 
 import org.apache.thrift.TBase;
 
@@ -78,7 +78,7 @@ public class CarbonHeaderReader {
     List<ColumnSchema> columnSchemaList = new ArrayList<>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (org.apache.carbondata.format.ColumnSchema table_column : table_columns) {
-      ColumnSchema col = thriftColumnSchmeaToWrapperColumnSchema(table_column);
+      ColumnSchema col = thriftColumnSchemaToWrapperColumnSchema(table_column);
       col.setColumnReferenceId(col.getColumnUniqueId());
       columnSchemaList.add(col);
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
index e30ad03..f005d88 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
@@ -89,7 +89,7 @@ public abstract class AbstractDataFileFooterConverter {
       List<org.apache.carbondata.format.ColumnSchema> table_columns =
           readIndexHeader.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       // get the segment info
       SegmentInfo segmentInfo = getSegmentInfo(readIndexHeader.getSegment_info());
@@ -151,7 +151,7 @@ public abstract class AbstractDataFileFooterConverter {
       List<org.apache.carbondata.format.ColumnSchema> table_columns =
           readIndexHeader.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       // get the segment info
       SegmentInfo segmentInfo = getSegmentInfo(readIndexHeader.getSegment_info());
@@ -284,7 +284,7 @@ public abstract class AbstractDataFileFooterConverter {
     return blockletIndex;
   }
 
-  protected ColumnSchema thriftColumnSchmeaToWrapperColumnSchema(
+  protected ColumnSchema thriftColumnSchemaToWrapperColumnSchema(
       org.apache.carbondata.format.ColumnSchema externalColumnSchema) {
     ColumnSchema wrapperColumnSchema = new ColumnSchema();
     wrapperColumnSchema.setColumnUniqueId(externalColumnSchema.getColumn_id());

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 9ccd772..1526047 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2204,7 +2204,7 @@ public final class CarbonUtil {
     return tableInfo;
   }
 
-  public static ColumnSchema thriftColumnSchmeaToWrapperColumnSchema(
+  public static ColumnSchema thriftColumnSchemaToWrapperColumnSchema(
       org.apache.carbondata.format.ColumnSchema externalColumnSchema) {
     ColumnSchema wrapperColumnSchema = new ColumnSchema();
     wrapperColumnSchema.setColumnUniqueId(externalColumnSchema.getColumn_id());
@@ -2387,7 +2387,7 @@ public final class CarbonUtil {
       List<org.apache.carbondata.format.ColumnSchema> table_columns =
           readIndexHeader.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       // only columnSchema is the valid entry, reset all dummy entries.
       TableSchema tableSchema = getDummyTableSchema(tableName, columnSchemaList);
@@ -2412,7 +2412,7 @@ public final class CarbonUtil {
     TableSchema tableSchema = new TableSchema();
     tableSchema.setTableName(tableName);
     tableSchema.setBucketingInfo(null);
-    tableSchema.setSchemaEvalution(null);
+    tableSchema.setSchemaEvolution(null);
     tableSchema.setTableId(UUID.randomUUID().toString());
     tableSchema.setListOfColumns(columnSchemaList);
 
@@ -2422,7 +2422,7 @@ public final class CarbonUtil {
     List<SchemaEvolutionEntry> schEntryList = new ArrayList<>();
     schEntryList.add(schemaEvolutionEntry);
     schemaEvol.setSchemaEvolutionEntryList(schEntryList);
-    tableSchema.setSchemaEvalution(schemaEvol);
+    tableSchema.setSchemaEvolution(schemaEvol);
     return tableSchema;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
index d665379..670536e 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
@@ -60,7 +60,7 @@ public class DataFileFooterConverter extends AbstractDataFileFooterConverter {
       List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
       List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       dataFileFooter.setColumnInTable(columnSchemaList);
 
@@ -135,7 +135,7 @@ public class DataFileFooterConverter extends AbstractDataFileFooterConverter {
       FileFooter footer = reader.readFooter();
       List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
     } finally {
       if (null != fileReader) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
index 863e1df..07391dc 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
@@ -51,7 +51,7 @@ public class DataFileFooterConverter2 extends AbstractDataFileFooterConverter {
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
     for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
     }
     dataFileFooter.setColumnInTable(columnSchemaList);
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
index 214e217..6a968b4 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
@@ -61,7 +61,7 @@ public class DataFileFooterConverterV3 extends AbstractDataFileFooterConverter {
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
     }
     dataFileFooter.setColumnInTable(columnSchemaList);
     List<org.apache.carbondata.format.BlockletIndex> leaf_node_indices_Thrift =
@@ -91,7 +91,7 @@ public class DataFileFooterConverterV3 extends AbstractDataFileFooterConverter {
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
     }
     return columnSchemaList;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java b/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
index d4bb344..67c7594 100644
--- a/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
@@ -1421,7 +1421,7 @@ public class ThriftWrapperSchemaConverterImplTest {
         return columnSchemas;
       }
 
-      @Mock public SchemaEvolution getSchemaEvalution() {
+      @Mock public SchemaEvolution getSchemaEvolution() {
         return schemaEvolution;
       }
 
@@ -1537,7 +1537,7 @@ public class ThriftWrapperSchemaConverterImplTest {
       final SchemaEvolution schemaEvolution = new SchemaEvolution();
       final Map mapTableProperties = new HashMap<String, String>();
 
-      @Mock public SchemaEvolution getSchemaEvalution() {
+      @Mock public SchemaEvolution getSchemaEvolution() {
         return schemaEvolution;
       }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
index 9fd1812..63acad3 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
@@ -278,7 +278,7 @@ public class StoreCreator {
     tableSchema.setListOfColumns(columnSchemas);
     SchemaEvolution schemaEvol = new SchemaEvolution();
     schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
-    tableSchema.setSchemaEvalution(schemaEvol);
+    tableSchema.setSchemaEvolution(schemaEvol);
     tableSchema.setTableId(UUID.randomUUID().toString());
     tableInfo.setTableUniqueName(
         identifier.getCarbonTableIdentifier().getTableUniqueName()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
----------------------------------------------------------------------
diff --git a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
index 6a8c40d..f4415b8 100644
--- a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
+++ b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
@@ -310,7 +310,7 @@ object CarbonDataStoreCreator {
     val schemaEvol: SchemaEvolution = new SchemaEvolution()
     schemaEvol.setSchemaEvolutionEntryList(
       new util.ArrayList[SchemaEvolutionEntry]())
-    tableSchema.setSchemaEvalution(schemaEvol)
+    tableSchema.setSchemaEvolution(schemaEvol)
     tableSchema.setTableId(UUID.randomUUID().toString)
     tableInfo.setTableUniqueName(
       absoluteTableIdentifier.getCarbonTableIdentifier.getTableUniqueName

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index a830185..aa40a1f 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -694,7 +694,7 @@ class TableNewProcessor(cm: TableModel) {
     }
     tableSchema.setTableName(cm.tableName)
     tableSchema.setListOfColumns(allColumns.asJava)
-    tableSchema.setSchemaEvalution(schemaEvol)
+    tableSchema.setSchemaEvolution(schemaEvol)
     tableInfo.setDatabaseName(cm.databaseNameOp.getOrElse(null))
     tableInfo.setTableUniqueName(CarbonTable.buildUniqueName(cm.databaseNameOp.getOrElse(null),
       cm.tableName))

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
index 8376136..0a23d06 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
@@ -358,7 +358,7 @@ object CarbonSource {
     tableInfo.setDatabaseName(identifier.getDatabaseName)
     val schemaEvolutionEntry = new SchemaEvolutionEntry
     schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
-    tableInfo.getFactTable.getSchemaEvalution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
+    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
     val map = if (!metaStore.isReadFromHiveMetaStore && isTransactionalTable) {
       metaStore.saveToDisk(tableInfo, identifier.getTablePath)
       new java.util.HashMap[String, String]()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
index 1da6507..934f5c7 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
@@ -79,7 +79,7 @@ class SparkCarbonFileFormat extends FileFormat
       .getColumn_schema
     var colArray = ArrayBuffer[StructField]()
     for (i <- 0 to table_columns.size() - 1) {
-      val col = CarbonUtil.thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i))
+      val col = CarbonUtil.thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i))
       colArray += (new StructField(col.getColumnName,
         CarbonScalaUtil.convertCarbonToSparkDataType(col.getDataType), false))
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
index 2d24abf..81a6bed 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
@@ -369,7 +369,7 @@ class CarbonFileMetastore extends CarbonMetaStore {
       absoluteTableIdentifier: AbsoluteTableIdentifier): String = {
     val schemaEvolutionEntry = new schema.SchemaEvolutionEntry
     schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
-    tableInfo.getFactTable.getSchemaEvalution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
+    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
     removeTableFromMetadata(tableInfo.getDatabaseName, tableInfo.getFactTable.getTableName)
     CarbonMetadata.getInstance().loadTableMetadata(tableInfo)
     addTableCache(tableInfo, absoluteTableIdentifier)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
index 1300c22..2e6ebee 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
@@ -186,7 +186,7 @@ class CarbonHiveMetaStore extends CarbonFileMetastore {
       absoluteTableIdentifier: AbsoluteTableIdentifier): String = {
     val schemaEvolutionEntry = new schema.SchemaEvolutionEntry
     schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
-    tableInfo.getFactTable.getSchemaEvalution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
+    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
     CarbonUtil.convertToMultiGsonStrings(tableInfo, " ", "", ",")
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index d85bf4b..9ae940b 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -17,20 +17,30 @@
 
 package org.apache.carbondata.sdk.file;
 
+import java.io.DataInputStream;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.metadata.converter.SchemaConverter;
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.reader.CarbonHeaderReader;
+import org.apache.carbondata.core.reader.CarbonIndexFileReader;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
 
 import org.apache.hadoop.mapreduce.RecordReader;
 
+
 /**
  * Reader for carbondata file
  */
@@ -102,6 +112,59 @@ public class CarbonReader<T> {
   }
 
   /**
+   * Read carbonindex file and return the schema
+   *
+   * @param indexFilePath complete path including index file name
+   * @return the List<ColumnSchema> read from the index file,
+   * or null if the index file is not present in the path.
+   * @throws IOException
+   */
+  public static List<ColumnSchema> readSchemaInIndexFile(String indexFilePath) throws IOException {
+    CarbonFile indexFile =
+        FileFactory.getCarbonFile(indexFilePath, FileFactory.getFileType(indexFilePath));
+    if (!indexFile.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)) {
+      throw new IOException("Not an index file name");
+    }
+    // read schema from the first index file
+    DataInputStream dataInputStream =
+        FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
+    byte[] bytes = new byte[(int) indexFile.getSize()];
+    try {
+      //get the file in byte buffer
+      dataInputStream.readFully(bytes);
+      CarbonIndexFileReader indexReader = new CarbonIndexFileReader();
+      // read from byte buffer.
+      indexReader.openThriftReader(bytes);
+      // get the index header
+      org.apache.carbondata.format.IndexHeader readIndexHeader = indexReader.readIndexHeader();
+      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
+      List<org.apache.carbondata.format.ColumnSchema> table_columns =
+          readIndexHeader.getTable_columns();
+      for (org.apache.carbondata.format.ColumnSchema columnSchema : table_columns) {
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(columnSchema));
+      }
+      return columnSchemaList;
+    } finally {
+      dataInputStream.close();
+    }
+  }
+
+  /**
+   * Read the carbonindex file and return the user schema;
+   * the column order is the same as the order in which the user saved the schema
+   */
+  public static List<ColumnSchema> readUserSchema(String indexFilePath) throws IOException {
+    List<ColumnSchema> columnSchemas = readSchemaInIndexFile(indexFilePath);
+    Collections.sort(columnSchemas, new Comparator<ColumnSchema>() {
+      @Override
+      public int compare(ColumnSchema o1, ColumnSchema o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+    return columnSchemas;
+  }
+
+  /**
    * Read schema file and return table info object
    */
   public static TableInfo readSchemaFile(String schemaFilePath) throws IOException {

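A minimal usage sketch of the two helpers above, reading the user-ordered schema from a carbonindex file and feeding it to the reader as a projection (the directory and index file name here are hypothetical placeholders):

    import java.util.List;

    import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;

    String path = "./testWriteFiles";
    // Replace with the path of an actual index file written by the SDK.
    String indexFilePath = path + "/Fact/Part0/Segment_null/part0.carbonindex";

    // readUserSchema() returns the columns sorted back into the order
    // in which the user declared them.
    List<ColumnSchema> columns = CarbonReader.readUserSchema(indexFilePath);
    String[] projection = new String[columns.size()];
    for (int i = 0; i < columns.size(); i++) {
      projection[i] = columns.get(i).getColumnName();
    }

    CarbonReader reader = CarbonReader
        .builder(path, "_temp")
        .projection(projection)
        .build();
    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      // consume row ...
    }
    reader.close();
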
http://git-wip-us.apache.org/repos/asf/carbondata/blob/c7fff9e2/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 756dbe4..30d4091 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -17,14 +17,15 @@
 
 package org.apache.carbondata.sdk.file;
 
-import java.io.File;
-import java.io.FileFilter;
-import java.io.FilenameFilter;
-import java.io.IOException;
+import java.io.*;
 import java.sql.Date;
 import java.sql.Timestamp;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 
+import org.apache.avro.generic.GenericData;
+import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -36,10 +37,9 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import junit.framework.TestCase;
 import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.apache.commons.lang.CharEncoding;
+import org.junit.*;
+import tech.allegro.schema.json2avro.converter.JsonAvroConverter;
 
 public class CarbonReaderTest extends TestCase {
 
@@ -371,7 +371,8 @@ public class CarbonReaderTest extends TestCase {
     Assert.assertNotNull(dataFiles);
     Assert.assertTrue(dataFiles.length > 0);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp")
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
         .projection(new String[]{
             "stringField"
             , "shortField"
@@ -381,7 +382,343 @@ public class CarbonReaderTest extends TestCase {
             , "boolField"
             , "dateField"
             , "timeField"
-            , "decimalField"}).build();
+            , "decimalField"})
+        .build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
+  @Test
+  public void testReadSchemaFileAndSort() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("shortField", DataTypes.SHORT);
+    fields[2] = new Field("intField", DataTypes.INT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+
+    File[] dataFiles = new File(path + "/Metadata").listFiles(new FilenameFilter() {
+      @Override public boolean accept(File dir, String name) {
+        return name.endsWith("schema");
+      }
+    });
+    TableInfo tableInfo = CarbonReader.readSchemaFile(dataFiles[0].getAbsolutePath());
+
+    List<ColumnSchema> columns = tableInfo.getFactTable().getListOfColumns();
+
+    // sort the schema
+    Collections.sort(tableInfo.getFactTable().getListOfColumns(), new Comparator<ColumnSchema>() {
+      @Override
+      public int compare(ColumnSchema o1, ColumnSchema o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+
+    // Transform the schema
+    String[] strings= new String[columns.size()];
+    for (int i = 0; i < columns.size(); i++) {
+      strings[i]= columns.get(i).getColumnName();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    Assert.assertNotNull(dataFiles);
+    Assert.assertTrue(dataFiles.length > 0);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(strings)
+        .build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
+  @Test
+  public void testReadSchemaInDataFileAndSort() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("shortField", DataTypes.SHORT);
+    fields[2] = new Field("intField", DataTypes.INT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+
+    File[] dataFiles2 = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
+      @Override public boolean accept(File dir, String name) {
+        return name.endsWith("carbondata");
+      }
+    });
+
+    List<ColumnSchema> columns = CarbonReader.readSchemaInDataFile(dataFiles2[0].getAbsolutePath());
+
+    // sort the schema
+    Collections.sort(columns, new Comparator<ColumnSchema>() {
+      @Override
+      public int compare(ColumnSchema o1, ColumnSchema o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+
+    // Transform the schema
+    String[] strings= new String[columns.size()];
+    for (int i = 0; i < columns.size(); i++) {
+      strings[i]= columns.get(i).getColumnName();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(strings)
+        .build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
+  @Test
+  public void testReadUserSchema() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("shortField", DataTypes.SHORT);
+    fields[2] = new Field("intField", DataTypes.INT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+
+    File[] dataFiles2 = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
+      @Override public boolean accept(File dir, String name) {
+        return name.endsWith("carbonindex");
+      }
+    });
+
+    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+
+    // Transform the schema
+    String[] strings= new String[columns.size()];
+    for (int i = 0; i < columns.size(); i++) {
+      strings[i]= columns.get(i).getColumnName();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(strings)
+        .build();
 
     int i = 0;
     while (reader.hasNext()) {
@@ -510,4 +847,101 @@ public class CarbonReaderTest extends TestCase {
       assert(row.length==0);
     }
   }
+
+  private void WriteAvroComplexData(String mySchema, String json, String[] sortColumns, String path)
+      throws IOException, InvalidLoadOptionException {
+
+    // conversion to GenericData.Record
+    org.apache.avro.Schema nn = new org.apache.avro.Schema.Parser().parse(mySchema);
+    JsonAvroConverter converter = new JsonAvroConverter();
+    GenericData.Record record = converter.convertToGenericDataRecord(
+        json.getBytes(CharEncoding.UTF_8), nn);
+
+    try {
+      CarbonWriter writer = CarbonWriter.builder()
+          .outputPath(path)
+          .isTransactionalTable(true)
+          .buildWriterForAvroInput(nn);
+
+      for (int i = 0; i < 100; i++) {
+        writer.write(record);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw e;
+    }
+  }
+
+  // TODO: support get schema of complex data type
+  @Ignore
+  public void testReadUserSchemaOfComplex() throws IOException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    String mySchema =
+        "{" +
+            "  \"name\": \"address\", " +
+            "   \"type\": \"record\", " +
+            "    \"fields\": [  " +
+            "  { \"name\": \"name\", \"type\": \"string\"}, " +
+            "  { \"name\": \"age\", \"type\": \"int\"}, " +
+            "  { " +
+            "    \"name\": \"address\", " +
+            "      \"type\": { " +
+            "    \"type\" : \"record\", " +
+            "        \"name\" : \"my_address\", " +
+            "        \"fields\" : [ " +
+            "    {\"name\": \"street\", \"type\": \"string\"}, " +
+            "    {\"name\": \"city\", \"type\": \"string\"} " +
+            "  ]} " +
+            "  }, " +
+            "  {\"name\" :\"doorNum\", " +
+            "   \"type\" : { " +
+            "   \"type\" :\"array\", " +
+            "   \"items\":{ " +
+            "   \"name\" :\"EachdoorNums\", " +
+            "   \"type\" : \"int\", " +
+            "   \"default\":-1} " +
+            "              } " +
+            "  }] " +
+            "}";
+
+    String json = "{\"name\":\"bob\", \"age\":10, \"address\" : {\"street\":\"abc\", \"city\":\"bang\"}, "
+        + "   \"doorNum\" : [1,2,3,4]}";
+
+    try {
+      WriteAvroComplexData(mySchema, json, null, path);
+    } catch (InvalidLoadOptionException e) {
+      e.printStackTrace();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    File[] dataFiles = segmentFolder.listFiles(new FileFilter() {
+      @Override
+      public boolean accept(File pathname) {
+        return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
+      }
+    });
+    Assert.assertNotNull(dataFiles);
+    Assert.assertEquals(1, dataFiles.length);
+
+
+    File[] dataFiles2 = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
+      @Override
+      public boolean accept(File dir, String name) {
+        return name.endsWith("carbonindex");
+      }
+    });
+
+    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+
+    for (int i = 0; i < columns.size(); i++) {
+      System.out.println(columns.get(i).getColumnName() + "\t" + columns.get(i).getSchemaOrdinal());
+    }
+    FileUtils.deleteDirectory(new File(path));
+  }
+
 }

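The assertions in these tests decode CarbonData's internal column representations by hand: a projected DATE value comes back as an int count of days since the epoch, and a TIMESTAMP as a long in microseconds. A self-contained sketch of that decoding, with illustrative raw values standing in for row[6] and row[7]:

    import java.sql.Date;
    import java.sql.Timestamp;

    long day = 24L * 3600 * 1000;        // milliseconds per day
    int rawDate = 17957;                 // illustrative days-since-epoch value
    long rawTime = 1549940614000000L;    // illustrative microsecond timestamp
    Date date = new Date(day * rawDate);             // renders as a calendar date
    Timestamp ts = new Timestamp(rawTime / 1000);    // microseconds -> milliseconds
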

[14/26] carbondata git commit: [CARBONDATA-2546] Fixed the ArrayIndexOutOfBoundsException thrown when the same column is given twice in the projection of CarbonReader

Posted by ra...@apache.org.
[CARBONDATA-2546] Fixed the ArrayIndexOutOfBoundsException thrown when the same column is given twice in the projection of CarbonReader

This closes #2348


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/df29e4fb
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/df29e4fb
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/df29e4fb

Branch: refs/heads/branch-1.4
Commit: df29e4fb0c3cd00e53b9a9d8d5891fc74c8d6cb2
Parents: 6d245b9
Author: xubo245 <xu...@huawei.com>
Authored: Mon May 28 19:24:46 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../carbondata/hadoop/CarbonRecordReader.java   |  1 -
 .../hadoop/api/CarbonInputFormat.java           | 27 ++++++++
 .../TestNonTransactionalCarbonTable.scala       | 11 +++-
 .../sdk/file/CarbonReaderBuilder.java           |  3 +-
 .../carbondata/sdk/file/CarbonReaderTest.java   | 66 +++++++++++++++-----
 5 files changed, 87 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/df29e4fb/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
index d4b091c..cad20fc 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
@@ -90,7 +90,6 @@ public class CarbonRecordReader<T> extends AbstractRecordReader<T> {
 
   @Override public boolean nextKeyValue() {
     return carbonIterator.hasNext();
-
   }
 
   @Override public Void getCurrentKey() throws IOException, InterruptedException {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df29e4fb/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
index 91da93f..cf51162 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
@@ -24,6 +24,7 @@ import java.lang.reflect.Constructor;
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.List;
+import java.util.Objects;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.constants.CarbonCommonConstantsInternal;
@@ -190,6 +191,32 @@ m filterExpression
     }
   }
 
+  /**
+   * Set the column projection column names
+   *
+   * @param configuration     Configuration info
+   * @param projectionColumns projection columns name
+   */
+  public static void setColumnProjection(Configuration configuration, String[] projectionColumns) {
+    Objects.requireNonNull(projectionColumns);
+    if (projectionColumns.length < 1) {
+      throw new RuntimeException("Projection can't be empty");
+    }
+    StringBuilder builder = new StringBuilder();
+    for (String column : projectionColumns) {
+      builder.append(column).append(",");
+    }
+    String columnString = builder.toString();
+    columnString = columnString.substring(0, columnString.length() - 1);
+    configuration.set(COLUMN_PROJECTION, columnString);
+  }
+
+  /**
+   * Set the column projection column names from CarbonProjection
+   *
+   * @param configuration Configuration info
+   * @param projection    CarbonProjection object that includes unique projection column name
+   */
   public static void setColumnProjection(Configuration configuration, CarbonProjection projection) {
     if (projection == null || projection.isEmpty()) {
       return;

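At the configuration level the new overload behaves as a straight pass-through: the names, duplicates included, are joined with commas and stored under COLUMN_PROJECTION. A minimal stand-alone sketch (the column names are hypothetical):

    import org.apache.hadoop.conf.Configuration;

    import org.apache.carbondata.hadoop.api.CarbonInputFormat;

    Configuration conf = new Configuration();
    // The projection string becomes "name,name,age"; duplicates are kept,
    // which is what lets the SDK reader project the same column more than once.
    CarbonInputFormat.setColumnProjection(conf, new String[]{"name", "name", "age"});
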
http://git-wip-us.apache.org/repos/asf/carbondata/blob/df29e4fb/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index afb9b2f..61b37d5 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -317,7 +317,10 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
       s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
-
+    checkAnswer(sql("SELECT name,name FROM sdkOutputTable"), Seq(
+      Row("robot0", "robot0"),
+      Row("robot1", "robot1"),
+      Row("robot2", "robot2")))
     checkAnswer(sql("select * from sdkOutputTable"), Seq(
       Row("robot0", 0, 0.0),
       Row("robot1", 1, 0.5),
@@ -1529,6 +1532,12 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
       s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
+    sql("SELECT name,name FROM sdkOutputTable").show()
+    checkAnswer(sql("SELECT name,name FROM sdkOutputTable"), Seq(
+      Row("bob", "bob"),
+      Row("bob", "bob"),
+      Row("bob", "bob")))
+
     sql("select * from sdkOutputTable").show(false)
 
     // TODO: Add a validation

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df29e4fb/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index 4103c63..1e73e8c 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -28,7 +28,6 @@ import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.scan.expression.Expression;
-import org.apache.carbondata.hadoop.CarbonProjection;
 import org.apache.carbondata.hadoop.api.CarbonFileInputFormat;
 
 import org.apache.hadoop.conf.Configuration;
@@ -224,7 +223,7 @@ public class CarbonReaderBuilder {
     if (isProjectAllColumns) {
       projectAllColumns();
     }
-    format.setColumnProjection(job.getConfiguration(), new CarbonProjection(projectionColumns));
+    format.setColumnProjection(job.getConfiguration(), projectionColumns);
 
     final List<InputSplit> splits =
         format.getSplits(new JobContextImpl(job.getConfiguration(), new JobID()));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/df29e4fb/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 7a2a765..ee095a1 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -105,6 +105,47 @@ public class CarbonReaderTest extends TestCase {
   }
 
   @Test
+  public void testReadColumnTwice() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "name", "age", "name"})
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      // Default sort column is applied for dimensions. So, need to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(name[i], row[1]);
+      Assert.assertEquals(age[i], row[2]);
+      Assert.assertEquals(name[i], row[3]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
   public void testReadFilesParallel() throws IOException, InterruptedException {
     String path = "./testWriteFiles";
     FileUtils.deleteDirectory(new File(path));
@@ -836,23 +877,14 @@ public class CarbonReaderTest extends TestCase {
 
     TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
 
-    CarbonReader reader = CarbonReader
-        .builder(path, "_temp")
-        .projection(new String[]{})
-        .build();
-
-    // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
-      name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
-    }
-    // Default sort column is applied for dimensions. So, need  to validate accordingly
-
-    while (reader.hasNext()) {
-      Object[] row = (Object[]) reader.readNextRow();
-      assert(row.length==0);
+    try {
+      CarbonReader reader = CarbonReader
+          .builder(path, "_temp")
+          .projection(new String[]{})
+          .build();
+      assert (false);
+    } catch (RuntimeException e) {
+      assert (e.getMessage().equalsIgnoreCase("Projection can't be empty"));
     }
   }
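
A minimal client-side sketch of the projection behavior changed above: duplicate column names are now accepted, while an empty projection fails fast at build time. Paths and column names below are illustrative, borrowed from the test code:

```java
// Hypothetical usage of the duplicate-column projection introduced above;
// assumes SDK-written files already exist under ./testWriteFiles.
CarbonReader reader = CarbonReader
    .builder("./testWriteFiles", "_temp")
    .projection(new String[]{"name", "name", "age"})   // the same column may now appear more than once
    .build();

while (reader.hasNext()) {
  Object[] row = (Object[]) reader.readNextRow();
  System.out.println(row[0] + "\t" + row[1] + "\t" + row[2]);
}
reader.close();

// An empty projection now fails fast instead of returning empty rows:
// CarbonReader.builder("./testWriteFiles", "_temp").projection(new String[]{}).build();
// throws RuntimeException("Projection can't be empty")
```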
 


[10/26] carbondata git commit: [CARBONDATA-2519] Add document for CarbonReader

Posted by ra...@apache.org.
[CARBONDATA-2519] Add document for CarbonReader

Add document for CarbonReader

This closes #2337


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/9a90e17b
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/9a90e17b
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/9a90e17b

Branch: refs/heads/branch-1.4
Commit: 9a90e17ba0aa66501f44f4bf53fc1cca81e2fe3f
Parents: df9978a
Author: xubo245 <xu...@huawei.com>
Authored: Wed May 23 21:45:49 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 README.md                                       |   2 +-
 docs/sdk-guide.md                               | 591 +++++++++++++++++++
 docs/sdk-writer-guide.md                        | 400 -------------
 .../carbondata/sdk/file/CarbonReader.java       |   2 +-
 .../sdk/file/CarbonReaderBuilder.java           |  56 +-
 .../sdk/file/CarbonWriterBuilder.java           |   8 +-
 6 files changed, 642 insertions(+), 417 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/9a90e17b/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index be3186c..d8f7226 100644
--- a/README.md
+++ b/README.md
@@ -52,7 +52,7 @@ CarbonData is built using Apache Maven, to [build CarbonData](https://github.com
 * [Cluster Installation and Deployment](https://github.com/apache/carbondata/blob/master/docs/installation-guide.md)
 * [Configuring Carbondata](https://github.com/apache/carbondata/blob/master/docs/configuration-parameters.md)
 * [Streaming Ingestion](https://github.com/apache/carbondata/blob/master/docs/streaming-guide.md)
-* [SDK Writer Guide](https://github.com/apache/carbondata/blob/master/docs/sdk-writer-guide.md)
+* [SDK Guide](https://github.com/apache/carbondata/blob/master/docs/sdk-guide.md)
 * [CarbonData Pre-aggregate DataMap](https://github.com/apache/carbondata/blob/master/docs/datamap/preaggregate-datamap-guide.md)
 * [CarbonData Timeseries DataMap](https://github.com/apache/carbondata/blob/master/docs/datamap/timeseries-datamap-guide.md)
 * [FAQ](https://github.com/apache/carbondata/blob/master/docs/faq.md)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9a90e17b/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
new file mode 100644
index 0000000..4d258f0
--- /dev/null
+++ b/docs/sdk-guide.md
@@ -0,0 +1,591 @@
+# SDK Guide
+In the carbon jars package, there exists a carbondata-store-sdk-x.x.x-SNAPSHOT.jar, which includes the SDK writer and reader.
+# SDK Writer
+This SDK writer writes carbondata and carbonindex files at a given path.
+External clients can make use of this writer to convert data in other formats, or live data, into carbondata and index files.
+The SDK writer output contains only carbondata and carbonindex files; no metadata folder will be present.
+
+## Quick example
+
+### Example with csv format 
+
+```java
+ import java.io.IOException;
+ 
+ import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
+ import org.apache.carbondata.core.metadata.datatype.DataTypes;
+ import org.apache.carbondata.core.util.CarbonProperties;
+ import org.apache.carbondata.sdk.file.CarbonWriter;
+ import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
+ import org.apache.carbondata.sdk.file.Field;
+ import org.apache.carbondata.sdk.file.Schema;
+ 
+ public class TestSdk {
+
+   // pass true or false while executing the main to use offheap memory or not
+   public static void main(String[] args) throws IOException, InvalidLoadOptionException {
+     if (args.length > 0 && args[0] != null) {
+       testSdkWriter(args[0]);
+     } else {
+       testSdkWriter("true");
+     }
+   }
+ 
+   public static void testSdkWriter(String enableOffheap) throws IOException, InvalidLoadOptionException {
+     String path = "./target/testCSVSdkWriter";
+ 
+     Field[] fields = new Field[2];
+     fields[0] = new Field("name", DataTypes.STRING);
+     fields[1] = new Field("age", DataTypes.INT);
+ 
+     Schema schema = new Schema(fields);
+
+     CarbonProperties.getInstance().addProperty("enable.offheap.sort", enableOffheap);
+ 
+     CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path);
+ 
+     CarbonWriter writer = builder.buildWriterForCSVInput(schema);
+ 
+     int rows = 5;
+     for (int i = 0; i < rows; i++) {
+       writer.write(new String[] { "robot" + (i % 10), String.valueOf(i) });
+     }
+     writer.close();
+   }
+ }
+```
+
+### Example with Avro format
+```java
+import java.io.IOException;
+
+import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.sdk.file.AvroCarbonWriter;
+import org.apache.carbondata.sdk.file.CarbonWriter;
+import org.apache.carbondata.sdk.file.Field;
+
+import org.apache.avro.generic.GenericData;
+import org.apache.commons.lang.CharEncoding;
+
+import tech.allegro.schema.json2avro.converter.JsonAvroConverter;
+
+public class TestSdkAvro {
+
+  public static void main(String[] args) throws IOException, InvalidLoadOptionException {
+    testSdkWriter();
+  }
+
+
+  public static void testSdkWriter() throws IOException, InvalidLoadOptionException {
+    String path = "./AvroCarbonWriterSuiteWriteFiles";
+    // Avro schema
+    String avroSchema =
+        "{" +
+            "   \"type\" : \"record\"," +
+            "   \"name\" : \"Acme\"," +
+            "   \"fields\" : ["
+            + "{ \"name\" : \"fname\", \"type\" : \"string\" },"
+            + "{ \"name\" : \"age\", \"type\" : \"int\" }]" +
+            "}";
+
+    String json = "{\"fname\":\"bob\", \"age\":10}";
+
+    // conversion to GenericData.Record
+    JsonAvroConverter converter = new JsonAvroConverter();
+    GenericData.Record record = converter.convertToGenericDataRecord(
+        json.getBytes(CharEncoding.UTF_8), new org.apache.avro.Schema.Parser().parse(avroSchema));
+
+    try {
+      CarbonWriter writer = CarbonWriter.builder()
+          .outputPath(path)
+          .buildWriterForAvroInput(new org.apache.avro.Schema.Parser().parse(avroSchema));
+
+      for (int i = 0; i < 100; i++) {
+        writer.write(record);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+}
+```
+
+## Datatypes Mapping
+Each SQL data type is mapped to a corresponding SDK data type. The mappings are as follows:
+
+| SQL DataTypes | Mapped SDK DataTypes |
+|---------------|----------------------|
+| BOOLEAN | DataTypes.BOOLEAN |
+| SMALLINT | DataTypes.SHORT |
+| INTEGER | DataTypes.INT |
+| BIGINT | DataTypes.LONG |
+| DOUBLE | DataTypes.DOUBLE |
+| VARCHAR | DataTypes.STRING |
+| DATE | DataTypes.DATE |
+| TIMESTAMP | DataTypes.TIMESTAMP |
+| STRING | DataTypes.STRING |
+| DECIMAL | DataTypes.createDecimalType(precision, scale) |
+
+
+## API List
+
+### Class org.apache.carbondata.sdk.file.CarbonWriterBuilder
+```
+/**
+* Sets the output path of the writer builder
+* @param path is the absolute path where output files are written
+*             This method must be called when building CarbonWriterBuilder
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder outputPath(String path);
+```
+
+```
+/**
+* If set false, writes the carbondata and carbonindex files in a flat folder structure
+* @param isTransactionalTable is a boolean value
+*             if set to false, then writes the carbondata and carbonindex files
+*                                                            in a flat folder structure.
+*             if set to true, then writes the carbondata and carbonindex files
+*                                                            in segment folder structure.
+*             By default set to false.
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder isTransactionalTable(boolean isTransactionalTable);
+```
+
+```
+/**
+* to set the timestamp in the carbondata and carbonindex index files
+* @param UUID is a timestamp to be used in the carbondata and carbonindex index files.
+*             By default set to zero.
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder uniqueIdentifier(long UUID);
+```
+
+```
+/**
+* To set the carbondata file size in MB between 1MB-2048MB
+* @param blockSize is size in MB between 1MB to 2048 MB
+*                  default value is 1024 MB
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder withBlockSize(int blockSize);
+```
+
+```
+/**
+* To set the blocklet size of carbondata file
+* @param blockletSize is blocklet size in MB
+*                     default value is 64 MB
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder withBlockletSize(int blockletSize);
+```
+
+```
+/**
+* Sets the list of columns that need to be in sorted order
+* @param sortColumns is a string array of columns that need to be sorted.
+*                    If it is null or not specified, all dimensions are selected for sorting by default
+*                    If it is an empty array, no columns are sorted
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder sortBy(String[] sortColumns);
+```
+
+```
+/**
+* If set, creates a schema file in the metadata folder.
+* @param persist is a boolean value. If set to true, creates a schema file in the metadata folder.
+*                By default set to false, which will not create the metadata folder.
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder persistSchemaFile(boolean persist);
+```
+
+```
+/**
+* Sets the taskNo for the writer. Concurrently running SDK writers
+* should set distinct taskNo values to avoid file name conflicts during write.
+* @param taskNo is the taskNo the user wants to specify.
+*               By default it is the system time in nanoseconds.
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder taskNo(String taskNo);
+```
+
+```
+/**
+* To support the load options for the SDK writer
+* @param options key-value pairs of load options.
+*                Supported keys and values are
+*                a. bad_records_logger_enable -- true (write into separate logs), false
+*                b. bad_records_action -- FAIL, FORCE, IGNORE, REDIRECT
+*                c. bad_record_path -- path
+*                d. dateformat -- same as JAVA SimpleDateFormat
+*                e. timestampformat -- same as JAVA SimpleDateFormat
+*                f. complex_delimiter_level_1 -- value to Split the complexTypeData
+*                g. complex_delimiter_level_2 -- value to Split the nested complexTypeData
+*                h. quotechar
+*                i. escapechar
+*
+*                Default values are as follows.
+*
+*                a. bad_records_logger_enable -- "false"
+*                b. bad_records_action -- "FAIL"
+*                c. bad_record_path -- ""
+*                d. dateformat -- "", uses the value from the carbon.properties file
+*                e. timestampformat -- "", uses the value from the carbon.properties file
+*                f. complex_delimiter_level_1 -- "$"
+*                g. complex_delimiter_level_2 -- ":"
+*                h. quotechar -- "\""
+*                i. escapechar -- "\\"
+*
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder withLoadOptions(Map<String, String> options);
+```
+
+```
+/**
+* Build a {@link CarbonWriter}, which accepts rows in CSV format
+* @param schema carbon Schema object {org.apache.carbondata.sdk.file.Schema}
+* @return CSVCarbonWriter
+* @throws IOException
+* @throws InvalidLoadOptionException
+*/
+public CarbonWriter buildWriterForCSVInput(Schema schema) throws IOException, InvalidLoadOptionException;
+```
+
+```  
+/**
+* Build a {@link CarbonWriter}, which accepts an Avro format object
+* @param avroSchema avro Schema object {org.apache.avro.Schema}
+* @return AvroCarbonWriter
+* @throws IOException
+* @throws InvalidLoadOptionException
+*/
+public CarbonWriter buildWriterForAvroInput(org.apache.avro.Schema avroSchema) throws IOException, InvalidLoadOptionException;
+```
+
+### Class org.apache.carbondata.sdk.file.CarbonWriter
+```
+/**
+* Write an object to the file; the format of the object depends on the implementation
+* If AvroCarbonWriter, object is of type org.apache.avro.generic.GenericData.Record 
+* If CSVCarbonWriter, object is of type String[]
+* Note: This API is not thread safe
+* @param object
+* @throws IOException
+*/
+public abstract void write(Object object) throws IOException;
+```
+
+```
+/**
+* Flush and close the writer
+*/
+public abstract void close() throws IOException;
+```
+
+```
+/**
+* Create a {@link CarbonWriterBuilder} to build a {@link CarbonWriter}
+*/
+public static CarbonWriterBuilder builder() {
+    return new CarbonWriterBuilder();
+}
+```
+
+### Class org.apache.carbondata.sdk.file.Field
+```
+/**
+* Field Constructor
+* @param name name of the field
+* @param type datatype of the field, specified as a string.
+*/
+public Field(String name, String type);
+```
+
+```
+/**
+* Field constructor
+* @param name name of the field
+* @param type datatype of the field of class DataType
+*/
+public Field(String name, DataType type);  
+```
+
+### Class org.apache.carbondata.sdk.file.Schema
+
+```
+/**
+* construct a schema with fields
+* @param fields
+*/
+public Schema(Field[] fields);
+```
+
+```
+/**
+* Create a Schema using JSON string, for example:
+* [
+*   {"name":"string"},
+*   {"age":"int"}
+* ] 
+* @param json specified as string
+* @return Schema
+*/
+public static Schema parseJson(String json);
+```
+
+### Class org.apache.carbondata.core.util.CarbonProperties
+
+```
+/**
+* This method will be responsible to get the instance of CarbonProperties class
+*
+* @return carbon properties instance
+*/
+public static CarbonProperties getInstance();
+```
+
+```
+/**
+* This method will be used to add a new property
+*
+* @param key is a property name to set for carbon.
+* @param value is valid parameter corresponding to property.
+* @return CarbonProperties object
+*/
+public CarbonProperties addProperty(String key, String value);
+```
+
+```
+/**
+* This method will be used to get the property value. If the property is not
+* present, then it will return null.
+*
+* @param key is a property name to get user specified value.
+* @return properties value for corresponding key. If not set, then returns null.
+*/
+public String getProperty(String key);
+```
+
+```
+/**
+* This method will be used to get the property value. If the property is not
+* present, then it will return the default value.
+*
+* @param key is a property name to get the user specified value.
+* @param defaultValue value to be returned by the function if the corresponding key is not set.
+* @return properties value for corresponding key. If not set, then returns specified defaultValue.
+*/
+public String getProperty(String key, String defaultValue);
+```
+Reference : [list of carbon properties](http://carbondata.apache.org/configuration-parameters.html)
+
+### Class org.apache.carbondata.sdk.file.AvroCarbonWriter
+```
+/**
+* converts avro schema to carbon schema, required by carbonWriter
+*
+* @param avroSchemaString json formatted avro schema as string
+* @return carbon sdk schema
+*/
+public static org.apache.carbondata.sdk.file.Schema getCarbonSchemaFromAvroSchema(String avroSchemaString);
+```
+# SDK Reader
+This SDK reader reads carbondata and carbonindex files at a given path.
+External clients can make use of this reader to read CarbonData files without a CarbonSession.
+## Quick example
+```
+    // 1. Create carbon reader
+    String path = "./testWriteFiles";
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "age"})
+        .build();
+
+    // 2. Read data
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      System.out.println(row[0] + "\t" + row[1]);
+      i++;
+    }
+    
+    // 3. Close this reader
+    reader.close();
+```
+
+Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java) in the CarbonData repo.
+
+## API List
+
+### Class org.apache.carbondata.sdk.file.CarbonReader
+```
+ /**
+  * Return a new CarbonReaderBuilder instance
+  */
+  public static CarbonReaderBuilder builder(String tablePath, String tableName);
+```
+
+```
+  /**
+   * Read carbondata file and return the schema
+   */
+  public static List<ColumnSchema> readSchemaInDataFile(String dataFilePath);
+```
+
+```
+ /**
+  * Read schema file and return table info object
+  */
+  public static TableInfo readSchemaFile(String schemaFilePath);
+```
+
+```
+  /**
+   * Return true if has next row
+   */
+  public boolean hasNext();
+```
+
+```
+  /**
+   * Read and return next row object
+   */
+  public T readNextRow();
+```
+
+```
+  /**
+   * Close reader
+   */
+  public void close();
+```
+
+### Class org.apache.carbondata.sdk.file.CarbonReaderBuilder
+```
+  /**
+   * Construct a CarbonReaderBuilder with table path and table name
+   *
+   * @param tablePath table path
+   * @param tableName table name
+   */
+  CarbonReaderBuilder(String tablePath, String tableName);
+```
+
+```
+  /**
+   * Configure the projection column names of carbon reader
+   *
+   * @param projectionColumnNames projection column names
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder projection(String[] projectionColumnNames);
+```
+
+```
+  /**
+   * Configure the transactional status of table
+   * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
+   * If set to true, then reads the carbondata and carbonindex files from segment folder structure.
+   * Default value is true
+   *
+   * @param isTransactionalTable whether the table is transactional or not
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder isTransactionalTable(boolean isTransactionalTable);
+```
+
+```
+ /**
+  * Configure the filter expression for carbon reader
+  *
+  * @param filterExpression filter expression
+  * @return CarbonReaderBuilder object
+  */
+  public CarbonReaderBuilder filter(Expression filterExpression);
+```
+
+```
+  /**
+   * Set the access key for S3
+   *
+   * @param key   the property name of the access key for the S3 type in use, like: fs.s3a.access.key
+   * @param value the value of the access key
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setAccessKey(String key, String value);
+```
+
+```
+  /**
+   * Set the access key for S3.
+   *
+   * @param value the value of access key
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setAccessKey(String value);
+```
+
+```
+  /**
+   * Set the secret key for S3
+   *
+   * @param key   the property name of the secret key for the S3 type in use, like: fs.s3a.secret.key
+   * @param value the value of the secret key
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setSecretKey(String key, String value);
+```
+
+```
+  /**
+   * Set the secret key for S3
+   *
+   * @param value the value of secret key
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setSecretKey(String value);
+```
+
+```
+ /**
+   * Set the endpoint for S3
+   *
+   * @param key   the property name of the endpoint for the S3 type in use, like: fs.s3a.endpoint
+   * @param value the value of the endpoint
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setEndPoint(String key, String value);
+```
+
+``` 
+  /**
+   * Set the endpoint for S3
+   *
+   * @param value the value of endpoint
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setEndPoint(String value);
+```
+
+```
+ /**
+   * Build CarbonReader
+   *
+   * @param <T>
+   * @return CarbonReader
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public <T> CarbonReader<T> build();
+```
+Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java) in the CarbonData repo.
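
As a complement to the S3 setters documented above, here is a hedged read sketch; the bucket path, keys, and endpoint are placeholders, and the s3a path scheme is an assumption based on the fs.s3a.* property names:

```java
// Sketch only: reading from S3 with the builder's S3 setters.
// The table path and all credential values below are placeholders.
CarbonReader reader = CarbonReader
    .builder("s3a://my-bucket/carbon-files", "_temp")  // assumed S3A table path
    .projection(new String[]{"name", "age"})
    .setAccessKey("<access-key>")
    .setSecretKey("<secret-key>")
    .setEndPoint("<endpoint>")
    .build();
```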

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9a90e17b/docs/sdk-writer-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-writer-guide.md b/docs/sdk-writer-guide.md
deleted file mode 100644
index 3d9a3de..0000000
--- a/docs/sdk-writer-guide.md
+++ /dev/null
@@ -1,400 +0,0 @@
-# SDK Writer Guide
-In the carbon jars package, there exist a carbondata-store-sdk-x.x.x-SNAPSHOT.jar.
-This SDK writer, writes carbondata file and carbonindex file at a given path.
-External client can make use of this writer to convert other format data or live data to create carbondata and index files.
-These SDK writer output contains just a carbondata and carbonindex files. No metadata folder will be present.
-
-## Quick example
-
-### Example with csv format 
-
-```java
- import java.io.IOException;
- 
- import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
- import org.apache.carbondata.core.metadata.datatype.DataTypes;
- import org.apache.carbondata.core.util.CarbonProperties;
- import org.apache.carbondata.sdk.file.CarbonWriter;
- import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
- import org.apache.carbondata.sdk.file.Field;
- import org.apache.carbondata.sdk.file.Schema;
- 
- public class TestSdk {
-
-   // pass true or false while executing the main to use offheap memory or not
-   public static void main(String[] args) throws IOException, InvalidLoadOptionException {
-     if (args.length > 0 && args[0] != null) {
-       testSdkWriter(args[0]);
-     } else {
-       testSdkWriter("true");
-     }
-   }
- 
-   public static void testSdkWriter(String enableOffheap) throws IOException, InvalidLoadOptionException {
-     String path = "./target/testCSVSdkWriter";
- 
-     Field[] fields = new Field[2];
-     fields[0] = new Field("name", DataTypes.STRING);
-     fields[1] = new Field("age", DataTypes.INT);
- 
-     Schema schema = new Schema(fields);
-
-     CarbonProperties.getInstance().addProperty("enable.offheap.sort", enableOffheap);
- 
-     CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path);
- 
-     CarbonWriter writer = builder.buildWriterForCSVInput(schema);
- 
-     int rows = 5;
-     for (int i = 0; i < rows; i++) {
-       writer.write(new String[] { "robot" + (i % 10), String.valueOf(i) });
-     }
-     writer.close();
-   }
- }
-```
-
-### Example with Avro format
-```java
-import java.io.IOException;
-
-import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
-import org.apache.carbondata.core.metadata.datatype.DataTypes;
-import org.apache.carbondata.sdk.file.AvroCarbonWriter;
-import org.apache.carbondata.sdk.file.CarbonWriter;
-import org.apache.carbondata.sdk.file.Field;
-
-import org.apache.avro.generic.GenericData;
-import org.apache.commons.lang.CharEncoding;
-
-import tech.allegro.schema.json2avro.converter.JsonAvroConverter;
-
-public class TestSdkAvro {
-
-  public static void main(String[] args) throws IOException, InvalidLoadOptionException {
-    testSdkWriter();
-  }
-
-
-  public static void testSdkWriter() throws IOException, InvalidLoadOptionException {
-    String path = "./AvroCarbonWriterSuiteWriteFiles";
-    // Avro schema
-    String avroSchema =
-        "{" +
-            "   \"type\" : \"record\"," +
-            "   \"name\" : \"Acme\"," +
-            "   \"fields\" : ["
-            + "{ \"name\" : \"fname\", \"type\" : \"string\" },"
-            + "{ \"name\" : \"age\", \"type\" : \"int\" }]" +
-            "}";
-
-    String json = "{\"fname\":\"bob\", \"age\":10}";
-
-    // conversion to GenericData.Record
-    JsonAvroConverter converter = new JsonAvroConverter();
-    GenericData.Record record = converter.convertToGenericDataRecord(
-        json.getBytes(CharEncoding.UTF_8), new org.apache.avro.Schema.Parser().parse(avroSchema));
-
-    try {
-      CarbonWriter writer = CarbonWriter.builder()
-          .outputPath(path)
-          .buildWriterForAvroInput(new org.apache.avro.Schema.Parser().parse(avroSchema));
-
-      for (int i = 0; i < 100; i++) {
-        writer.write(record);
-      }
-      writer.close();
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-}
-```
-
-## Datatypes Mapping
-Each of SQL data types are mapped into data types of SDK. Following are the mapping:
-
-| SQL DataTypes | Mapped SDK DataTypes |
-|---------------|----------------------|
-| BOOLEAN | DataTypes.BOOLEAN |
-| SMALLINT | DataTypes.SHORT |
-| INTEGER | DataTypes.INT |
-| BIGINT | DataTypes.LONG |
-| DOUBLE | DataTypes.DOUBLE |
-| VARCHAR | DataTypes.STRING |
-| DATE | DataTypes.DATE |
-| TIMESTAMP | DataTypes.TIMESTAMP |
-| STRING | DataTypes.STRING |
-| DECIMAL | DataTypes.createDecimalType(precision, scale) |
-
-
-## API List
-
-### Class org.apache.carbondata.sdk.file.CarbonWriterBuilder
-```
-/**
-* Sets the output path of the writer builder
-* @param path is the absolute path where output files are written
-*             This method must be called when building CarbonWriterBuilder
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder outputPath(String path);
-```
-
-```
-/**
-* If set false, writes the carbondata and carbonindex files in a flat folder structure
-* @param isTransactionalTable is a boolelan value
-*             if set to false, then writes the carbondata and carbonindex files
-*                                                            in a flat folder structure.
-*             if set to true, then writes the carbondata and carbonindex files
-*                                                            in segment folder structure..
-*             By default set to false.
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder isTransactionalTable(boolean isTransactionalTable);
-```
-
-```
-/**
-* to set the timestamp in the carbondata and carbonindex index files
-* @param UUID is a timestamp to be used in the carbondata and carbonindex index files.
-*             By default set to zero.
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder uniqueIdentifier(long UUID);
-```
-
-```
-/**
-* To set the carbondata file size in MB between 1MB-2048MB
-* @param blockSize is size in MB between 1MB to 2048 MB
-*                  default value is 1024 MB
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder withBlockSize(int blockSize);
-```
-
-```
-/**
-* To set the blocklet size of carbondata file
-* @param blockletSize is blocklet size in MB
-*                     default value is 64 MB
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder withBlockletSize(int blockletSize);
-```
-
-```
-/**
-* sets the list of columns that needs to be in sorted order
-* @param sortColumns is a string array of columns that needs to be sorted.
-*                    If it is null or by default all dimensions are selected for sorting
-*                    If it is empty array, no columns are sorted
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder sortBy(String[] sortColumns);
-```
-
-```
-/**
-* If set, create a schema file in metadata folder.
-* @param persist is a boolean value, If set to true, creates a schema file in metadata folder.
-*                By default set to false. will not create metadata folder
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder persistSchemaFile(boolean persist);
-```
-
-```
-/**
-* sets the taskNo for the writer. SDKs concurrently running
-* will set taskNo in order to avoid conflicts in file's name during write.
-* @param taskNo is the TaskNo user wants to specify.
-*               by default it is system time in nano seconds.
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder taskNo(String taskNo);
-```
-
-```
-/**
-* To support the load options for sdk writer
-* @param options key,value pair of load options.
-*                supported keys values are
-*                a. bad_records_logger_enable -- true (write into separate logs), false
-*                b. bad_records_action -- FAIL, FORCE, IGNORE, REDIRECT
-*                c. bad_record_path -- path
-*                d. dateformat -- same as JAVA SimpleDateFormat
-*                e. timestampformat -- same as JAVA SimpleDateFormat
-*                f. complex_delimiter_level_1 -- value to Split the complexTypeData
-*                g. complex_delimiter_level_2 -- value to Split the nested complexTypeData
-*                h. quotechar
-*                i. escapechar
-*
-*                Default values are as follows.
-*
-*                a. bad_records_logger_enable -- "false"
-*                b. bad_records_action -- "FAIL"
-*                c. bad_record_path -- ""
-*                d. dateformat -- "" , uses from carbon.properties file
-*                e. timestampformat -- "", uses from carbon.properties file
-*                f. complex_delimiter_level_1 -- "$"
-*                g. complex_delimiter_level_2 -- ":"
-*                h. quotechar -- "\""
-*                i. escapechar -- "\\"
-*
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder withLoadOptions(Map<String, String> options);
-```
-
-```
-/**
-* Build a {@link CarbonWriter}, which accepts row in CSV format object
-* @param schema carbon Schema object {org.apache.carbondata.sdk.file.Schema}
-* @return CSVCarbonWriter
-* @throws IOException
-* @throws InvalidLoadOptionException
-*/
-public CarbonWriter buildWriterForCSVInput() throws IOException, InvalidLoadOptionException;
-```
-
-```  
-/**
-* Build a {@link CarbonWriter}, which accepts Avro format object
-* @param avroSchema avro Schema object {org.apache.avro.Schema}
-* @return AvroCarbonWriter 
-* @throws IOException
-* @throws InvalidLoadOptionException
-*/
-public CarbonWriter buildWriterForAvroInput() throws IOException, InvalidLoadOptionException;
-```
-
-### Class org.apache.carbondata.sdk.file.CarbonWriter
-```
-/**
-* Write an object to the file, the format of the object depends on the implementation
-* If AvroCarbonWriter, object is of type org.apache.avro.generic.GenericData.Record 
-* If CSVCarbonWriter, object is of type String[]
-* Note: This API is not thread safe
-* @param object
-* @throws IOException
-*/
-public abstract void write(Object object) throws IOException;
-```
-
-```
-/**
-* Flush and close the writer
-*/
-public abstract void close() throws IOException;
-```
-
-```
-/**
-* Create a {@link CarbonWriterBuilder} to build a {@link CarbonWriter}
-*/
-public static CarbonWriterBuilder builder() {
-return new CarbonWriterBuilder();
-}
-```
-
-### Class org.apache.carbondata.sdk.file.Field
-```
-/**
-* Field Constructor
-* @param name name of the field
-* @param type datatype of field, specified in strings.
-*/
-public Field(String name, String type);
-```
-
-```
-/**
-* Field constructor
-* @param name name of the field
-* @param type datatype of the field of class DataType
-*/
-public Field(String name, DataType type);  
-```
-
-### Class org.apache.carbondata.sdk.file.Schema
-
-```
-/**
-* construct a schema with fields
-* @param fields
-*/
-public Schema(Field[] fields);
-```
-
-```
-/**
-* Create a Schema using JSON string, for example:
-* [
-*   {"name":"string"},
-*   {"age":"int"}
-* ] 
-* @param json specified as string
-* @return Schema
-*/
-public static Schema parseJson(String json);
-```
-
-### Class org.apache.carbondata.core.util.CarbonProperties
-
-```
-/**
-* This method will be responsible to get the instance of CarbonProperties class
-*
-* @return carbon properties instance
-*/
-public static CarbonProperties getInstance();
-```
-
-```
-/**
-* This method will be used to add a new property
-*
-* @param key is a property name to set for carbon.
-* @param value is valid parameter corresponding to property.
-* @return CarbonProperties object
-*/
-public CarbonProperties addProperty(String key, String value);
-```
-
-```
-/**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value.
-* @return properties value for corresponding key. If not set, then returns null.
-*/
-public String getProperty(String key);
-```
-
-```
-/**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value..
-* @param defaultValue used to be returned by function if corrosponding key not set.
-* @return properties value for corresponding key. If not set, then returns specified defaultValue.
-*/
-public String getProperty(String key, String defaultValue);
-```
-Reference : [list of carbon properties](http://carbondata.apache.org/configuration-parameters.html)
-
-### Class org.apache.carbondata.sdk.file.AvroCarbonWriter
-```
-/**
-* converts avro schema to carbon schema, required by carbonWriter
-*
-* @param avroSchemaString json formatted avro schema as string
-* @return carbon sdk schema
-*/
-public static org.apache.carbondata.sdk.file.Schema getCarbonSchemaFromAvroSchema(String avroSchemaString);
-```

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9a90e17b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index 6517e89..d85bf4b 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -102,7 +102,7 @@ public class CarbonReader<T> {
   }
 
   /**
-   * Read schmea file and return table info object
+   * Read schema file and return table info object
    */
   public static TableInfo readSchemaFile(String schemaFilePath) throws IOException {
     org.apache.carbondata.format.TableInfo tableInfo = CarbonUtil.readSchemaFile(schemaFilePath);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9a90e17b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index 946ea0f..c78cda0 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -50,26 +50,53 @@ public class CarbonReaderBuilder {
   private String tableName;
   private boolean isTransactionalTable = true;
 
+  /**
+   * Construct a CarbonReaderBuilder with table path and table name
+   *
+   * @param tablePath table path
+   * @param tableName table name
+   */
   CarbonReaderBuilder(String tablePath, String tableName) {
     this.tablePath = tablePath;
     this.tableName = tableName;
   }
 
+  /**
+   * Configure the projection column names of carbon reader
+   *
+   * @param projectionColumnNames projection column names
+   * @return CarbonReaderBuilder object
+   */
   public CarbonReaderBuilder projection(String[] projectionColumnNames) {
     Objects.requireNonNull(projectionColumnNames);
     this.projectionColumns = projectionColumnNames;
     return this;
   }
 
+  /**
+   * Configure the transactional status of table
+   * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
+   * If set to true, then reads the carbondata and carbonindex files from segment folder structure.
+   * Default value is true
+   *
+   * @param isTransactionalTable whether is transactional table or not
+   * @return CarbonReaderBuilder object
+   */
   public CarbonReaderBuilder isTransactionalTable(boolean isTransactionalTable) {
     Objects.requireNonNull(isTransactionalTable);
     this.isTransactionalTable = isTransactionalTable;
     return this;
   }
 
-  public CarbonReaderBuilder filter(Expression fileterExpression) {
-    Objects.requireNonNull(fileterExpression);
-    this.filterExpression = fileterExpression;
+  /**
+   * Configure the filter expression for carbon reader
+   *
+   * @param filterExpression filter expression
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder filter(Expression filterExpression) {
+    Objects.requireNonNull(filterExpression);
+    this.filterExpression = filterExpression;
     return this;
   }
 
@@ -78,7 +105,7 @@ public class CarbonReaderBuilder {
    *
    * @param key   the string of access key for different S3 type,like: fs.s3a.access.key
    * @param value the value of access key
-   * @return CarbonWriterBuilder
+   * @return CarbonWriterBuilder object
    */
   public CarbonReaderBuilder setAccessKey(String key, String value) {
     FileFactory.getConfiguration().set(key, value);
@@ -89,7 +116,7 @@ public class CarbonReaderBuilder {
    * Set the access key for S3.
    *
    * @param value the value of access key
-   * @return CarbonWriterBuilder
+   * @return CarbonWriterBuilder object
    */
   public CarbonReaderBuilder setAccessKey(String value) {
     return setAccessKey(Constants.ACCESS_KEY, value);
@@ -100,7 +127,7 @@ public class CarbonReaderBuilder {
    *
    * @param key   the string of secret key for different S3 type,like: fs.s3a.secret.key
    * @param value the value of secret key
-   * @return CarbonWriterBuilder
+   * @return CarbonWriterBuilder object
    */
   public CarbonReaderBuilder setSecretKey(String key, String value) {
     FileFactory.getConfiguration().set(key, value);
@@ -111,7 +138,7 @@ public class CarbonReaderBuilder {
    * Set the secret key for S3
    *
    * @param value the value of secret key
-   * @return CarbonWriterBuilder
+   * @return CarbonWriterBuilder object
    */
   public CarbonReaderBuilder setSecretKey(String value) {
     return setSecretKey(Constants.SECRET_KEY, value);
@@ -122,7 +149,7 @@ public class CarbonReaderBuilder {
    *
    * @param key   the string of endpoint for different S3 type,like: fs.s3a.endpoint
    * @param value the value of endpoint
-   * @return CarbonWriterBuilder
+   * @return CarbonWriterBuilder object
    */
   public CarbonReaderBuilder setEndPoint(String key, String value) {
     FileFactory.getConfiguration().set(key, value);
@@ -133,13 +160,20 @@ public class CarbonReaderBuilder {
    * Set the endpoint for S3
    *
    * @param value the value of endpoint
-   * @return CarbonWriterBuilder
+   * @return CarbonWriterBuilder object
    */
   public CarbonReaderBuilder setEndPoint(String value) {
-    FileFactory.getConfiguration().set(Constants.ENDPOINT, value);
-    return this;
+    return setEndPoint(Constants.ENDPOINT, value);
   }
 
+  /**
+   * Build CarbonReader
+   *
+   * @param <T>
+   * @return CarbonReader
+   * @throws IOException
+   * @throws InterruptedException
+   */
   public <T> CarbonReader<T> build() throws IOException, InterruptedException {
     CarbonTable table = CarbonTable.buildFromTablePath(tableName, tablePath, isTransactionalTable);
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9a90e17b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index 2277ab0..e2dc8c2 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -120,10 +120,10 @@ public class CarbonWriterBuilder {
   /**
    * If set false, writes the carbondata and carbonindex files in a flat folder structure
    * @param isTransactionalTable is a boolelan value
-   * if set to false, then writes the carbondata and carbonindex files
+   * If set to false, then writes the carbondata and carbonindex files
    * in a flat folder structure.
-   * if set to true, then writes the carbondata and carbonindex files
-   * in segment folder structure..
+   * If set to true, then writes the carbondata and carbonindex files
+   * in segment folder structure.
    * By default set to false.
    * @return updated CarbonWriterBuilder
    */
@@ -285,7 +285,7 @@ public class CarbonWriterBuilder {
   }
 
   /**
-   * To set the blocklet size of carbondata file
+   * To set the blocklet size of CarbonData file
    * @param blockletSize is blocklet size in MB
    * default value is 64 MB
    * @return updated CarbonWriterBuilder
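
Putting the writer and reader APIs documented in this commit together, a minimal end-to-end sketch (the output path and row values are illustrative; the reader's isTransactionalTable is switched off on the assumption that it must match the writer's flat-folder default described above):

```java
import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.sdk.file.CarbonReader;
import org.apache.carbondata.sdk.file.CarbonWriter;
import org.apache.carbondata.sdk.file.Field;
import org.apache.carbondata.sdk.file.Schema;

public class WriteThenRead {
  public static void main(String[] args) throws Exception {
    String path = "./writeThenRead";   // illustrative output path

    // Write a small data set with the documented CSV writer (flat folder by default).
    Field[] fields = new Field[]{
        new Field("name", DataTypes.STRING),
        new Field("age", DataTypes.INT)};
    CarbonWriter writer = CarbonWriter.builder()
        .outputPath(path)
        .buildWriterForCSVInput(new Schema(fields));
    for (int i = 0; i < 10; i++) {
      writer.write(new String[]{"robot" + i, String.valueOf(i)});
    }
    writer.close();

    // Read it back; the writer produced a flat folder structure,
    // so transactional reading is disabled to match.
    CarbonReader reader = CarbonReader.builder(path, "_temp")
        .projection(new String[]{"name", "age"})
        .isTransactionalTable(false)
        .build();
    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      System.out.println(row[0] + "\t" + row[1]);
    }
    reader.close();
  }
}
```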


[21/26] carbondata git commit: [CARBONDATA-2521] Support create carbonReader without tableName

Posted by ra...@apache.org.
[CARBONDATA-2521] Support create carbonReader without tableName

Add new method for creating carbonReader without tableName

1. Add a new interface: public static CarbonReaderBuilder builder(String tablePath)
2. The default table name is "UnknownTable" + current time

This closes #2336
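
A minimal sketch of the new single-argument builder (the path is illustrative; per the code below, the generated table name is "UnknownTable" plus a timestamp):

```java
// Sketch: no table name needed; one is generated internally,
// so only the store path is supplied.
CarbonReader reader = CarbonReader
    .builder("./testWriteFiles")               // illustrative path
    .projection(new String[]{"name", "age"})
    .build();
```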


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/43e0c59f
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/43e0c59f
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/43e0c59f

Branch: refs/heads/branch-1.4
Commit: 43e0c59fc0245b879f843a6fdbdc5ef65630b506
Parents: cb71ffe
Author: xubo245 <xu...@huawei.com>
Authored: Wed May 23 21:08:23 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               | 21 +++++-
 .../carbondata/sdk/file/CarbonReader.java       | 19 +++++
 .../carbondata/sdk/file/CarbonReaderTest.java   | 76 ++++++++++++++++++++
 3 files changed, 113 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/43e0c59f/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 1d225a9..360516a 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -429,14 +429,29 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 
 ### Class org.apache.carbondata.sdk.file.CarbonReader
 ```
- /**
-  * Return a new CarbonReaderBuilder instance
-  */
+   /**
+    * Return a new {@link CarbonReaderBuilder} instance
+    *
+    * @param tablePath table store path
+    * @param tableName table name
+    * @return CarbonReaderBuilder object
+    */
   public static CarbonReaderBuilder builder(String tablePath, String tableName);
 ```
 
 ```
   /**
+   * Return a new CarbonReaderBuilder instance
+   * Default value of the table name is "UnknownTable" + current time
+   *
+   * @param tablePath table path
+   * @return CarbonReaderBuilder object
+   */
+  public static CarbonReaderBuilder builder(String tablePath);
+```
+
+```
+  /**
    * Return true if has next row
    */
   public boolean hasNext();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/43e0c59f/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index 60ead05..81db7b2 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -18,6 +18,8 @@
 package org.apache.carbondata.sdk.file;
 
 import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
@@ -83,12 +85,29 @@ public class CarbonReader<T> {
 
   /**
    * Return a new {@link CarbonReaderBuilder} instance
+   *
+   * @param tablePath table store path
+   * @param tableName table name
+   * @return CarbonReaderBuilder object
    */
   public static CarbonReaderBuilder builder(String tablePath, String tableName) {
     return new CarbonReaderBuilder(tablePath, tableName);
   }
 
   /**
+   * Return a new {@link CarbonReaderBuilder} instance
+   * Default value of the table name is "UnknownTable" + current time
+   *
+   * @param tablePath table path
+   * @return CarbonReaderBuilder object
+   */
+  public static CarbonReaderBuilder builder(String tablePath) {
+    String time = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date());
+    String tableName = "UnknownTable" + time;
+    return builder(tablePath, tableName);
+  }
+
+  /**
    * Close reader
    *
    * @throws IOException

http://git-wip-us.apache.org/repos/asf/carbondata/blob/43e0c59f/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index deb6d06..95c25f8 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -225,6 +225,82 @@ public class CarbonReaderTest extends TestCase {
   }
 
   @Test
+  public void testWriteAndReadFilesWithoutTableName() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path)
+        .projection(new String[]{"name", "age"})
+        .isTransactionalTable(true)
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      // Default sort column is applied for dimensions. So, need to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
+  public void testWriteAndReadFilesWithoutTableName2() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true,false);
+
+    CarbonReader reader = CarbonReader
+        .builder(path)
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      // Default sort column is applied for dimensions. So, need to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
   public void testReadSchemaFromDataFile() throws IOException {
     String path = "./testWriteFiles";
     FileUtils.deleteDirectory(new File(path));


[12/26] carbondata git commit: [CARBONDATA-2524] Support create carbonReader with default projection

Posted by ra...@apache.org.
[CARBONDATA-2524] Support create carbonReader with default projection

This closes #2338
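
A hedged sketch of the default projection added here (path illustrative): a builder configured without projection(...) now reads all columns, and projectAllColumns() states the same intent explicitly:

```java
// Sketch: with no projection configured, all columns are projected by default;
// projectAllColumns() can also be called explicitly (it may throw IOException).
CarbonReader reader = CarbonReader
    .builder("./testWriteFiles", "_temp")
    .projectAllColumns()
    .build();
```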


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/60b65691
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/60b65691
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/60b65691

Branch: refs/heads/branch-1.4
Commit: 60b65691fec1ce28dda5187100cf8109796befa8
Parents: f27fe0a
Author: xubo245 <xu...@huawei.com>
Authored: Thu May 24 09:33:23 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               |  10 ++
 .../sdk/file/CarbonReaderBuilder.java           |  40 +++++++-
 .../carbondata/sdk/file/CarbonReaderTest.java   | 101 +++++++++++++++++++
 3 files changed, 149 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/60b65691/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 4d258f0..328a845 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -493,6 +493,16 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 
 ```
   /**
+   * Project all Columns for carbon reader
+   *
+   * @return CarbonReaderBuilder object
+   * @throws IOException
+   */
+  public CarbonReaderBuilder projectAllColumns();
+```
+
+```
+  /**
    * Configure the transactional status of table
    * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
    * If set to true, then reads the carbondata and carbonindex files from segment folder structure.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/60b65691/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index c78cda0..4103c63 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -26,6 +26,7 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.hadoop.CarbonProjection;
 import org.apache.carbondata.hadoop.api.CarbonFileInputFormat;
@@ -51,6 +52,12 @@ public class CarbonReaderBuilder {
   private boolean isTransactionalTable = true;
 
   /**
+   * It will be true if use the projectAllColumns method,
+   * it will be false if use the projection method
+   */
+  private boolean isProjectAllColumns = true;
+
+  /**
    * Construct a CarbonReaderBuilder with table path and table name
    *
    * @param tablePath table path
@@ -70,6 +77,7 @@ public class CarbonReaderBuilder {
   public CarbonReaderBuilder projection(String[] projectionColumnNames) {
     Objects.requireNonNull(projectionColumnNames);
     this.projectionColumns = projectionColumnNames;
+    isProjectAllColumns = false;
     return this;
   }
 
@@ -89,6 +97,33 @@ public class CarbonReaderBuilder {
   }
 
   /**
+   * Project all Columns for carbon reader
+   *
+   * @return CarbonReaderBuilder object
+   * @throws IOException
+   */
+  public CarbonReaderBuilder projectAllColumns() throws IOException {
+    CarbonTable carbonTable = CarbonTable
+        .buildFromTablePath(tableName, tablePath, isTransactionalTable);
+
+    List<ColumnSchema> colList = carbonTable.getTableInfo().getFactTable().getListOfColumns();
+    List<String> projectColumn = new ArrayList<String>();
+    for (ColumnSchema cols : colList) {
+      if (cols.getSchemaOrdinal() != -1) {
+        projectColumn.add(cols.getColumnUniqueId());
+      }
+    }
+    projectionColumns = new String[projectColumn.size()];
+    int i = 0;
+    for (String columnName : projectColumn) {
+      projectionColumns[i] = columnName;
+      i++;
+    }
+    isProjectAllColumns = true;
+    return this;
+  }
+
+  /**
    * Configure the filter expression for carbon reader
    *
    * @param filterExpression filter expression
@@ -186,9 +221,10 @@ public class CarbonReaderBuilder {
     if (filterExpression != null) {
       format.setFilterPredicates(job.getConfiguration(), filterExpression);
     }
-    if (projectionColumns != null) {
-      format.setColumnProjection(job.getConfiguration(), new CarbonProjection(projectionColumns));
+    if (isProjectAllColumns) {
+      projectAllColumns();
     }
+    format.setColumnProjection(job.getConfiguration(), new CarbonProjection(projectionColumns));
 
     final List<InputSplit> splits =
         format.getSplits(new JobContextImpl(job.getConfiguration(), new JobID()));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/60b65691/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 0d2c84e..756dbe4 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -409,4 +409,105 @@ public class CarbonReaderTest extends TestCase {
         badRecordLoc);
   }
 
+  @Test
+  public void testReadFilesWithProjectAllColumns() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projectAllColumns()
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      // Default sort column is applied for dimensions. So, need  to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
+  public void testReadFilesWithDefaultProjection() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+  }
+
+  @Test
+  public void testReadFilesWithNullProjection() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{})
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+    // Default sort column is applied for dimensions. So, need  to validate accordingly
+
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      assert(row.length==0);
+    }
+  }
 }


[24/26] carbondata git commit: [CARBONDATA-2559] Task id set for each CarbonReader in ThreadLocal

Posted by ra...@apache.org.
[CARBONDATA-2559] Task id set for each CarbonReader in ThreadLocal

1. A task id is set for each CarbonReader, because every CarbonReader object needs its own ThreadLocal variable.
2. Fixed: when no sort column is given to CarbonWriter, describe formatted no longer shows default sort_cols.
3. Issue: CarbonReader was being closed after one iteration, so when the reader iterated over the next batch it threw a NullPointerException because it was already closed.
Solution: the reader is now closed only if an exception is encountered; otherwise it must be closed explicitly by the user, as in the sketch after this list.
4. The CarbonProperties API for the SDK moved to the common API list, because property setting is common to both CarbonReader and CarbonWriter.
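
A minimal sketch of the reader lifecycle implied by point 3 (path is a placeholder; the data is assumed to be already written there):

```
    CarbonReader reader = CarbonReader.builder(path, "_temp").build();
    try {
      while (reader.hasNext()) {
        Object[] row = (Object[]) reader.readNextRow();
        // process row: the reader is no longer closed between batches
      }
    } finally {
      // close explicitly once iteration is done; build() only closes readers on failure
      reader.close();
    }
```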


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/d510e142
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/d510e142
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/d510e142

Branch: refs/heads/branch-1.4
Commit: d510e1424d7fed593ea6ec80853313f8a6f91227
Parents: ef47070
Author: rahulforallp <ra...@knoldus.in>
Authored: Tue May 29 10:23:46 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:07:06 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               | 95 ++++++++++----------
 .../TestNonTransactionalCarbonTable.scala       | 13 +--
 .../carbondata/sdk/file/CarbonReader.java       |  5 ++
 .../sdk/file/CarbonReaderBuilder.java           | 10 ++-
 .../sdk/file/CarbonWriterBuilder.java           |  4 +-
 .../sdk/file/CSVCarbonWriterTest.java           |  2 +-
 .../carbondata/sdk/file/CarbonReaderTest.java   | 41 ++++-----
 .../apache/carbondata/sdk/file/TestUtil.java    |  4 +-
 .../carbondata/store/LocalCarbonStoreTest.java  |  2 +-
 9 files changed, 96 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/d510e142/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 2371b33..5dbb5ac 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -350,52 +350,6 @@ public Schema(Field[] fields);
 public static Schema parseJson(String json);
 ```
 
-### Class org.apache.carbondata.core.util.CarbonProperties
-
-```
-/**
-* This method will be responsible to get the instance of CarbonProperties class
-*
-* @return carbon properties instance
-*/
-public static CarbonProperties getInstance();
-```
-
-```
-/**
-* This method will be used to add a new property
-*
-* @param key is a property name to set for carbon.
-* @param value is valid parameter corresponding to property.
-* @return CarbonProperties object
-*/
-public CarbonProperties addProperty(String key, String value);
-```
-
-```
-/**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value.
-* @return properties value for corresponding key. If not set, then returns null.
-*/
-public String getProperty(String key);
-```
-
-```
-/**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value..
-* @param defaultValue used to be returned by function if corrosponding key not set.
-* @return properties value for corresponding key. If not set, then returns specified defaultValue.
-*/
-public String getProperty(String key, String defaultValue);
-```
-Reference : [list of carbon properties](http://carbondata.apache.org/configuration-parameters.html)
-
 ### Class org.apache.carbondata.sdk.file.AvroCarbonWriter
 ```
 /**
@@ -705,3 +659,52 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 ```
 
 Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java) in the CarbonData repo.
+
+
+# Common API List for CarbonReader and CarbonWriter
+
+### Class org.apache.carbondata.core.util.CarbonProperties
+
+```
+/**
+* This method will be responsible to get the instance of CarbonProperties class
+*
+* @return carbon properties instance
+*/
+public static CarbonProperties getInstance();
+```
+
+```
+/**
+* This method will be used to add a new property
+*
+* @param key is a property name to set for carbon.
+* @param value is valid parameter corresponding to property.
+* @return CarbonProperties object
+*/
+public CarbonProperties addProperty(String key, String value);
+```
+
+```
+/**
+* This method will be used to get the property value. If property is not
+* present, then it will return the default value.
+*
+* @param key is a property name to get user specified value.
+* @return properties value for corresponding key. If not set, then returns null.
+*/
+public String getProperty(String key);
+```
+
+```
+/**
+* This method will be used to get the property value. If property is not
+* present, then it will return the default value.
+*
+* @param key is a property name to get user specified value..
+* @param defaultValue used to be returned by function if corrosponding key not set.
+* @return properties value for corresponding key. If not set, then returns specified defaultValue.
+*/
+public String getProperty(String key, String defaultValue);
+```
+Reference : [list of carbon properties](http://carbondata.apache.org/configuration-parameters.html)
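
For illustration, a minimal usage sketch of this CarbonProperties API (the property key below is only an example taken from the reference list; substitute any documented carbon property):

```
    CarbonProperties properties = CarbonProperties.getInstance();
    // example key: the location where bad records are written during load
    properties.addProperty("carbon.badRecords.location", "/tmp/carbon/badrecords");
    String location = properties.getProperty("carbon.badRecords.location", "/tmp/carbon/badrecords");
```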

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d510e142/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 0083733..5beb9c4 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -378,7 +378,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
-    checkExistence(sql("describe formatted sdkOutputTable"), true, "name")
+    checkExistence(sql("describe formatted sdkOutputTable"), true, "SORT_COLUMNS                        name")
 
     buildTestDataWithSortColumns(List())
     assert(new File(writerPath).exists())
@@ -390,15 +390,18 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
-    sql("describe formatted sdkOutputTable").show(false)
+    checkExistence(sql("describe formatted sdkOutputTable"),false,"SORT_COLUMNS                        name")
     sql("select * from sdkOutputTable").show()
 
+    sql("DROP TABLE sdkOutputTable")
+    // drop table should not delete the files
+    assert(new File(writerPath).exists())
+    cleanTestData()
+
     intercept[RuntimeException] {
       buildTestDataWithSortColumns(List(""))
     }
-
-    sql("DROP TABLE sdkOutputTable")
-    // drop table should not delete the files
+    
     assert(!(new File(writerPath).exists()))
     cleanTestData()
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d510e142/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index 81db7b2..9af710f 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -24,6 +24,8 @@ import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.util.CarbonTaskInfo;
+import org.apache.carbondata.core.util.ThreadLocalTaskInfo;
 
 import org.apache.hadoop.mapreduce.RecordReader;
 
@@ -54,6 +56,9 @@ public class CarbonReader<T> {
     this.readers = readers;
     this.index = 0;
     this.currentReader = readers.get(0);
+    CarbonTaskInfo carbonTaskInfo = new CarbonTaskInfo();
+    carbonTaskInfo.setTaskId(System.nanoTime());
+    ThreadLocalTaskInfo.setCarbonTaskInfo(carbonTaskInfo);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d510e142/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index e99ff0d..9d7470e 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -233,9 +233,13 @@ public class CarbonReaderBuilder {
       TaskAttemptContextImpl attempt =
           new TaskAttemptContextImpl(job.getConfiguration(), new TaskAttemptID());
       RecordReader reader = format.createRecordReader(split, attempt);
-      reader.initialize(split, attempt);
-      reader.close();
-      readers.add(reader);
+      try {
+        reader.initialize(split, attempt);
+        readers.add(reader);
+      } catch (Exception e) {
+        reader.close();
+        throw e;
+      }
     }
 
     return new CarbonReader<>(readers);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d510e142/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index e2dc8c2..bd64568 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -379,7 +379,7 @@ public class CarbonWriterBuilder {
     }
 
     List<String> sortColumnsList = new ArrayList<>();
-    if (sortColumns == null || sortColumns.length == 0) {
+    if (sortColumns == null) {
       // If sort columns are not specified, default set all dimensions to sort column.
       // When dimensions are default set to sort column,
       // Inverted index will be supported by default for sort columns.
@@ -484,7 +484,7 @@ public class CarbonWriterBuilder {
           if (isSortColumn > -1) {
             columnSchema.setSortColumn(true);
             sortColumnsSchemaList[isSortColumn] = columnSchema;
-          } else if (sortColumnsList.isEmpty() && columnSchema.isDimensionColumn()
+          } else if (!sortColumnsList.isEmpty() && columnSchema.isDimensionColumn()
               && columnSchema.getNumberOfChild() < 1) {
             columnSchema.setSortColumn(true);
             sortColumnsSchemaList[i] = columnSchema;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d510e142/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
index 1eed47b..865097b 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
@@ -205,7 +205,7 @@ public class CSVCarbonWriterTest {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     String schemaFile = CarbonTablePath.getSchemaFilePath(path);
     Assert.assertTrue(new File(schemaFile).exists());

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d510e142/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 95c25f8..db118cd 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -59,28 +59,28 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(200, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"}).build();
 
     // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
+    String[] name = new String[200];
+    Integer[] age = new Integer[200];
+    for (int i = 0; i < 200; i++) {
       name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
+      age[i] = i;
     }
 
     int i = 0;
     while (reader.hasNext()) {
       Object[] row = (Object[]) reader.readNextRow();
       // Default sort column is applied for dimensions. So, need  to validate accordingly
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
+      assert(Arrays.asList(name).contains(row[0]));
+      assert(Arrays.asList(age).contains(row[1]));
       i++;
     }
-    Assert.assertEquals(i, 100);
+    Assert.assertEquals(i, 200);
 
     reader.close();
 
@@ -95,11 +95,11 @@ public class CarbonReaderTest extends TestCase {
     while (reader2.hasNext()) {
       Object[] row = (Object[]) reader2.readNextRow();
       // Default sort column is applied for dimensions. So, need  to validate accordingly
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
+      assert(Arrays.asList(name).contains(row[0]));
+      assert(Arrays.asList(age).contains(row[1]));
       i++;
     }
-    Assert.assertEquals(i, 100);
+    Assert.assertEquals(i, 200);
     reader2.close();
 
     FileUtils.deleteDirectory(new File(path));
@@ -114,7 +114,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
@@ -156,7 +156,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
@@ -193,7 +193,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"}).build();
@@ -233,7 +233,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path)
@@ -309,7 +309,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     File[] dataFiles = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
       @Override public boolean accept(File dir, String name) {
@@ -337,7 +337,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     File[] dataFiles = new File(path + "/Metadata").listFiles(new FilenameFilter() {
       @Override public boolean accept(File dir, String name) {
@@ -887,7 +887,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
@@ -926,7 +926,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
@@ -948,6 +948,7 @@ public class CarbonReaderTest extends TestCase {
       Assert.assertEquals(age[i], row[1]);
       i++;
     }
+    reader.close();
     Assert.assertEquals(i, 100);
   }
 
@@ -960,7 +961,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     try {
       CarbonReader reader = CarbonReader

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d510e142/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
index eb406e2..0f00d61 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
@@ -39,8 +39,8 @@ public class TestUtil {
     writeFilesAndVerify(100, schema, path, sortColumns, false, -1, -1, true);
   }
 
-  public static void writeFilesAndVerify(Schema schema, String path, boolean persistSchema) {
-    writeFilesAndVerify(100, schema, path, null, persistSchema, -1, -1, true);
+  public static void writeFilesAndVerify(int rows, Schema schema, String path, boolean persistSchema) {
+    writeFilesAndVerify(rows, schema, path, null, persistSchema, -1, -1, true);
   }
 
   public static void writeFilesAndVerify(Schema schema, String path, boolean persistSchema,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d510e142/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java b/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
index 51d0b27..c885a26 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
@@ -56,7 +56,7 @@ public class LocalCarbonStoreTest {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonStore store = new LocalCarbonStore();
     Iterator<CarbonRow> rows = store.scan(path, new String[]{"name, age"}, null);


[15/26] carbondata git commit: [CARBONDATA-2558] Optimize carbon schema reader interface of SDK

Posted by ra...@apache.org.
[CARBONDATA-2558] Optimize carbon schema reader interface of SDK

Optimize carbon schema reader interface of SDK

1. Create CarbonSchemaReader and move the schema-reading interface from CarbonReader to CarbonSchemaReader.
2. Change the return type from List<ColumnSchema> to the SDK Schema, and remove the TableInfo return type.
3. Optimize the documentation.

This closes #2353
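
A minimal sketch of the reworked interface (dataFilePath is a placeholder for the full path of a carbondata file):

```
    Schema schema = CarbonSchemaReader
        .readSchemaInDataFile(dataFilePath)
        .asOriginOrder();   // sort fields back to the original schema order

    for (Field field : schema.getFields()) {
      System.out.println(field.getFieldName() + "\t" + field.getSchemaOrdinal());
    }
```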


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/6d245b9d
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/6d245b9d
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/6d245b9d

Branch: refs/heads/branch-1.4
Commit: 6d245b9daf0dce4d3366c540c28bf27a4e38ac90
Parents: b792b3e
Author: xubo245 <xu...@huawei.com>
Authored: Tue May 29 17:07:10 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               | 107 +++++++++++++++---
 .../carbondata/sdk/file/CarbonReader.java       |  85 ---------------
 .../carbondata/sdk/file/CarbonSchemaReader.java | 108 +++++++++++++++++++
 .../org/apache/carbondata/sdk/file/Field.java   |  16 +++
 .../org/apache/carbondata/sdk/file/Schema.java  |  31 ++++++
 .../carbondata/store/MetaCachedCarbonStore.java |  13 ++-
 .../carbondata/sdk/file/CarbonReaderTest.java   |  88 ++++++++-------
 7 files changed, 305 insertions(+), 143 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/6d245b9d/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 328a845..3c575fe 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -437,20 +437,6 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 
 ```
   /**
-   * Read carbondata file and return the schema
-   */
-  public static List<ColumnSchema> readSchemaInDataFile(String dataFilePath);
-```
-
-```
- /**
-  * Read schema file and return table info object
-  */
-  public static TableInfo readSchemaFile(String schemaFilePath);
-```
-
-```
-  /**
    * Return true if has next row
    */
   public boolean hasNext();
@@ -598,4 +584,97 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
    */
   public <T> CarbonReader<T> build();
 ```
+### Class org.apache.carbondata.sdk.file.CarbonSchemaReader
+```
+  /**
+   * Read schema file and return the schema
+   *
+   * @param schemaFilePath complete path including schema file name
+   * @return schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInSchemaFile(String schemaFilePath);
+```
+
+```
+  /**
+   * Read carbondata file and return the schema
+   *
+   * @param dataFilePath complete path including carbondata file name
+   * @return Schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInDataFile(String dataFilePath);
+```
+
+```
+  /**
+   * Read carbonindex file and return the schema
+   *
+   * @param indexFilePath complete path including index file name
+   * @return schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInIndexFile(String indexFilePath);
+```
+
+### Class org.apache.carbondata.sdk.file.Schema
+```
+  /**
+   * construct a schema with fields
+   * @param fields
+   */
+  public Schema(Field[] fields);
+```
+
+```
+  /**
+   * construct a schema with List<ColumnSchema>
+   *
+   * @param columnSchemaList column schema list
+   */
+  public Schema(List<ColumnSchema> columnSchemaList);
+```
+
+```
+  /**
+   * Create a Schema using JSON string, for example:
+   * [
+   *   {"name":"string"},
+   *   {"age":"int"}
+   * ]
+   * @param json specified as string
+   * @return Schema
+   */
+  public static Schema parseJson(String json);
+```
+
+```
+  /**
+   * Sort the schema order as original order
+   *
+   * @return Schema object
+   */
+  public Schema asOriginOrder();
+```
+
+### Class org.apache.carbondata.sdk.file.Field
+```
+  /**
+   * Field Constructor
+   * @param name name of the field
+   * @param type datatype of field, specified in strings.
+   */
+  public Field(String name, String type);
+```
+
+```
+  /**
+   * Construct Field from ColumnSchema
+   *
+   * @param columnSchema ColumnSchema, Store the information about the column meta data
+   */
+  public Field(ColumnSchema columnSchema);
+```
+
 Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java) in the CarbonData repo.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6d245b9d/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index 9ae940b..60ead05 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -17,26 +17,11 @@
 
 package org.apache.carbondata.sdk.file;
 
-import java.io.DataInputStream;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
-import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
-import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.metadata.converter.SchemaConverter;
-import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
-import org.apache.carbondata.core.metadata.schema.table.TableInfo;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.reader.CarbonHeaderReader;
-import org.apache.carbondata.core.reader.CarbonIndexFileReader;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
 
 import org.apache.hadoop.mapreduce.RecordReader;
 
@@ -104,76 +89,6 @@ public class CarbonReader<T> {
   }
 
   /**
-   * Read carbondata file and return the schema
-   */
-  public static List<ColumnSchema> readSchemaInDataFile(String dataFilePath) throws IOException {
-    CarbonHeaderReader reader = new CarbonHeaderReader(dataFilePath);
-    return reader.readSchema();
-  }
-
-  /**
-   * Read carbonindex file and return the schema
-   *
-   * @param indexFilePath complete path including index file name
-   * @return null, if the index file is not present in the path.
-   * List<ColumnSchema> from the index file.
-   * @throws IOException
-   */
-  public static List<ColumnSchema> readSchemaInIndexFile(String indexFilePath) throws IOException {
-    CarbonFile indexFile =
-        FileFactory.getCarbonFile(indexFilePath, FileFactory.getFileType(indexFilePath));
-    if (!indexFile.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)) {
-      throw new IOException("Not an index file name");
-    }
-    // read schema from the first index file
-    DataInputStream dataInputStream =
-        FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
-    byte[] bytes = new byte[(int) indexFile.getSize()];
-    try {
-      //get the file in byte buffer
-      dataInputStream.readFully(bytes);
-      CarbonIndexFileReader indexReader = new CarbonIndexFileReader();
-      // read from byte buffer.
-      indexReader.openThriftReader(bytes);
-      // get the index header
-      org.apache.carbondata.format.IndexHeader readIndexHeader = indexReader.readIndexHeader();
-      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
-      List<org.apache.carbondata.format.ColumnSchema> table_columns =
-          readIndexHeader.getTable_columns();
-      for (org.apache.carbondata.format.ColumnSchema columnSchema : table_columns) {
-        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(columnSchema));
-      }
-      return columnSchemaList;
-    } finally {
-      dataInputStream.close();
-    }
-  }
-
-  /**
-   * Read CarbonData file and return the user schema,
-   * the schema order is the same as user save schema
-   */
-  public static List<ColumnSchema> readUserSchema(String indexFilePath) throws IOException {
-    List<ColumnSchema> columnSchemas = readSchemaInIndexFile(indexFilePath);
-    Collections.sort(columnSchemas, new Comparator<ColumnSchema>() {
-      @Override
-      public int compare(ColumnSchema o1, ColumnSchema o2) {
-        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
-      }
-    });
-    return columnSchemas;
-  }
-
-  /**
-   * Read schema file and return table info object
-   */
-  public static TableInfo readSchemaFile(String schemaFilePath) throws IOException {
-    org.apache.carbondata.format.TableInfo tableInfo = CarbonUtil.readSchemaFile(schemaFilePath);
-    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
-    return schemaConverter.fromExternalToWrapperTableInfo(tableInfo, "", "", "");
-  }
-
-  /**
    * Close reader
    *
    * @throws IOException

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6d245b9d/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java
new file mode 100644
index 0000000..d8882bc
--- /dev/null
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.sdk.file;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.metadata.converter.SchemaConverter;
+import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.core.reader.CarbonHeaderReader;
+import org.apache.carbondata.core.reader.CarbonIndexFileReader;
+import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+
+import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
+
+/**
+ * Schema reader for carbon files, including carbondata file, carbonindex file, and schema file
+ */
+public class CarbonSchemaReader {
+
+  /**
+   * Read schema file and return the schema
+   *
+   * @param schemaFilePath complete path including schema file name
+   * @return schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInSchemaFile(String schemaFilePath) throws IOException {
+    org.apache.carbondata.format.TableInfo tableInfo = CarbonUtil.readSchemaFile(schemaFilePath);
+    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
+    List<ColumnSchema> schemaList = schemaConverter
+        .fromExternalToWrapperTableInfo(tableInfo, "", "", "")
+        .getFactTable()
+        .getListOfColumns();
+    return new Schema(schemaList);
+  }
+
+  /**
+   * Read carbondata file and return the schema
+   *
+   * @param dataFilePath complete path including carbondata file name
+   * @return Schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInDataFile(String dataFilePath) throws IOException {
+    CarbonHeaderReader reader = new CarbonHeaderReader(dataFilePath);
+    return new Schema(reader.readSchema());
+  }
+
+  /**
+   * Read carbonindex file and return the schema
+   *
+   * @param indexFilePath complete path including index file name
+   * @return schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInIndexFile(String indexFilePath) throws IOException {
+    CarbonFile indexFile =
+        FileFactory.getCarbonFile(indexFilePath, FileFactory.getFileType(indexFilePath));
+    if (!indexFile.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)) {
+      throw new IOException("Not an index file name");
+    }
+    // read schema from the first index file
+    DataInputStream dataInputStream =
+        FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
+    byte[] bytes = new byte[(int) indexFile.getSize()];
+    try {
+      //get the file in byte buffer
+      dataInputStream.readFully(bytes);
+      CarbonIndexFileReader indexReader = new CarbonIndexFileReader();
+      // read from byte buffer.
+      indexReader.openThriftReader(bytes);
+      // get the index header
+      org.apache.carbondata.format.IndexHeader readIndexHeader = indexReader.readIndexHeader();
+      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
+      List<org.apache.carbondata.format.ColumnSchema> table_columns =
+          readIndexHeader.getTable_columns();
+      for (org.apache.carbondata.format.ColumnSchema columnSchema : table_columns) {
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(columnSchema));
+      }
+      return new Schema(columnSchemaList);
+    } finally {
+      dataInputStream.close();
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6d245b9d/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
index 0db3bc5..6d4cfd9 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
@@ -17,6 +17,7 @@
 
 package org.apache.carbondata.sdk.file;
 
+import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
@@ -24,6 +25,7 @@ import org.apache.carbondata.common.annotations.InterfaceStability;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.datatype.StructField;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 /**
  * A field represent one column
@@ -126,6 +128,20 @@ public class Field {
     this.type = type;
   }
 
+  /**
+   * Construct Field from ColumnSchema
+   *
+   * @param columnSchema ColumnSchema, Store the information about the column meta data
+   */
+  public Field(ColumnSchema columnSchema) {
+    this.name = columnSchema.getColumnName();
+    this.type = columnSchema.getDataType();
+    children = new LinkedList<>();
+    schemaOrdinal = columnSchema.getSchemaOrdinal();
+    precision = columnSchema.getPrecision();
+    scale = columnSchema.getScale();
+  }
+
   public String getFieldName() {
     return name;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6d245b9d/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
index 31c202d..6131d45 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
@@ -18,9 +18,13 @@
 package org.apache.carbondata.sdk.file;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 import com.google.gson.GsonBuilder;
 import com.google.gson.TypeAdapter;
@@ -45,6 +49,18 @@ public class Schema {
   }
 
   /**
+   * construct a schema with List<ColumnSchema>
+   *
+   * @param columnSchemaList column schema list
+   */
+  public Schema(List<ColumnSchema> columnSchemaList) {
+    fields = new Field[columnSchemaList.size()];
+    for (int i = 0; i < columnSchemaList.size(); i++) {
+      fields[i] = new Field(columnSchemaList.get(i));
+    }
+  }
+
+  /**
    * Create a Schema using JSON string, for example:
    * [
    *   {"name":"string"},
@@ -77,4 +93,19 @@ public class Schema {
   public Field[] getFields() {
     return fields;
   }
+
+  /**
+   * Sort the schema order as original order
+   *
+   * @return Schema object
+   */
+  public Schema asOriginOrder() {
+    Arrays.sort(fields, new Comparator<Field>() {
+      @Override
+      public int compare(Field o1, Field o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+    return this;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6d245b9d/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java b/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
index d847e67..e43f750 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
@@ -22,10 +22,12 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.core.metadata.converter.SchemaConverter;
+import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
+import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
-import org.apache.carbondata.sdk.file.CarbonReader;
 
 /**
  * A CarbonStore base class that caches CarbonTable object
@@ -40,9 +42,12 @@ abstract class MetaCachedCarbonStore implements CarbonStore {
     if (cache.containsKey(path)) {
       return cache.get(path);
     }
-    TableInfo schema = CarbonReader.readSchemaFile(CarbonTablePath.getSchemaFilePath(path));
-    schema.setTablePath(path);
-    CarbonTable table = CarbonTable.buildFromTableInfo(schema);
+    org.apache.carbondata.format.TableInfo tableInfo = CarbonUtil
+        .readSchemaFile(CarbonTablePath.getSchemaFilePath(path));
+    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
+    TableInfo tableInfo1 = schemaConverter.fromExternalToWrapperTableInfo(tableInfo, "", "", "");
+    tableInfo1.setTablePath(path);
+    CarbonTable table = CarbonTable.buildFromTableInfo(tableInfo1);
     cache.put(path, table);
     return table;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6d245b9d/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 30d4091..7a2a765 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -20,9 +20,8 @@ package org.apache.carbondata.sdk.file;
 import java.io.*;
 import java.sql.Date;
 import java.sql.Timestamp;
-import java.util.Collections;
+import java.util.Arrays;
 import java.util.Comparator;
-import java.util.List;
 
 import org.apache.avro.generic.GenericData;
 import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
@@ -30,8 +29,6 @@ import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
-import org.apache.carbondata.core.metadata.schema.table.TableInfo;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
@@ -200,12 +197,12 @@ public class CarbonReaderTest extends TestCase {
     });
     Assert.assertTrue(dataFiles != null);
     Assert.assertTrue(dataFiles.length > 0);
-    List<ColumnSchema> columns = CarbonReader.readSchemaInDataFile(dataFiles[0].getAbsolutePath());
-    Assert.assertTrue(columns.size() == 2);
-    Assert.assertEquals("name", columns.get(0).getColumnName());
-    Assert.assertEquals("age", columns.get(1).getColumnName());
-    Assert.assertEquals(DataTypes.STRING, columns.get(0).getDataType());
-    Assert.assertEquals(DataTypes.INT, columns.get(1).getDataType());
+    Schema schema = CarbonSchemaReader.readSchemaInDataFile(dataFiles[0].getAbsolutePath());
+    Assert.assertTrue(schema.getFields().length == 2);
+    Assert.assertEquals("name", (schema.getFields())[0].getFieldName());
+    Assert.assertEquals("age", (schema.getFields())[1].getFieldName());
+    Assert.assertEquals(DataTypes.STRING, (schema.getFields())[0].getDataType());
+    Assert.assertEquals(DataTypes.INT, (schema.getFields())[1].getDataType());
 
     FileUtils.deleteDirectory(new File(path));
   }
@@ -228,20 +225,33 @@ public class CarbonReaderTest extends TestCase {
     });
     Assert.assertTrue(dataFiles != null);
     Assert.assertTrue(dataFiles.length > 0);
-    TableInfo tableInfo = CarbonReader.readSchemaFile(dataFiles[0].getAbsolutePath());
-    Assert.assertEquals(2, tableInfo.getFactTable().getListOfColumns().size());
 
-    List<ColumnSchema> columns = tableInfo.getFactTable().getListOfColumns();
-    Assert.assertEquals(2, columns.size());
-    Assert.assertEquals("name", columns.get(0).getColumnName());
-    Assert.assertEquals("age", columns.get(1).getColumnName());
-    Assert.assertEquals(DataTypes.STRING, columns.get(0).getDataType());
-    Assert.assertEquals(DataTypes.INT, columns.get(1).getDataType());
+    Schema schema = CarbonSchemaReader.readSchemaInSchemaFile(dataFiles[0].getAbsolutePath());
+
+    // sort the schema
+    Arrays.sort(schema.getFields(), new Comparator<Field>() {
+      @Override
+      public int compare(Field o1, Field o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+
+    // Transform the schema
+    String[] strings = new String[schema.getFields().length];
+    for (int i = 0; i < schema.getFields().length; i++) {
+      strings[i] = (schema.getFields())[i].getFieldName();
+    }
+
+    Assert.assertEquals(2, schema.getFields().length);
+
+    Assert.assertEquals("name", (schema.getFields())[0].getFieldName());
+    Assert.assertEquals("age", (schema.getFields())[1].getFieldName());
+    Assert.assertEquals(DataTypes.STRING, (schema.getFields())[0].getDataType());
+    Assert.assertEquals(DataTypes.INT, (schema.getFields())[1].getDataType());
 
     FileUtils.deleteDirectory(new File(path));
   }
 
-
   @Test
   public void testWriteAndReadFilesNonTransactional() throws IOException, InterruptedException {
     String path = "./testWriteFiles";
@@ -473,22 +483,20 @@ public class CarbonReaderTest extends TestCase {
         return name.endsWith("schema");
       }
     });
-    TableInfo tableInfo = CarbonReader.readSchemaFile(dataFiles[0].getAbsolutePath());
-
-    List<ColumnSchema> columns = tableInfo.getFactTable().getListOfColumns();
+    Schema schema = CarbonSchemaReader.readSchemaInSchemaFile(dataFiles[0].getAbsolutePath());
 
     // sort the schema
-    Collections.sort(tableInfo.getFactTable().getListOfColumns(), new Comparator<ColumnSchema>() {
+    Arrays.sort(schema.getFields(), new Comparator<Field>() {
       @Override
-      public int compare(ColumnSchema o1, ColumnSchema o2) {
+      public int compare(Field o1, Field o2) {
         return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
       }
     });
 
     // Transform the schema
-    String[] strings= new String[columns.size()];
-    for (int i = 0; i < columns.size(); i++) {
-      strings[i]= columns.get(i).getColumnName();
+    String[] strings = new String[schema.getFields().length];
+    for (int i = 0; i < schema.getFields().length; i++) {
+      strings[i] = (schema.getFields())[i].getFieldName();
     }
 
     File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
@@ -591,20 +599,20 @@ public class CarbonReaderTest extends TestCase {
       }
     });
 
-    List<ColumnSchema> columns = CarbonReader.readSchemaInDataFile(dataFiles2[0].getAbsolutePath());
+    Schema schema = CarbonSchemaReader.readSchemaInDataFile(dataFiles2[0].getAbsolutePath());
 
     // sort the schema
-    Collections.sort(columns, new Comparator<ColumnSchema>() {
+    Arrays.sort(schema.getFields(), new Comparator<Field>() {
       @Override
-      public int compare(ColumnSchema o1, ColumnSchema o2) {
+      public int compare(Field o1, Field o2) {
         return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
       }
     });
 
     // Transform the schema
-    String[] strings= new String[columns.size()];
-    for (int i = 0; i < columns.size(); i++) {
-      strings[i]= columns.get(i).getColumnName();
+    String[] strings = new String[schema.getFields().length];
+    for (int i = 0; i < schema.getFields().length; i++) {
+      strings[i] = (schema.getFields())[i].getFieldName();
     }
 
     File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
@@ -704,12 +712,12 @@ public class CarbonReaderTest extends TestCase {
       }
     });
 
-    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+    Schema schema = CarbonSchemaReader.readSchemaInIndexFile(dataFiles2[0].getAbsolutePath()).asOriginOrder();
 
     // Transform the schema
-    String[] strings= new String[columns.size()];
-    for (int i = 0; i < columns.size(); i++) {
-      strings[i]= columns.get(i).getColumnName();
+    String[] strings = new String[schema.getFields().length];
+    for (int i = 0; i < schema.getFields().length; i++) {
+      strings[i] = (schema.getFields())[i].getFieldName();
     }
 
     File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
@@ -936,10 +944,10 @@ public class CarbonReaderTest extends TestCase {
       }
     });
 
-    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+    Schema schema = CarbonSchemaReader.readSchemaInIndexFile(dataFiles2[0].getAbsolutePath()).asOriginOrder();
 
-    for (int i = 0; i < columns.size(); i++) {
-      System.out.println(columns.get(i).getColumnName() + "\t" + columns.get(i).getSchemaOrdinal());
+    for (int i = 0; i < schema.getFields().length; i++) {
+      System.out.println((schema.getFields())[i].getFieldName() + "\t" + schema.getFields()[i].getSchemaOrdinal());
     }
     FileUtils.deleteDirectory(new File(path));
   }


[06/26] carbondata git commit: [CARBONDATA-2481] Adding SDV for SDKwriter

Posted by ra...@apache.org.
[CARBONDATA-2481] Adding SDV for SDKwriter

Adding SDV test cases for the SDK writer

This closes #2308
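
For orientation, the CSV write path these test cases exercise reduces to roughly the following minimal Java sketch. The output path is illustrative, and only builder calls that appear in the test code below are assumed:

import org.apache.carbondata.sdk.file.CarbonWriter;
import org.apache.carbondata.sdk.file.Schema;

public class SdkWriterSketch {
  public static void main(String[] args) throws Exception {
    // JSON schema string, same shape as in the test below.
    String schema = "[{\"name\":\"string\"},{\"age\":\"int\"},{\"height\":\"double\"}]";
    CarbonWriter writer = CarbonWriter.builder()
        .outputPath("./WriterOutput1")            // illustrative path
        .isTransactionalTable(false)
        .sortBy(new String[]{"name"})
        .uniqueIdentifier(System.currentTimeMillis())
        .withBlockSize(2)
        .buildWriterForCSVInput(Schema.parseJson(schema));
    for (int i = 0; i < 3; i++) {
      writer.write(new String[]{"abc" + i, String.valueOf(i), String.valueOf(i / 2.0)});
    }
    writer.close();  // flushes the carbondata and carbonindex files
  }
}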


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/8ef6bd1f
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/8ef6bd1f
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/8ef6bd1f

Branch: refs/heads/branch-1.4
Commit: 8ef6bd1f4b3c4d33a692fef8b78da8431fa399ac
Parents: d4f9c34
Author: Indhumathi27 <in...@gmail.com>
Authored: Fri May 11 10:29:42 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 integration/spark-common-cluster-test/pom.xml   |  12 +
 .../sdv/generated/SDKwriterTestCase.scala       | 732 +++++++++++++++++++
 .../cluster/sdv/suite/SDVSuites.scala           |   1 +
 3 files changed, 745 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/8ef6bd1f/integration/spark-common-cluster-test/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/pom.xml b/integration/spark-common-cluster-test/pom.xml
index 44453b3..d8aecc2 100644
--- a/integration/spark-common-cluster-test/pom.xml
+++ b/integration/spark-common-cluster-test/pom.xml
@@ -68,6 +68,18 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.carbondata</groupId>
+      <artifactId>carbondata-store-sdk</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>tech.allegro.schema.json2avro</groupId>
+      <artifactId>converter</artifactId>
+      <version>0.2.5</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8ef6bd1f/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
new file mode 100644
index 0000000..012091d
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
@@ -0,0 +1,732 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+
+import java.util
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util.QueryTest
+import org.scalatest.BeforeAndAfterEach
+import scala.collection.JavaConverters._
+import scala.collection.mutable
+
+import org.apache.avro
+import org.apache.commons.lang.CharEncoding
+import org.junit.Assert
+import tech.allegro.schema.json2avro.converter.JsonAvroConverter
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import org.apache.carbondata.core.util.CarbonUtil
+import org.apache.carbondata.sdk.file.{AvroCarbonWriter, CarbonWriter, Schema}
+
+/**
+ * Test Class for SDKwriterTestcase to verify all scenarios
+ */
+
+class SDKwriterTestCase extends QueryTest with BeforeAndAfterEach {
+
+  var writerPath =
+    s"${ resourcesPath }" + "/SparkCarbonFileFormat/WriterOutput1/"
+
+  override def beforeEach: Unit = {
+    sql("DROP TABLE IF EXISTS sdkTable1")
+    sql("DROP TABLE IF EXISTS sdkTable2")
+    sql("DROP TABLE IF EXISTS table1")
+    cleanTestData()
+  }
+
+  override def afterEach(): Unit = {
+    sql("DROP TABLE IF EXISTS sdkTable1")
+    sql("DROP TABLE IF EXISTS sdkTable2")
+    sql("DROP TABLE IF EXISTS table1")
+    cleanTestData()
+  }
+
+  def cleanTestData() = {
+    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+  }
+
+  def buildTestDataSingleFile(): Any = {
+    buildTestData(3, false, null)
+  }
+
+  def buildTestDataWithBadRecordForce(writerPath: String): Any = {
+    var options = Map("bAd_RECords_action" -> "FORCE").asJava
+    buildTestData(3, false, options)
+  }
+
+  def buildTestDataWithBadRecordFail(writerPath: String): Any = {
+    var options = Map("bAd_RECords_action" -> "FAIL").asJava
+    buildTestData(15001, false, options)
+  }
+
+  def buildTestData(rows: Int,
+      persistSchema: Boolean,
+      options: util.Map[String, String]): Any = {
+    buildTestData(rows, persistSchema, options, List("name"), writerPath)
+  }
+
+  // prepare sdk writer output
+  def buildTestData(rows: Int,
+      persistSchema: Boolean,
+      options: util.Map[String, String],
+      sortColumns: List[String],
+      writerPath: String): Any = {
+    val schema = new StringBuilder()
+      .append("[ \n")
+      .append("   {\"name\":\"string\"},\n")
+      .append("   {\"age\":\"int\"},\n")
+      .append("   {\"height\":\"double\"}\n")
+      .append("]")
+      .toString()
+
+    try {
+      val builder = CarbonWriter.builder()
+      val writer =
+        if (persistSchema) {
+          builder.persistSchemaFile(true)
+          builder
+            .sortBy(sortColumns.toArray)
+            .outputPath(writerPath)
+            .isTransactionalTable(false)
+            .uniqueIdentifier(System.currentTimeMillis)
+            .buildWriterForCSVInput(Schema.parseJson(schema))
+        } else {
+          if (options != null) {
+            builder.outputPath(writerPath)
+              .isTransactionalTable(false)
+              .sortBy(sortColumns.toArray)
+              .uniqueIdentifier(
+                System.currentTimeMillis).withBlockSize(2).withLoadOptions(options)
+              .buildWriterForCSVInput(Schema.parseJson(schema))
+          } else {
+            builder.outputPath(writerPath)
+              .isTransactionalTable(false)
+              .sortBy(sortColumns.toArray)
+              .uniqueIdentifier(
+                System.currentTimeMillis).withBlockSize(2)
+              .buildWriterForCSVInput(Schema.parseJson(schema))
+          }
+        }
+      var i = 0
+      while (i < rows) {
+        if ((options != null) && (i < 3)) {
+          // writing a bad record
+          writer.write(Array[String]("abc" + i, String.valueOf(i.toDouble / 2), "abc"))
+        } else {
+          writer.write(Array[String]("abc" + i, String.valueOf(i), String.valueOf(i.toDouble / 2)))
+        }
+        i += 1
+      }
+      if (options != null) {
+        //Keep one valid record. else carbon data file will not generate
+        writer.write(Array[String]("abc" + i, String.valueOf(i), String.valueOf(i.toDouble / 2)))
+      }
+      writer.close()
+    } catch {
+      case ex: Exception => throw new RuntimeException(ex)
+
+      case _ => None
+    }
+  }
+
+  def buildTestDataWithBadRecordIgnore(writerPath: String): Any = {
+    var options = Map("bAd_RECords_action" -> "IGNORE").asJava
+    buildTestData(3, false, options)
+  }
+
+  def buildTestDataWithBadRecordRedirect(writerPath: String): Any = {
+    var options = Map("bAd_RECords_action" -> "REDIRECT").asJava
+    buildTestData(3, false, options)
+  }
+
+  def deleteFile(path: String, extension: String): Unit = {
+    val file: CarbonFile = FileFactory
+      .getCarbonFile(path, FileFactory.getFileType(path))
+
+    for (eachDir <- file.listFiles) {
+      if (!eachDir.isDirectory) {
+        if (eachDir.getName.endsWith(extension)) {
+          CarbonUtil.deleteFoldersAndFilesSilent(eachDir)
+        }
+      } else {
+        deleteFile(eachDir.getPath, extension)
+      }
+    }
+  }
+
+  test("test create External Table with WriterPath") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+  }
+
+  test("test create External Table with Comment") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable comment 'this is comment' STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+  }
+
+  test("test create External Table and test files written from sdk writer") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+
+    checkAnswer(sql("select name from sdkTable"), Seq(Row("abc0"),
+      Row("abc1"),
+      Row("abc2")))
+
+    checkAnswer(sql("select age from sdkTable"), Seq(Row(0), Row(1), Row(2)))
+    checkAnswer(sql("select * from sdkTable where age > 1 and age < 8"),
+      Seq(Row("abc2", 2, 1.0)))
+
+    checkAnswer(sql("select * from sdkTable where name = 'abc2'"),
+      Seq(Row("abc2", 2, 1.0)))
+
+    checkAnswer(sql("select * from sdkTable where name like '%b%' limit 2"),
+      Seq(Row("abc0", 0, 0.0),
+        Row("abc1", 1, 0.5)))
+
+    checkAnswer(sql("select sum(age) from sdkTable where name like 'abc%'"), Seq(Row(3)))
+    checkAnswer(sql("select count(*) from sdkTable where name like 'abc%' "), Seq(Row(3)))
+    checkAnswer(sql("select count(*) from sdkTable"), Seq(Row(3)))
+
+  }
+
+  test("test create External Table and test insert into external table") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql(s"""select count(*) from sdkTable where age = 1"""),
+      Seq(Row(1)))
+
+    sql("insert into sdktable select 'def0',1,5.5")
+    sql("insert into sdktable select 'def1',5,6.6")
+
+    checkAnswer(sql(s"""select count(*) from sdkTable where age = 1"""),
+      Seq(Row(2)))
+  }
+
+  test("test create External Table and test insert into normal table with different schema") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql("DROP TABLE IF EXISTS table1")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    sql(
+      "create table if not exists table1 (name string, age int) STORED BY 'carbondata'")
+    sql("insert into table1 select * from sdkTable")
+    checkAnswer(sql("select * from table1"), Seq(Row("abc0", 0),
+      Row("abc1", 1),
+      Row("abc2", 2)))
+  }
+
+  test("test Insert into External Table from another External Table with Same Schema") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable1")
+    sql("DROP TABLE IF EXISTS sdkTable2")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable1(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable2(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    sql("insert into sdkTable1 select *from sdkTable2")
+    checkAnswer(sql("select count(*) from sdkTable1"), Seq(Row(6)))
+  }
+
+  test("test create External Table with Schema with partition, external table should " +
+       "ignore schema and partition") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string) PARTITIONED BY (age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+  }
+
+  test("test External Table with insert overwrite") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql("DROP TABLE IF EXISTS table1")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string) PARTITIONED BY (age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+
+    sql(
+      "create table if not exists table1 (name string, age int, height double) STORED BY 'org" +
+      ".apache.carbondata.format'")
+    sql(s"""insert into table1 values ("aaaaa", 12, 20)""")
+
+    checkAnswer(sql(s"""select count(*) from sdkTable where age = 1"""),
+      Seq(Row(1)))
+
+    sql("insert overwrite table sdkTable select * from table1")
+
+    checkAnswer(sql(s"""select count(*) from sdkTable where age = 1"""),
+      Seq(Row(0)))
+  }
+
+  test("test create External Table with Table properties should ignore tblproperties") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' TBLPROPERTIES('sort_scope'='batch_sort') """.stripMargin)
+
+    checkExistence(sql("Describe formatted sdkTable "), false, "batch_sort")
+  }
+
+  test("Read sdk writer output file and test without carbondata and carbonindex files should fail")
+  {
+    buildTestDataSingleFile()
+    deleteFile(writerPath, CarbonCommonConstants.FACT_FILE_EXT)
+    deleteFile(writerPath, CarbonCommonConstants.UPDATE_INDEX_FILE_EXT)
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    val exception = intercept[Exception] {
+      //data source file format
+      sql(
+        s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+           |'$writerPath' """.stripMargin)
+    }
+    assert(exception.getMessage()
+      .contains("Operation not allowed: Invalid table path provided:"))
+  }
+
+  test("test create External Table and test CTAS") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql("DROP TABLE IF EXISTS table1")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+
+    sql("create table table1 stored by 'carbondata' as select *from sdkTable")
+
+    checkAnswer(sql("select * from table1"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+  }
+
+  test("test create External Table and test JOIN on External Tables") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql("DROP TABLE IF EXISTS sdkTable1")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable1 STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable JOIN sdkTable1 on (sdkTable.age=sdkTable1.age)"),
+      Seq(Row("abc0", 0, 0.0, "abc0", 0, 0.0),
+        Row("abc1", 1, 0.5, "abc1", 1, 0.5),
+        Row("abc2", 2, 1.0, "abc2", 2, 1.0)))
+    checkAnswer(sql(
+      "select * from sdkTable LEFT OUTER JOIN sdkTable1 on (sdkTable.age=sdkTable1.age)"),
+      Seq(Row("abc0", 0, 0.0, "abc0", 0, 0.0),
+        Row("abc1", 1, 0.5, "abc1", 1, 0.5),
+        Row("abc2", 2, 1.0, "abc2", 2, 1.0)))
+    checkAnswer(sql(
+      "select * from sdkTable RIGHT OUTER JOIN sdkTable1 on (sdkTable.age=sdkTable1.age)"),
+      Seq(Row("abc0", 0, 0.0, "abc0", 0, 0.0),
+        Row("abc1", 1, 0.5, "abc1", 1, 0.5),
+        Row("abc2", 2, 1.0, "abc2", 2, 1.0)))
+  }
+
+  test("test create external table and test bad record") {
+    //1. Action = FORCE
+    buildTestDataWithBadRecordForce(writerPath)
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("abc0", null, null),
+      Row("abc1", null, null),
+      Row("abc2", null, null),
+      Row("abc3", 3, 1.5)))
+
+    sql("DROP TABLE sdkTable")
+    cleanTestData()
+
+    //2. Action = REDIRECT
+    buildTestDataWithBadRecordRedirect(writerPath)
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("abc3", 3, 1.5)))
+
+    sql("DROP TABLE sdkTable")
+    cleanTestData()
+
+    //3. Action = IGNORE
+    buildTestDataWithBadRecordIgnore(writerPath)
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("abc3", 3, 1.5)))
+
+  }
+
+  def buildAvroTestDataStructType(): Any = {
+    buildAvroTestDataStruct(3, null)
+  }
+
+  def buildAvroTestDataStruct(rows: Int,
+      options: util.Map[String, String]): Any = {
+
+    val mySchema =
+      """
+        |{"name": "address",
+        | "type": "record",
+        | "fields": [
+        |  { "name": "name", "type": "string"},
+        |  { "name": "age", "type": "int"},
+        |  { "name": "address",  "type": {
+        |    "type" : "record",  "name" : "my_address",
+        |        "fields" : [
+        |    {"name": "street", "type": "string"},
+        |    {"name": "city", "type": "string"}]}}
+        |]}
+      """.stripMargin
+
+    val json = """ {"name":"bob", "age":10, "address" : {"street":"abc", "city":"bang"}} """
+    WriteFilesWithAvroWriter(rows, mySchema, json)
+  }
+
+  def buildAvroTestDataBothStructArrayType(): Any = {
+    buildAvroTestDataStructWithArrayType(3, null)
+  }
+
+  def buildAvroTestDataStructWithArrayType(rows: Int,
+      options: util.Map[String, String]): Any = {
+
+    val mySchema =
+      """
+                     {
+                     |     "name": "address",
+                     |     "type": "record",
+                     |     "fields": [
+                     |     { "name": "name", "type": "string"},
+                     |     { "name": "age", "type": "int"},
+                     |     {
+                     |     "name": "address",
+                     |     "type": {
+                     |     "type" : "record",
+                     |     "name" : "my_address",
+                     |     "fields" : [
+                     |     {"name": "street", "type": "string"},
+                     |     {"name": "city", "type": "string"}
+                     |     ]}
+                     |     },
+                     |     {"name" :"doorNum",
+                     |     "type" : {
+                     |     "type" :"array",
+                     |     "items":{
+                     |     "name" :"EachdoorNums",
+                     |     "type" : "int",
+                     |     "default":-1
+                     |     }}
+                     |     }]}
+                     """.stripMargin
+
+    val json =
+      """ {"name":"bob", "age":10,
+        |"address" : {"street":"abc", "city":"bang"},
+        |"doorNum" : [1,2,3,4]}""".stripMargin
+    WriteFilesWithAvroWriter(rows, mySchema, json)
+  }
+
+  private def WriteFilesWithAvroWriter(rows: Int,
+      mySchema: String,
+      json: String): Unit = {
+    // conversion to GenericData.Record
+    val nn = new avro.Schema.Parser().parse(mySchema)
+    val converter = new JsonAvroConverter
+    val record = converter
+      .convertToGenericDataRecord(json.getBytes(CharEncoding.UTF_8), nn)
+
+    try {
+      val writer = CarbonWriter.builder
+        .outputPath(writerPath).isTransactionalTable(false)
+        .uniqueIdentifier(System.currentTimeMillis()).buildWriterForAvroInput(nn)
+      var i = 0
+      while (i < rows) {
+        writer.write(record)
+        i = i + 1
+      }
+      writer.close()
+    }
+    catch {
+      case e: Exception => {
+        e.printStackTrace()
+        Assert.fail(e.getMessage)
+      }
+    }
+  }
+
+  def buildAvroTestDataArrayOfStructType(): Any = {
+    buildAvroTestDataArrayOfStruct(3, null)
+  }
+
+  def buildAvroTestDataArrayOfStruct(rows: Int,
+      options: util.Map[String, String]): Any = {
+
+    val mySchema =
+      """ {
+        |	"name": "address",
+        |	"type": "record",
+        |	"fields": [
+        |		{
+        |			"name": "name",
+        |			"type": "string"
+        |		},
+        |		{
+        |			"name": "age",
+        |			"type": "int"
+        |		},
+        |		{
+        |			"name": "doorNum",
+        |			"type": {
+        |				"type": "array",
+        |				"items": {
+        |					"type": "record",
+        |					"name": "my_address",
+        |					"fields": [
+        |						{
+        |							"name": "street",
+        |							"type": "string"
+        |						},
+        |						{
+        |							"name": "city",
+        |							"type": "string"
+        |						}
+        |					]
+        |				}
+        |			}
+        |		}
+        |	]
+        |} """.stripMargin
+    val json =
+      """ {"name":"bob","age":10,"doorNum" :
+        |[{"street":"abc","city":"city1"},
+        |{"street":"def","city":"city2"},
+        |{"street":"ghi","city":"city3"},
+        |{"street":"jkl","city":"city4"}]} """.stripMargin
+    WriteFilesWithAvroWriter(rows, mySchema, json)
+  }
+
+  def buildAvroTestDataStructOfArrayType(): Any = {
+    buildAvroTestDataStructOfArray(3, null)
+  }
+
+  def buildAvroTestDataStructOfArray(rows: Int,
+      options: util.Map[String, String]): Any = {
+
+    val mySchema =
+      """ {
+        |	"name": "address",
+        |	"type": "record",
+        |	"fields": [
+        |		{
+        |			"name": "name",
+        |			"type": "string"
+        |		},
+        |		{
+        |			"name": "age",
+        |			"type": "int"
+        |		},
+        |		{
+        |			"name": "address",
+        |			"type": {
+        |				"type": "record",
+        |				"name": "my_address",
+        |				"fields": [
+        |					{
+        |						"name": "street",
+        |						"type": "string"
+        |					},
+        |					{
+        |						"name": "city",
+        |						"type": "string"
+        |					},
+        |					{
+        |						"name": "doorNum",
+        |						"type": {
+        |							"type": "array",
+        |							"items": {
+        |								"name": "EachdoorNums",
+        |								"type": "int",
+        |								"default": -1
+        |							}
+        |						}
+        |					}
+        |				]
+        |			}
+        |		}
+        |	]
+        |} """.stripMargin
+
+    val json =
+      """ {
+        |	"name": "bob",
+        |	"age": 10,
+        |	"address": {
+        |		"street": "abc",
+        |		"city": "bang",
+        |		"doorNum": [
+        |			1,
+        |			2,
+        |			3,
+        |			4
+        |		]
+        |	}
+        |} """.stripMargin
+    WriteFilesWithAvroWriter(rows, mySchema, json)
+  }
+
+  test("Read sdk writer Avro output Record Type for nontransactional table") {
+    buildAvroTestDataStructType()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("bob", 10, Row("abc", "bang")),
+      Row("bob", 10, Row("abc", "bang")),
+      Row("bob", 10, Row("abc", "bang"))))
+
+  }
+
+  test("Read sdk writer Avro output with both Array and Struct Type for nontransactional table") {
+    buildAvroTestDataBothStructArrayType()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("bob", 10, Row("abc", "bang"), mutable.WrappedArray.newBuilder[Int].+=(1, 2, 3, 4)),
+      Row("bob", 10, Row("abc", "bang"), mutable.WrappedArray.newBuilder[Int].+=(1, 2, 3, 4)),
+      Row("bob", 10, Row("abc", "bang"), mutable.WrappedArray.newBuilder[Int].+=(1, 2, 3, 4))))
+  }
+
+  test("Read sdk writer Avro output with Array of struct for external table") {
+    buildAvroTestDataArrayOfStructType()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql(s"""select count(*) from sdkTable"""),
+      Seq(Row(3)))
+  }
+
+  test("Read sdk writer Avro output with struct of Array for nontransactional table") {
+    buildAvroTestDataStructOfArrayType()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql(s"""select count(*) from sdkTable"""),
+      Seq(Row(3)))
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8ef6bd1f/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
index 2f7d98b..c5aceaa 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
@@ -150,6 +150,7 @@ class SDVSuites3 extends Suites with BeforeAndAfterAll {
                     new LuceneTestCase ::
                     new TimeSeriesPreAggregateTestCase :: 
                     new TestPartitionWithGlobalSort ::
+                    new SDKwriterTestCase ::
                     new SetParameterTestCase ::
                     new PartitionWithPreAggregateTestCase :: Nil
 


[25/26] carbondata git commit: [CARBONDATA-2571] Calculating the carbonindex and carbondata file size of a table is wrong

Posted by ra...@apache.org.
[CARBONDATA-2571] Calculating the carbonindex and carbondata file size of a table is wrong

Problem:
While calculating the carbonindex file size, we were checking either the index file or the merge file. But in PR #2333 the
implementation was changed to fill in both the file name and the merge file name, so we have to consider both fields.

Solution:
While calculating the carbonindex file size, we have to consider both the files and mergeFileName fields: get the list of
index files from these two fields and then sum the sizes of those files.

This closes #2358
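
Condensed into a self-contained Java helper, the fixed accumulation reads as follows. Names mirror the patch below; the wrapper class and the SegmentFileStore import path are assumptions made for illustration:

import java.io.IOException;
import java.util.Map;

import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.metadata.SegmentFileStore;

public final class IndexSizeSketch {
  // Sum carbonindex sizes from both FolderDetails fields: the merged index
  // file (if any) plus every individual index file.
  static long carbonIndexSize(SegmentFileStore fileStore) throws IOException {
    long size = 0L;
    Map<String, SegmentFileStore.FolderDetails> locationMap = fileStore.getLocationMap();
    if (locationMap == null) {
      return size;
    }
    for (Map.Entry<String, SegmentFileStore.FolderDetails> entry : locationMap.entrySet()) {
      SegmentFileStore.FolderDetails folderDetails = entry.getValue();
      String mergeFileName = folderDetails.getMergeFileName();
      if (mergeFileName != null) {
        String mergeIndexPath = fileStore.getTablePath() + entry.getKey()
            + CarbonCommonConstants.FILE_SEPARATOR + mergeFileName;
        size += FileFactory.getCarbonFile(mergeIndexPath).getSize();
      }
      for (String indexFile : folderDetails.getFiles()) {
        String indexPath = fileStore.getTablePath() + entry.getKey()
            + CarbonCommonConstants.FILE_SEPARATOR + indexFile;
        size += FileFactory.getCarbonFile(indexPath).getSize();
      }
    }
    return size;
  }
}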


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/62e68fff
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/62e68fff
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/62e68fff

Branch: refs/heads/branch-1.4
Commit: 62e68fffbc2057f048716e55ede6fae853f1e358
Parents: d510e14
Author: dhatchayani <dh...@gmail.com>
Authored: Fri Jun 1 15:13:38 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:07:13 2018 +0530

----------------------------------------------------------------------
 .../apache/carbondata/core/util/CarbonUtil.java | 37 +++++++++++---------
 1 file changed, 20 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/62e68fff/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 1526047..5a7bce3 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2688,27 +2688,30 @@ public final class CarbonUtil {
       throws IOException {
     long carbonDataSize = 0L;
     long carbonIndexSize = 0L;
-    List<String> listOfFilesRead = new ArrayList<>();
     HashMap<String, Long> dataAndIndexSize = new HashMap<String, Long>();
-    if (fileStore.getLocationMap() != null) {
+    Map<String, SegmentFileStore.FolderDetails> locationMap = fileStore.getLocationMap();
+    if (locationMap != null) {
       fileStore.readIndexFiles();
-      Map<String, String> indexFiles = fileStore.getIndexFiles();
       Map<String, List<String>> indexFilesMap = fileStore.getIndexFilesMap();
-      for (Map.Entry<String, List<String>> entry : indexFilesMap.entrySet()) {
-        // get the size of carbonindex file
-        String indexFile = entry.getKey();
-        String mergeIndexFile = indexFiles.get(indexFile);
-        if (null != mergeIndexFile) {
-          String mergeIndexPath = indexFile
-              .substring(0, indexFile.lastIndexOf(CarbonCommonConstants.FILE_SEPARATOR) + 1)
-              + mergeIndexFile;
-          if (!listOfFilesRead.contains(mergeIndexPath)) {
-            carbonIndexSize += FileFactory.getCarbonFile(mergeIndexPath).getSize();
-            listOfFilesRead.add(mergeIndexPath);
-          }
-        } else {
-          carbonIndexSize += FileFactory.getCarbonFile(indexFile).getSize();
+      // get the size of carbonindex file
+      for (Map.Entry<String, SegmentFileStore.FolderDetails> entry : locationMap.entrySet()) {
+        SegmentFileStore.FolderDetails folderDetails = entry.getValue();
+        Set<String> carbonindexFiles = folderDetails.getFiles();
+        String mergeFileName = folderDetails.getMergeFileName();
+        if (null != mergeFileName) {
+          String mergeIndexPath =
+              fileStore.getTablePath() + entry.getKey() + CarbonCommonConstants.FILE_SEPARATOR
+                  + mergeFileName;
+          carbonIndexSize += FileFactory.getCarbonFile(mergeIndexPath).getSize();
         }
+        for (String indexFile : carbonindexFiles) {
+          String indexPath =
+              fileStore.getTablePath() + entry.getKey() + CarbonCommonConstants.FILE_SEPARATOR
+                  + indexFile;
+          carbonIndexSize += FileFactory.getCarbonFile(indexPath).getSize();
+        }
+      }
+      for (Map.Entry<String, List<String>> entry : indexFilesMap.entrySet()) {
         // get the size of carbondata files
         for (String blockFile : entry.getValue()) {
           carbonDataSize += FileFactory.getCarbonFile(blockFile).getSize();


[11/26] carbondata git commit: [CARBONDATA-2520] Clean and close datamap writers on any task failure during load

Posted by ra...@apache.org.
[CARBONDATA-2520] Clean and close datamap writers on any task failure during load

Problem: The datamap writers registered to the listener are closed or finished only when a load succeeds, not on any failure. While testing Lucene it was found that after a failed task the writer is not closed, so the write.lock file written in the Lucene index folder still exists; when the next task tries to write an index in the same directory, it fails with a "lock file already exists" error.

Solution: close the writers if any load task fails.

This closes #2321
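
The core idea, abstracted from the patch below into a minimal Java sketch: finish() is guarded by an isWritingFinished flag, so the step's close() (which now runs on failure as well) can call it without double-closing. The GuardedDataMapWriter class and the doFinish() hook are hypothetical stand-ins for the real DataMapWriter subclasses:

import java.io.IOException;

// Hypothetical stand-in for DataMapWriter subclasses such as the Lucene and
// bloom writers patched below.
public abstract class GuardedDataMapWriter {
  private boolean isWritingFinished;

  // Subclasses flush their caches and close index outputs here.
  protected abstract void doFinish() throws IOException;

  // Called from the success path and, after this patch, from the step's
  // close() on failure as well; the flag makes the second call a no-op, so
  // resources such as Lucene's write.lock are released exactly once.
  public void finish() throws IOException {
    if (!isWritingFinished) {
      doFinish();
      isWritingFinished = true;
    }
  }
}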


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/f27fe0ad
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/f27fe0ad
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/f27fe0ad

Branch: refs/heads/branch-1.4
Commit: f27fe0ad1a7123a0bc627f3cd9394df255da95ce
Parents: 009ccaf
Author: akashrn5 <ak...@gmail.com>
Authored: Thu May 17 11:37:22 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../core/datamap/dev/DataMapWriter.java         | 10 ++++++++
 .../datamap/bloom/BloomDataMapWriter.java       |  9 +++++---
 .../datamap/lucene/LuceneDataMapWriter.java     | 12 ++++++----
 .../loading/AbstractDataLoadProcessorStep.java  | 24 ++++++++++++++++++++
 .../CarbonRowDataWriterProcessorStepImpl.java   |  4 +++-
 .../steps/DataWriterBatchProcessorStepImpl.java |  4 +++-
 .../steps/DataWriterProcessorStepImpl.java      | 23 +++++++++++++++++--
 .../store/CarbonFactDataHandlerModel.java       | 22 ++++++++++--------
 8 files changed, 87 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/f27fe0ad/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java
index 03a369a..89d5d76 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java
@@ -46,6 +46,8 @@ public abstract class DataMapWriter {
 
   private List<CarbonColumn> indexColumns;
 
+  private boolean isWritingFinished;
+
   public DataMapWriter(String tablePath, String dataMapName, List<CarbonColumn> indexColumns,
       Segment segment, String shardName) {
     this.tablePath = tablePath;
@@ -133,4 +135,12 @@ public abstract class DataMapWriter {
       String tablePath, String segmentId, String dataMapName) {
     return CarbonTablePath.getSegmentPath(tablePath, segmentId) + File.separator + dataMapName;
   }
+
+  public boolean isWritingFinished() {
+    return isWritingFinished;
+  }
+
+  public void setWritingFinished(boolean writingFinished) {
+    isWritingFinished = writingFinished;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f27fe0ad/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
index b3e69f4..2791a6c 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
@@ -196,10 +196,13 @@ public class BloomDataMapWriter extends DataMapWriter {
 
   @Override
   public void finish() throws IOException {
-    if (indexBloomFilters.size() > 0) {
-      writeBloomDataMapFile();
+    if (!isWritingFinished()) {
+      if (indexBloomFilters.size() > 0) {
+        writeBloomDataMapFile();
+      }
+      releaseResouce();
+      setWritingFinished(true);
     }
-    releaseResouce();
   }
 
   protected void releaseResouce() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f27fe0ad/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
index c7eb3d8..605ec89 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
@@ -447,10 +447,14 @@ public class LuceneDataMapWriter extends DataMapWriter {
    * class.
    */
   public void finish() throws IOException {
-    flushCache(cache, getIndexColumns(), indexWriter, storeBlockletWise);
-    // finished a file , close this index writer
-    if (indexWriter != null) {
-      indexWriter.close();
+    if (!isWritingFinished()) {
+      flushCache(cache, getIndexColumns(), indexWriter, storeBlockletWise);
+      // finished a file , close this index writer
+      if (indexWriter != null) {
+        indexWriter.close();
+        indexWriter = null;
+      }
+      setWritingFinished(true);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f27fe0ad/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java b/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
index 9f2482b..eb02ede 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
@@ -25,8 +25,12 @@ import org.apache.carbondata.common.CarbonIterator;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.processing.datamap.DataMapWriterListener;
+import org.apache.carbondata.processing.loading.constants.DataLoadProcessorConstants;
 import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
 import org.apache.carbondata.processing.loading.row.CarbonRowBatch;
+import org.apache.carbondata.processing.store.CarbonDataFileAttributes;
 
 /**
  * This base abstract class for data loading.
@@ -149,6 +153,26 @@ public abstract class AbstractDataLoadProcessorStep {
    */
   protected abstract String getStepName();
 
+  /**
+   * This method registers all writer listeners and returns the listener
+   * @param bucketId bucketId
+   * @return
+   */
+  protected DataMapWriterListener getDataMapWriterListener(int bucketId) {
+    CarbonDataFileAttributes carbonDataFileAttributes =
+        new CarbonDataFileAttributes(Long.parseLong(configuration.getTaskNo()),
+            (Long) configuration.getDataLoadProperty(DataLoadProcessorConstants.FACT_TIME_STAMP));
+    DataMapWriterListener listener = new DataMapWriterListener();
+    listener.registerAllWriter(
+        configuration.getTableSpec().getCarbonTable(),
+        configuration.getSegmentId(),
+        CarbonTablePath.getShardName(
+            carbonDataFileAttributes.getTaskId(),
+            bucketId,
+            0,
+            String.valueOf(carbonDataFileAttributes.getFactTimeStamp())));
+    return listener;
+  }
 
   /**
    * Close all resources.This method is called after execute() is finished.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f27fe0ad/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
index edf67a7..e465471 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
@@ -33,6 +33,7 @@ import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.util.CarbonThreadFactory;
 import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.processing.datamap.DataMapWriterListener;
 import org.apache.carbondata.processing.loading.AbstractDataLoadProcessorStep;
 import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
 import org.apache.carbondata.processing.loading.DataField;
@@ -156,8 +157,9 @@ public class CarbonRowDataWriterProcessorStepImpl extends AbstractDataLoadProces
 
   private void doExecute(Iterator<CarbonRowBatch> iterator, int iteratorIndex) throws IOException {
     String[] storeLocation = getStoreLocation(tableIdentifier);
+    DataMapWriterListener listener = getDataMapWriterListener(0);
     CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel.createCarbonFactDataHandlerModel(
-        configuration, storeLocation, 0, iteratorIndex);
+        configuration, storeLocation, 0, iteratorIndex, listener);
     CarbonFactHandler dataHandler = null;
     boolean rowsNotExist = true;
     while (iterator.hasNext()) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f27fe0ad/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
index 369c1f2..78777ce 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
@@ -25,6 +25,7 @@ import org.apache.carbondata.core.datastore.row.CarbonRow;
 import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
 import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.processing.datamap.DataMapWriterListener;
 import org.apache.carbondata.processing.loading.AbstractDataLoadProcessorStep;
 import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
 import org.apache.carbondata.processing.loading.DataField;
@@ -85,8 +86,9 @@ public class DataWriterBatchProcessorStepImpl extends AbstractDataLoadProcessorS
           CarbonRowBatch next = iterator.next();
           // If no rows from merge sorter, then don't create a file in fact column handler
           if (next.hasNext()) {
+            DataMapWriterListener listener = getDataMapWriterListener(0);
             CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel
-                .createCarbonFactDataHandlerModel(configuration, storeLocation, 0, k++);
+                .createCarbonFactDataHandlerModel(configuration, storeLocation, 0, k++, listener);
             CarbonFactHandler dataHandler = CarbonFactHandlerFactory
                 .createCarbonFactHandler(model, CarbonFactHandlerFactory.FactHandlerType.COLUMNAR);
             dataHandler.initialise();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f27fe0ad/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
index b09fb7d..a0f29fa 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
@@ -36,6 +36,7 @@ import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
 import org.apache.carbondata.core.util.CarbonThreadFactory;
 import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.processing.datamap.DataMapWriterListener;
 import org.apache.carbondata.processing.loading.AbstractDataLoadProcessorStep;
 import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
 import org.apache.carbondata.processing.loading.DataField;
@@ -57,6 +58,8 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
 
   private long readCounter;
 
+  private DataMapWriterListener listener;
+
   public DataWriterProcessorStepImpl(CarbonDataLoadConfiguration configuration,
       AbstractDataLoadProcessorStep child) {
     super(configuration, child);
@@ -88,8 +91,9 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
     CarbonTableIdentifier tableIdentifier =
         configuration.getTableIdentifier().getCarbonTableIdentifier();
     String[] storeLocation = getStoreLocation(tableIdentifier);
+    listener = getDataMapWriterListener(0);
     return CarbonFactDataHandlerModel.createCarbonFactDataHandlerModel(configuration,
-        storeLocation, 0, 0);
+        storeLocation, 0, 0, listener);
   }
 
   @Override public Iterator<CarbonRowBatch>[] execute() throws CarbonDataLoadingException {
@@ -162,8 +166,9 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
       CarbonTableIdentifier tableIdentifier, int rangeId) {
     String[] storeLocation = getStoreLocation(tableIdentifier);
 
+    listener = getDataMapWriterListener(rangeId);
     CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel
-        .createCarbonFactDataHandlerModel(configuration, storeLocation, rangeId, 0);
+        .createCarbonFactDataHandlerModel(configuration, storeLocation, rangeId, 0, listener);
     CarbonFactHandler dataHandler = null;
     boolean rowsNotExist = true;
     while (insideRangeIterator.hasNext()) {
@@ -247,4 +252,18 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
     return null;
   }
 
+  @Override public void close() {
+    if (!closed) {
+      super.close();
+      if (listener != null) {
+        try {
+          LOGGER.info("closing all the DataMap writers registered to DataMap writer listener");
+          listener.finish();
+        } catch (IOException e) {
+          LOGGER.error(e, "error while closing the datamap writers");
+          // ignoring the exception
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f27fe0ad/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
index a725936..87a6de0 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
@@ -168,7 +168,7 @@ public class CarbonFactDataHandlerModel {
    */
   public static CarbonFactDataHandlerModel createCarbonFactDataHandlerModel(
       CarbonDataLoadConfiguration configuration, String[] storeLocation, int bucketId,
-      int taskExtension) {
+      int taskExtension, DataMapWriterListener listener) {
     CarbonTableIdentifier identifier =
         configuration.getTableIdentifier().getCarbonTableIdentifier();
     boolean[] isUseInvertedIndex =
@@ -258,15 +258,17 @@ public class CarbonFactDataHandlerModel {
     carbonFactDataHandlerModel.tableSpec = configuration.getTableSpec();
     carbonFactDataHandlerModel.sortScope = CarbonDataProcessorUtil.getSortScope(configuration);
 
-    DataMapWriterListener listener = new DataMapWriterListener();
-    listener.registerAllWriter(
-        configuration.getTableSpec().getCarbonTable(),
-        configuration.getSegmentId(),
-        CarbonTablePath.getShardName(
-            carbonDataFileAttributes.getTaskId(),
-            bucketId,
-            0,
-            String.valueOf(carbonDataFileAttributes.getFactTimeStamp())));
+    if (listener == null) {
+      listener = new DataMapWriterListener();
+      listener.registerAllWriter(
+          configuration.getTableSpec().getCarbonTable(),
+          configuration.getSegmentId(),
+          CarbonTablePath.getShardName(
+              carbonDataFileAttributes.getTaskId(),
+              bucketId,
+              0,
+              String.valueOf(carbonDataFileAttributes.getFactTimeStamp())));
+    }
     carbonFactDataHandlerModel.dataMapWriterlistener = listener;
     carbonFactDataHandlerModel.writingCoresCount = configuration.getWritingCoresCount();
 


[17/26] carbondata git commit: [CARBONDATA-2355] Support run SQL on carbondata files directly

Posted by ra...@apache.org.
[CARBONDATA-2355] Support run SQL on carbondata files directly

Support running SQL on carbondata files directly

This closes #2181
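
A hypothetical Java usage sketch of what this enables. The patch's own example (DirectSQLExample, added below) runs through a CarbonSession created by ExampleUtils; this sketch assumes a plain Spark session with the carbondata jars on the classpath, and the segment path is illustrative:

import org.apache.spark.sql.SparkSession;

public class DirectSqlSketch {
  public static void main(String[] args) {
    // Assumes the carbondata jars are on the classpath; the patch's example
    // creates a CarbonSession via ExampleUtils instead of this plain builder.
    SparkSession spark = SparkSession.builder()
        .appName("DirectSqlSketch")
        .master("local[2]")
        .getOrCreate();
    // Hypothetical path to a segment folder written by the SDK writer.
    String readPath = "./target/carbonFile/Fact/Part0/Segment_null";
    // Query the carbon files in place; no CREATE TABLE needed.
    spark.sql("SELECT * FROM carbonfile.`" + readPath + "` LIMIT 10").show();
  }
}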


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/75f638e3
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/75f638e3
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/75f638e3

Branch: refs/heads/branch-1.4
Commit: 75f638e35838e58a0677ae128e437d5cdf3b5abb
Parents: d0dc822
Author: xubo245 <60...@qq.com>
Authored: Wed Apr 18 17:34:12 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               |   7 ++
 .../carbondata/examples/DirectSQLExample.scala  | 100 +++++++++++++++++++
 .../carbondata/examples/S3UsingSDkExample.scala |   2 +-
 ...FileInputFormatWithExternalCarbonTable.scala |   2 +-
 ...tCreateTableUsingSparkCarbonFileFormat.scala |  30 +++++-
 .../TestNonTransactionalCarbonTable.scala       |   2 +-
 ...ransactionalCarbonTableWithComplexType.scala |   2 +-
 ...tSparkCarbonFileFormatWithSparkSession.scala |   2 +-
 .../datasources/SparkCarbonFileFormat.scala     |  26 ++++-
 9 files changed, 164 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/75f638e3/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 360516a..ec70919 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -128,7 +128,14 @@ Each of SQL data types are mapped into data types of SDK. Following are the mapp
 | STRING | DataTypes.STRING |
 | DECIMAL | DataTypes.createDecimalType(precision, scale) |
 
+## Run SQL on files directly
+Instead of creating table and query it, you can also query that file directly with SQL.
 
+### Example
+```
+SELECT * FROM carbonfile.`$Path`
+```
+Find example code at [DirectSQLExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala) in the CarbonData repo.
 ## API List
 
 ### Class org.apache.carbondata.sdk.file.CarbonWriterBuilder

http://git-wip-us.apache.org/repos/asf/carbondata/blob/75f638e3/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
new file mode 100644
index 0000000..a011d80
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import java.io.File
+
+import org.apache.commons.io.FileUtils
+
+import org.apache.carbondata.core.metadata.datatype.DataTypes
+import org.apache.carbondata.examples.util.ExampleUtils
+import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}
+
+/**
+ * Running SQL on carbon files directly
+ * No need to create table first
+ * TODO: support more than one carbon file
+ */
+object DirectSQLExample {
+
+  // prepare SDK writer output
+  def buildTestData(
+      path: String,
+      num: Int = 3,
+      persistSchema: Boolean = false): Any = {
+
+    // getCanonicalPath gives path with \, but the code expects /.
+    val writerPath = path.replace("\\", "/");
+
+    val fields: Array[Field] = new Array[Field](3)
+    fields(0) = new Field("name", DataTypes.STRING)
+    fields(1) = new Field("age", DataTypes.INT)
+    fields(2) = new Field("height", DataTypes.DOUBLE)
+
+    try {
+      val builder = CarbonWriter
+        .builder()
+        .outputPath(writerPath)
+        .isTransactionalTable(true)
+        .uniqueIdentifier(System.currentTimeMillis)
+        .withBlockSize(2)
+      if (persistSchema) {
+        builder.persistSchemaFile(true)
+      }
+      val writer = builder.buildWriterForCSVInput(new Schema(fields))
+      var i = 0
+      while (i < num) {
+        writer.write(Array[String]("robot" + i, String.valueOf(i), String.valueOf(i.toDouble / 2)))
+        i += 1
+      }
+      writer.close()
+    } catch {
+      case e: Exception => throw e
+    }
+  }
+
+  def cleanTestData(path: String): Unit = {
+    FileUtils.deleteDirectory(new File(path))
+  }
+
+  // scalastyle:off
+  def main(args: Array[String]) {
+    val carbonSession = ExampleUtils.createCarbonSession("DirectSQLExample")
+    val rootPath = new File(this.getClass.getResource("/").getPath
+      + "../../../..").getCanonicalPath
+    val path = s"$rootPath/examples/spark2/target/carbonFile/"
+
+    import carbonSession._
+    // 1. generate data file
+    cleanTestData(path)
+    buildTestData(path, 20)
+    val readPath = path + "Fact/Part0/Segment_null"
+
+    println("Running SQL on carbon files directly")
+    try {
+      // 2. run queries directly, no need to create table first
+      sql(s"""select * FROM  carbonfile.`$readPath` limit 10""".stripMargin).show()
+    } catch {
+      case e: Exception => throw e
+    } finally {
+      // 3. delete data files
+      cleanTestData(path)
+    }
+  }
+  // scalastyle:on
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/75f638e3/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
index 022b28e..1795960 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
@@ -36,7 +36,7 @@ object S3UsingSDKExample {
       num: Int = 3,
       persistSchema: Boolean = false): Any = {
 
-    // getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+    // getCanonicalPath gives path with \, but the code expects /.
     val writerPath = path.replace("\\", "/");
 
     val fields: Array[Field] = new Array[Field](3)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/75f638e3/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
index 019b915..e6d39d3 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
@@ -38,7 +38,7 @@ class TestCarbonFileInputFormatWithExternalCarbonTable extends QueryTest with Be
                             "../." +
                             "./src/test/resources/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/");
 
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/75f638e3/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
index 66be8e4..211bc8c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
@@ -46,7 +46,7 @@ class TestCreateTableUsingSparkCarbonFileFormat extends QueryTest with BeforeAnd
                             "../." +
                             "./src/test/resources/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/");
 
   val filePath = writerPath + "/Fact/Part0/Segment_null/"
@@ -153,6 +153,34 @@ class TestCreateTableUsingSparkCarbonFileFormat extends QueryTest with BeforeAnd
     cleanTestData()
   }
 
+  test("Running SQL directly and read carbondata files (sdk Writer Output) using the SparkCarbonFileFormat ") {
+    buildTestData(false)
+    assert(new File(filePath).exists())
+    sql("DROP TABLE IF EXISTS sdkOutputTable")
+
+    // create a table over the SDK writer output using the carbonfile data source
+    if (sqlContext.sparkContext.version.startsWith("2.1")) {
+      // Spark 2.1 takes the path as an OPTIONS entry
+      sql(s"""CREATE TABLE sdkOutputTable USING carbonfile OPTIONS (PATH '$filePath') """)
+    } else if (sqlContext.sparkContext.version.startsWith("2.2")) {
+      // Spark 2.2 takes the path via LOCATION
+      sql(
+        s"""CREATE TABLE sdkOutputTable USING carbonfile LOCATION
+           |'$filePath' """.stripMargin)
+    } else {
+      // TODO: handle other Spark versions
+    }
+
+    val directSQL = sql(s"""SELECT * FROM carbonfile.`$filePath`""")
+    directSQL.show(false)
+    checkAnswer(sql("select * from sdkOutputTable"), directSQL)
+
+    sql("DROP TABLE sdkOutputTable")
+    // drop table should not delete the files
+    assert(new File(filePath).exists())
+    cleanTestData()
+  }
+
 
   test("should not allow to alter datasource carbontable ") {
     buildTestData(false)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/75f638e3/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 61b37d5..0083733 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -55,7 +55,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
                             "../." +
                             "./target/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/")
 
   def buildTestDataSingleFile(): Any = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/75f638e3/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
index d4de428..19aaf72 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
@@ -39,7 +39,7 @@ class TestNonTransactionalCarbonTableWithComplexType extends QueryTest with Befo
                             "../." +
                             "./src/test/resources/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/")
 
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/75f638e3/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
index 54b23a5..79b64ae 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
@@ -36,7 +36,7 @@ object TestSparkCarbonFileFormatWithSparkSession {
                             "../." +
                             "./src/test/resources/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/");
 
   val filePath = writerPath + "/Fact/Part0/Segment_null/"

http://git-wip-us.apache.org/repos/asf/carbondata/blob/75f638e3/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
index 934f5c7..697eec5 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.execution.datasources
 
 import java.net.URI
 
+import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.hadoop.conf.Configuration
@@ -68,8 +69,23 @@ class SparkCarbonFileFormat extends FileFormat
   override def inferSchema(sparkSession: SparkSession,
       options: Map[String, String],
       files: Seq[FileStatus]): Option[StructType] = {
-    val filePaths = CarbonUtil.getFilePathExternalFilePath(
-      options.get("path").get)
+    val filePaths = if (options.isEmpty) {
+      val carbondataFiles = files.seq.filter { each =>
+        if (each.isFile) {
+          each.getPath.getName.contains(".carbondata")
+        } else {
+          false
+        }
+      }
+
+      carbondataFiles.map { each =>
+        each.getPath.toString
+      }.toList.asJava
+    } else {
+      CarbonUtil.getFilePathExternalFilePath(
+        options.get("path").get)
+    }
+
     if (filePaths.size() == 0) {
       throw new SparkException("CarbonData file is not present in the location mentioned in DDL")
     }
@@ -193,7 +209,11 @@ class SparkCarbonFileFormat extends FileFormat
         val fileSplit =
           new FileSplit(new Path(new URI(file.filePath)), file.start, file.length, Array.empty)
 
-        val path: String = options.get("path").get
+        val path: String = if (options.isEmpty) {
+          file.filePath
+        } else {
+          options.get("path").get
+        }
         val endindex: Int = path.indexOf("Fact") - 1
         val tablePath = path.substring(0, endindex)
         lazy val identifier: AbsoluteTableIdentifier = AbsoluteTableIdentifier.from(


[05/26] carbondata git commit: [CARBONDATA-2514] Added condition to check for duplicate column names

Posted by ra...@apache.org.
[CARBONDATA-2514] Added condition to check for duplicate column names

1. A check for duplicate column names was not present.
2. IndexFileReader was not being closed, so the index file could not be deleted.

This closes #2332
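
For readers skimming the diff, the new validation boils down to tracking seen names in a set; here is a minimal standalone sketch (class and method names are illustrative, not CarbonData APIs):

```
import java.util.HashSet;
import java.util.Set;

public class DuplicateColumnCheckSketch {
  // Rejects a schema that contains the same column name twice, mirroring
  // the check added to CarbonWriterBuilder in the diff below.
  static void validate(String[] fieldNames) {
    Set<String> seen = new HashSet<>();
    for (String name : fieldNames) {
      if (!seen.add(name)) {
        throw new RuntimeException("Duplicate column " + name + " found in table schema");
      }
    }
  }

  public static void main(String[] args) {
    validate(new String[] {"name", "age"});   // passes
    validate(new String[] {"name", "name"});  // throws RuntimeException
  }
}
```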


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/d4f9c340
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/d4f9c340
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/d4f9c340

Branch: refs/heads/branch-1.4
Commit: d4f9c3401740091e098e9e2fb3a888e5755d6dc6
Parents: b401a9f
Author: kunal642 <ku...@gmail.com>
Authored: Tue May 22 15:16:32 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../apache/carbondata/core/util/CarbonUtil.java | 44 +++++++++++---------
 .../carbondata/core/util/DataTypeUtil.java      |  2 +
 .../sdk/file/CarbonWriterBuilder.java           |  7 ++++
 .../sdk/file/AvroCarbonWriterTest.java          | 40 ++++++++++++++++++
 4 files changed, 73 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/d4f9c340/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 9dc4aa2..23d02ef 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2380,27 +2380,31 @@ public final class CarbonUtil {
   public static org.apache.carbondata.format.TableInfo inferSchemaFromIndexFile(
       String indexFilePath, String tableName) throws IOException {
     CarbonIndexFileReader indexFileReader = new CarbonIndexFileReader();
-    indexFileReader.openThriftReader(indexFilePath);
-    org.apache.carbondata.format.IndexHeader readIndexHeader = indexFileReader.readIndexHeader();
-    List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
-    List<org.apache.carbondata.format.ColumnSchema> table_columns =
-        readIndexHeader.getTable_columns();
-    for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+    try {
+      indexFileReader.openThriftReader(indexFilePath);
+      org.apache.carbondata.format.IndexHeader readIndexHeader = indexFileReader.readIndexHeader();
+      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
+      List<org.apache.carbondata.format.ColumnSchema> table_columns =
+          readIndexHeader.getTable_columns();
+      for (int i = 0; i < table_columns.size(); i++) {
+        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      }
+      // only columnSchema is the valid entry, reset all dummy entries.
+      TableSchema tableSchema = getDummyTableSchema(tableName, columnSchemaList);
+
+      ThriftWrapperSchemaConverterImpl thriftWrapperSchemaConverter =
+          new ThriftWrapperSchemaConverterImpl();
+      org.apache.carbondata.format.TableSchema thriftFactTable =
+          thriftWrapperSchemaConverter.fromWrapperToExternalTableSchema(tableSchema);
+      org.apache.carbondata.format.TableInfo tableInfo =
+          new org.apache.carbondata.format.TableInfo(thriftFactTable,
+              new ArrayList<org.apache.carbondata.format.TableSchema>());
+
+      tableInfo.setDataMapSchemas(null);
+      return tableInfo;
+    } finally {
+      indexFileReader.closeThriftReader();
     }
-    // only columnSchema is the valid entry, reset all dummy entries.
-    TableSchema tableSchema = getDummyTableSchema(tableName, columnSchemaList);
-
-    ThriftWrapperSchemaConverterImpl thriftWrapperSchemaConverter =
-        new ThriftWrapperSchemaConverterImpl();
-    org.apache.carbondata.format.TableSchema thriftFactTable =
-        thriftWrapperSchemaConverter.fromWrapperToExternalTableSchema(tableSchema);
-    org.apache.carbondata.format.TableInfo tableInfo =
-        new org.apache.carbondata.format.TableInfo(thriftFactTable,
-            new ArrayList<org.apache.carbondata.format.TableSchema>());
-
-    tableInfo.setDataMapSchemas(null);
-    return tableInfo;
   }
 
   private static TableSchema getDummyTableSchema(String tableName,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d4f9c340/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index f7f71b3..e06c82e 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -341,6 +341,7 @@ public final class DataTypeUtil {
       try {
         if (null != dateFormat && !dateFormat.trim().isEmpty()) {
           dateFormatter = new SimpleDateFormat(dateFormat);
+          dateFormatter.setLenient(false);
         } else {
           dateFormatter = timeStampformatter.get();
         }
@@ -376,6 +377,7 @@ public final class DataTypeUtil {
       try {
         if (null != dateFormat && !dateFormat.trim().isEmpty()) {
           dateFormatter = new SimpleDateFormat(dateFormat);
+          dateFormatter.setLenient(false);
         } else {
           dateFormatter = timeStampformatter.get();
         }
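
A side note on the setLenient(false) change just above: SimpleDateFormat is lenient by default and silently normalizes invalid field values instead of rejecting them. A standalone illustration of the difference (demo code, not part of the patch):

```
import java.text.ParseException;
import java.text.SimpleDateFormat;

public class LenientDateDemo {
  public static void main(String[] args) {
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
    try {
      fmt.setLenient(true);
      // Lenient parsing rolls month 13 over into January of the next year
      // instead of failing, so the bad record goes undetected.
      System.out.println(fmt.parse("2018-13-01"));
      fmt.setLenient(false);
      // Strict parsing rejects the same value, letting the bad-record
      // handling (e.g. bad_records_action=fail) kick in.
      fmt.parse("2018-13-01");
    } catch (ParseException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}
```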

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d4f9c340/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index e846da4..2277ab0 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -21,9 +21,11 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -421,6 +423,7 @@ public class CarbonWriterBuilder {
 
   private void buildTableSchema(Field[] fields, TableSchemaBuilder tableSchemaBuilder,
       List<String> sortColumnsList, ColumnSchema[] sortColumnsSchemaList) {
+    Set<String> uniqueFields = new HashSet<>();
     // a counter which will be used in case of complex array type. This valIndex will be assigned
     // to child of complex array type in the order val1, val2 so that each array type child is
     // differentiated to any level
@@ -442,6 +445,10 @@ public class CarbonWriterBuilder {
     int i = 0;
     for (Field field : fields) {
       if (null != field) {
+        if (!uniqueFields.add(field.getFieldName())) {
+          throw new RuntimeException(
+              "Duplicate column " + field.getFieldName() + " found in table schema");
+        }
         int isSortColumn = sortColumnsList.indexOf(field.getFieldName());
         if (isSortColumn > -1) {
           // unsupported types for ("array", "struct", "double", "float", "decimal")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d4f9c340/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
index b70e74d..03a4f47 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
@@ -21,9 +21,12 @@ import java.io.File;
 import java.io.FileFilter;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
+import java.util.HashMap;
+import java.util.Map;
 
 import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import org.apache.avro.generic.GenericData;
@@ -450,6 +453,43 @@ public class AvroCarbonWriterTest {
     FileUtils.deleteDirectory(new File(path));
   }
 
+  @Test
+  public void testExceptionForDuplicateColumns() throws IOException, InvalidLoadOptionException {
+    Field[] field = new Field[2];
+    field[0] = new Field("name", DataTypes.STRING);
+    field[1] = new Field("name", DataTypes.STRING);
+    CarbonWriterBuilder writer = CarbonWriter.builder().isTransactionalTable(false)
+        .uniqueIdentifier(System.currentTimeMillis()).outputPath(path);
+
+    try {
+      writer.buildWriterForCSVInput(new org.apache.carbondata.sdk.file.Schema(field));
+      Assert.fail();
+    } catch (Exception e) {
+      assert(e.getMessage().contains("Duplicate column name found in table schema"));
+    }
+    FileUtils.deleteDirectory(new File(path));
+  }
 
+  @Test
+  public void testExceptionForInvalidDate() throws IOException, InvalidLoadOptionException {
+    Field[] field = new Field[2];
+    field[0] = new Field("name", DataTypes.STRING);
+    field[1] = new Field("date", DataTypes.DATE);
+    CarbonWriterBuilder writer = CarbonWriter.builder().isTransactionalTable(false)
+        .uniqueIdentifier(System.currentTimeMillis()).outputPath(path);
+
+    try {
+      Map<String, String> loadOptions = new HashMap<String, String>();
+      loadOptions.put("bad_records_action", "fail");
+      CarbonWriter carbonWriter =
+          writer.isTransactionalTable(false).withLoadOptions(loadOptions).buildWriterForCSVInput(new org.apache.carbondata.sdk.file.Schema(field));
+      carbonWriter.write(new String[] { "k", "20-02-2233" });
+      carbonWriter.close();
+      Assert.fail();
+    } catch (Exception e) {
+      assert(e.getMessage().contains("Data load failed due to bad record"));
+    }
+    FileUtils.deleteDirectory(new File(path));
+  }
 
 }


[08/26] carbondata git commit: [CARBONDATA-2545] Fix some spell error in CarbonData

Posted by ra...@apache.org.
[CARBONDATA-2545] Fix some spell error in CarbonData

Rename FileTypeInerface to FileTypeInterface

This closes #2346


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/04332535
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/04332535
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/04332535

Branch: refs/heads/branch-1.4
Commit: 043325358432cd8ef6d0c98a662ee8d62200cbca
Parents: 8b73585
Author: xubo245 <xu...@huawei.com>
Authored: Mon May 28 11:47:27 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../datastore/impl/DefaultFileTypeProvider.java |  2 +-
 .../core/datastore/impl/FileFactory.java        | 14 ++++-----
 .../core/datastore/impl/FileTypeInerface.java   | 32 --------------------
 .../core/datastore/impl/FileTypeInterface.java  | 32 ++++++++++++++++++++
 4 files changed, 40 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/04332535/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java
index b58a473..f54e9af 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java
@@ -26,7 +26,7 @@ import org.apache.carbondata.core.datastore.filesystem.ViewFSCarbonFile;
 
 import org.apache.hadoop.conf.Configuration;
 
-public class DefaultFileTypeProvider implements FileTypeInerface {
+public class DefaultFileTypeProvider implements FileTypeInterface {
 
   public FileReader getFileHolder(FileFactory.FileType fileType) {
     switch (fileType) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04332535/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
index 1529649..5c46bcf 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
@@ -50,9 +50,9 @@ public final class FileFactory {
     configuration.addResource(new Path("../core-default.xml"));
   }
 
-  private static FileTypeInerface fileFileTypeInerface = new DefaultFileTypeProvider();
-  public static void setFileTypeInerface(FileTypeInerface fileTypeInerface) {
-    fileFileTypeInerface = fileTypeInerface;
+  private static FileTypeInterface fileFileTypeInterface = new DefaultFileTypeProvider();
+  public static void setFileTypeInterface(FileTypeInterface fileTypeInterface) {
+    fileFileTypeInterface = fileTypeInterface;
   }
   private FileFactory() {
 
@@ -63,7 +63,7 @@ public final class FileFactory {
   }
 
   public static FileReader getFileHolder(FileType fileType) {
-    return fileFileTypeInerface.getFileHolder(fileType);
+    return fileFileTypeInterface.getFileHolder(fileType);
   }
 
   public static FileType getFileType(String path) {
@@ -83,14 +83,14 @@ public final class FileFactory {
   }
 
   public static CarbonFile getCarbonFile(String path) {
-    return fileFileTypeInerface.getCarbonFile(path, getFileType(path));
+    return fileFileTypeInterface.getCarbonFile(path, getFileType(path));
   }
   public static CarbonFile getCarbonFile(String path, FileType fileType) {
-    return fileFileTypeInerface.getCarbonFile(path, fileType);
+    return fileFileTypeInterface.getCarbonFile(path, fileType);
   }
   public static CarbonFile getCarbonFile(String path, FileType fileType,
       Configuration hadoopConf) {
-    return fileFileTypeInerface.getCarbonFile(path, fileType, hadoopConf);
+    return fileFileTypeInterface.getCarbonFile(path, fileType, hadoopConf);
   }
 
   public static DataInputStream getDataInputStream(String path, FileType fileType)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04332535/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInerface.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInerface.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInerface.java
deleted file mode 100644
index 413261c..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInerface.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.impl;
-
-import org.apache.carbondata.core.datastore.FileReader;
-import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
-
-import org.apache.hadoop.conf.Configuration;
-
-public interface FileTypeInerface {
-
-  FileReader getFileHolder(FileFactory.FileType fileType);
-  CarbonFile getCarbonFile(String path, FileFactory.FileType fileType);
-  CarbonFile getCarbonFile(String path, FileFactory.FileType fileType, Configuration configuration);
-
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/04332535/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java
new file mode 100644
index 0000000..84da148
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.impl;
+
+import org.apache.carbondata.core.datastore.FileReader;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+
+import org.apache.hadoop.conf.Configuration;
+
+public interface FileTypeInterface {
+
+  FileReader getFileHolder(FileFactory.FileType fileType);
+  CarbonFile getCarbonFile(String path, FileFactory.FileType fileType);
+  CarbonFile getCarbonFile(String path, FileFactory.FileType fileType, Configuration configuration);
+
+}
+


[07/26] carbondata git commit: [CARBONDATA-2552]Fix Data Mismatch for Complex Data type Array of Timestamp with Dictionary Include

Posted by ra...@apache.org.
[CARBONDATA-2552]Fix Data Mismatch for Complex Data type Array of Timestamp with Dictionary Include

Fix Data Mismatch for Complex Data type Array and Struct of Timestamp with Dictionary Include
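
The fix itself is a single condition in PrimitiveQueryType (diff below): a length prefix must only be written for variable-length values. A rough standalone sketch of the framing logic, with illustrative names, shows why writing a prefix for fixed-size dictionary surrogates corrupts the stream:

```
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class FramingSketch {
  // Variable-length values (plain no-dictionary) need a length prefix so the
  // reader knows where each element ends; dictionary and direct-dictionary
  // values are fixed-size surrogates, so a prefix would be read back as data.
  static void writeValue(DataOutputStream out, byte[] value,
      boolean isDictionary, boolean isDirectDictionary) throws IOException {
    if (!isDictionary && !isDirectDictionary) {
      out.writeShort(value.length);
    }
    out.write(value);
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buf);
    // A 4-byte direct-dictionary surrogate (e.g. an encoded timestamp): no prefix.
    writeValue(out, new byte[] {0, 0, 0, 42}, false, true);
    System.out.println(buf.size()); // 4, not 6
  }
}
```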


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/b792b3eb
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/b792b3eb
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/b792b3eb

Branch: refs/heads/branch-1.4
Commit: b792b3eba32262681dcfc0fd9f276a00bee0e771
Parents: c7fff9e
Author: Indhumathi27 <in...@gmail.com>
Authored: Mon May 28 22:17:36 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:04:20 2018 +0530

----------------------------------------------------------------------
 .../core/scan/complextypes/PrimitiveQueryType.java  |  2 +-
 .../TestLoadDataWithHiveSyntaxDefaultFormat.scala   | 16 ++++++++++++++++
 2 files changed, 17 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/b792b3eb/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java b/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java
index 899957e..d7723b3 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java
@@ -95,7 +95,7 @@ public class PrimitiveQueryType extends ComplexQueryType implements GenericQuery
       DimensionRawColumnChunk[] rawColumnChunks, int rowNumber,
       int pageNumber, DataOutputStream dataOutputStream) throws IOException {
     byte[] currentVal = copyBlockDataChunk(rawColumnChunks, rowNumber, pageNumber);
-    if (!this.isDictionary) {
+    if (!this.isDictionary && !this.isDirectDictionary) {
       dataOutputStream.writeShort(currentVal.length);
     }
     dataOutputStream.write(currentVal);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b792b3eb/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
index d0d578d..7f150be 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
@@ -365,6 +365,22 @@ class TestLoadDataWithHiveSyntaxDefaultFormat extends QueryTest with BeforeAndAf
     sql("drop table if exists complexcarbontable")
   }
 
+  test("test Complex Data type - Array and Struct of timestamp with dictionary include") {
+    sql("DROP TABLE IF EXISTS array_timestamp")
+    sql(
+      "create table array_timestamp (date1 array<timestamp>,date2 struct<date:timestamp> ) stored" +
+      " by 'carbondata' tblproperties" +
+      "('dictionary_include'='date1,date2')")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+    sql("insert into array_timestamp values('2015/01/01$2016/01/01','2017/01/01')")
+    checkExistence(sql("select * from array_timestamp "),
+      true, "2015-01-01 00:00:00.0, 2016-01-01 00:00:00.0")
+    checkExistence(sql("select * from array_timestamp "),
+      true, "2017-01-01 00:00:00.0")
+    sql("DROP TABLE IF EXISTS array_timestamp")
+  }
+
   test("array<string> and string datatype for same column is not working properly") {
     sql("drop table if exists complexcarbontable")
     sql("create table complexcarbontable(deviceInformationId int, MAC array<string>, channelsId string, "+