Posted to commits@carbondata.apache.org by gv...@apache.org on 2018/06/08 11:40:57 UTC

[32/50] [abbrv] carbondata git commit: [CARBONDATA-2500] Add new API to read user's schema in SDK

[CARBONDATA-2500] Add new API to read user's schema in SDK

The field order in the schema that the SDK returns on read differs from the field order the user specified when writing; this change adds an API that returns the schema in the user's original field order.

This closes #2341
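
For reference, a minimal usage sketch of the added API (not part of the commit; the class name is hypothetical, while the store path and segment layout follow the new tests in this patch): locate a .carbonindex file written by the SDK, read the schema back in the user's declared order via readUserSchema, and use the column names as a reader projection.

    import java.io.File;
    import java.io.FilenameFilter;
    import java.util.List;

    import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
    import org.apache.carbondata.sdk.file.CarbonReader;

    public class ReadUserSchemaExample {
      public static void main(String[] args) throws Exception {
        // Store path used by the SDK writer in the tests below
        String path = "./testWriteFiles";

        // Locate a carbonindex file (transactional table layout, as in the tests)
        File[] indexFiles = new File(path + "/Fact/Part0/Segment_null/").listFiles(
            new FilenameFilter() {
              @Override public boolean accept(File dir, String name) {
                return name.endsWith("carbonindex");
              }
            });

        // New API: columns come back sorted by schema ordinal,
        // i.e. in the order the user declared them when writing
        List<ColumnSchema> columns = CarbonReader.readUserSchema(indexFiles[0].getAbsolutePath());

        // Build a projection from the recovered column order
        String[] projection = new String[columns.size()];
        for (int i = 0; i < columns.size(); i++) {
          projection[i] = columns.get(i).getColumnName();
        }

        CarbonReader reader = CarbonReader.builder(path, "_temp")
            .projection(projection)
            .build();
        while (reader.hasNext()) {
          Object[] row = (Object[]) reader.readNextRow();
          // process row ...
        }
        reader.close();
      }
    }

readSchemaInIndexFile returns the columns in file order; readUserSchema additionally sorts them by schema ordinal so they match the user's write-time schema.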


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/8896a633
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/8896a633
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/8896a633

Branch: refs/heads/spark-2.3
Commit: 8896a63342fdc5257665131074b168e24ceb5b72
Parents: 22d5035
Author: xubo245 <xu...@huawei.com>
Authored: Fri May 25 16:07:55 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Tue May 29 15:33:41 2018 +0800

----------------------------------------------------------------------
 .../ThriftWrapperSchemaConverterImpl.java       |  12 +-
 .../core/metadata/schema/table/CarbonTable.java |   6 +-
 .../core/metadata/schema/table/TableInfo.java   |   4 +-
 .../core/metadata/schema/table/TableSchema.java |  14 +-
 .../schema/table/TableSchemaBuilder.java        |   2 +-
 .../core/reader/CarbonHeaderReader.java         |   4 +-
 .../util/AbstractDataFileFooterConverter.java   |   6 +-
 .../apache/carbondata/core/util/CarbonUtil.java |   8 +-
 .../core/util/DataFileFooterConverter.java      |   4 +-
 .../core/util/DataFileFooterConverter2.java     |   2 +-
 .../core/util/DataFileFooterConverterV3.java    |   4 +-
 .../ThriftWrapperSchemaConverterImplTest.java   |   4 +-
 .../hadoop/testutil/StoreCreator.java           |   2 +-
 .../presto/util/CarbonDataStoreCreator.scala    |   2 +-
 .../command/carbonTableSchemaCommon.scala       |   2 +-
 .../org/apache/spark/sql/CarbonSource.scala     |   2 +-
 .../datasources/SparkCarbonFileFormat.scala     |   2 +-
 .../spark/sql/hive/CarbonFileMetastore.scala    |   2 +-
 .../spark/sql/hive/CarbonHiveMetaStore.scala    |   2 +-
 .../carbondata/sdk/file/CarbonReader.java       |  63 +++
 .../carbondata/sdk/file/CarbonReaderTest.java   | 454 ++++++++++++++++++-
 21 files changed, 549 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
index 644e6a3..f03b997 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
@@ -269,7 +269,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
       thriftColumnSchema.add(fromWrapperToExternalColumnSchema(wrapperColumnSchema));
     }
     org.apache.carbondata.format.SchemaEvolution schemaEvolution =
-        fromWrapperToExternalSchemaEvolution(wrapperTableSchema.getSchemaEvalution());
+        fromWrapperToExternalSchemaEvolution(wrapperTableSchema.getSchemaEvolution());
     org.apache.carbondata.format.TableSchema externalTableSchema =
         new org.apache.carbondata.format.TableSchema(
             wrapperTableSchema.getTableId(), thriftColumnSchema, schemaEvolution);
@@ -535,7 +535,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
         externalColumnSchema.getParentColumnTableRelations();
     if (null != parentColumnTableRelation) {
       wrapperColumnSchema.setParentColumnTableRelations(
-          fromExtrenalToWrapperParentTableColumnRelations(parentColumnTableRelation));
+          fromExternalToWrapperParentTableColumnRelations(parentColumnTableRelation));
     }
     return wrapperColumnSchema;
   }
@@ -595,11 +595,11 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
       listOfColumns.add(fromExternalToWrapperColumnSchema(externalColumnSchema));
     }
     wrapperTableSchema.setListOfColumns(listOfColumns);
-    wrapperTableSchema.setSchemaEvalution(
+    wrapperTableSchema.setSchemaEvolution(
         fromExternalToWrapperSchemaEvolution(externalTableSchema.getSchema_evolution()));
     if (externalTableSchema.isSetBucketingInfo()) {
       wrapperTableSchema.setBucketingInfo(
-          fromExternalToWarpperBucketingInfo(externalTableSchema.bucketingInfo));
+          fromExternalToWrapperBucketingInfo(externalTableSchema.bucketingInfo));
     }
     if (externalTableSchema.getPartitionInfo() != null) {
       wrapperTableSchema.setPartitionInfo(
@@ -608,7 +608,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
     return wrapperTableSchema;
   }
 
-  private BucketingInfo fromExternalToWarpperBucketingInfo(
+  private BucketingInfo fromExternalToWrapperBucketingInfo(
       org.apache.carbondata.format.BucketingInfo externalBucketInfo) {
     List<ColumnSchema> listOfColumns = new ArrayList<ColumnSchema>();
     for (org.apache.carbondata.format.ColumnSchema externalColumnSchema :
@@ -661,7 +661,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
     return childSchema;
   }
 
-  private List<ParentColumnTableRelation> fromExtrenalToWrapperParentTableColumnRelations(
+  private List<ParentColumnTableRelation> fromExternalToWrapperParentTableColumnRelations(
       List<org.apache.carbondata.format.ParentColumnTableRelation> thirftParentColumnRelation) {
     List<ParentColumnTableRelation> parentColumnTableRelationList = new ArrayList<>();
     for (org.apache.carbondata.format.ParentColumnTableRelation carbonTableRelation :

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index b1ed981..ba051be 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -566,15 +566,15 @@ public class CarbonTable implements Serializable {
     List<CarbonDimension> dimensions = tableDimensionsMap.get(tableName);
     List<CarbonMeasure> measures = tableMeasuresMap.get(tableName);
     List<CarbonColumn> columnList = new ArrayList<>(dimensions.size() + measures.size());
-    List<CarbonColumn> complexdimensionList = new ArrayList<>(dimensions.size());
+    List<CarbonColumn> complexDimensionList = new ArrayList<>(dimensions.size());
     for (CarbonColumn column : dimensions) {
       if (column.isComplex()) {
-        complexdimensionList.add(column);
+        complexDimensionList.add(column);
       } else {
         columnList.add(column);
       }
     }
-    columnList.addAll(complexdimensionList);
+    columnList.addAll(complexDimensionList);
     for (CarbonColumn column : measures) {
       if (!(column.getColName().equals("default_dummy_measure"))) {
         columnList.add(column);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
index c7bcf2e..38145e5 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
@@ -124,13 +124,13 @@ public class TableInfo implements Serializable, Writable {
   }
 
   private void updateIsSchemaModified() {
-    if (null != factTable.getSchemaEvalution()) {
+    if (null != factTable.getSchemaEvolution()) {
       // If schema evolution entry list size is > 1 that means an alter operation is performed
       // which has added the new schema entry in the schema evolution list.
       // Currently apart from create table schema evolution entries
       // are getting added only in the alter operations.
       isSchemaModified =
-          factTable.getSchemaEvalution().getSchemaEvolutionEntryList().size() > 1 ? true : false;
+          factTable.getSchemaEvolution().getSchemaEvolutionEntryList().size() > 1 ? true : false;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
index f008821..3d9e068 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
@@ -61,7 +61,7 @@ public class TableSchema implements Serializable, Writable {
   /**
    * History of schema evolution of this table
    */
-  private SchemaEvolution schemaEvalution;
+  private SchemaEvolution schemaEvolution;
 
   /**
    * contains all key value pairs for table properties set by user in craete DDL
@@ -112,17 +112,17 @@ public class TableSchema implements Serializable, Writable {
   }
 
   /**
-   * @return the schemaEvalution
+   * @return the schemaEvolution
    */
-  public SchemaEvolution getSchemaEvalution() {
-    return schemaEvalution;
+  public SchemaEvolution getSchemaEvolution() {
+    return schemaEvolution;
   }
 
   /**
-   * @param schemaEvalution the schemaEvalution to set
+   * @param schemaEvolution the schemaEvolution to set
    */
-  public void setSchemaEvalution(SchemaEvolution schemaEvalution) {
-    this.schemaEvalution = schemaEvalution;
+  public void setSchemaEvolution(SchemaEvolution schemaEvolution) {
+    this.schemaEvolution = schemaEvolution;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
index 03d03f8..bb7e901 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
@@ -90,7 +90,7 @@ public class TableSchemaBuilder {
     schema.setBucketingInfo(null);
     SchemaEvolution schemaEvol = new SchemaEvolution();
     schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
-    schema.setSchemaEvalution(schemaEvol);
+    schema.setSchemaEvolution(schemaEvol);
     List<ColumnSchema> allColumns = new LinkedList<>(sortColumns);
     allColumns.addAll(dimension);
     allColumns.addAll(complex);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
index 9bbdca9..dfd5815 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
@@ -23,7 +23,7 @@ import java.util.List;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.format.FileHeader;
 
-import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchmeaToWrapperColumnSchema;
+import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
 
 import org.apache.thrift.TBase;
 
@@ -78,7 +78,7 @@ public class CarbonHeaderReader {
     List<ColumnSchema> columnSchemaList = new ArrayList<>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (org.apache.carbondata.format.ColumnSchema table_column : table_columns) {
-      ColumnSchema col = thriftColumnSchmeaToWrapperColumnSchema(table_column);
+      ColumnSchema col = thriftColumnSchemaToWrapperColumnSchema(table_column);
       col.setColumnReferenceId(col.getColumnUniqueId());
       columnSchemaList.add(col);
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
index e30ad03..f005d88 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
@@ -89,7 +89,7 @@ public abstract class AbstractDataFileFooterConverter {
       List<org.apache.carbondata.format.ColumnSchema> table_columns =
           readIndexHeader.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       // get the segment info
       SegmentInfo segmentInfo = getSegmentInfo(readIndexHeader.getSegment_info());
@@ -151,7 +151,7 @@ public abstract class AbstractDataFileFooterConverter {
       List<org.apache.carbondata.format.ColumnSchema> table_columns =
           readIndexHeader.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       // get the segment info
       SegmentInfo segmentInfo = getSegmentInfo(readIndexHeader.getSegment_info());
@@ -284,7 +284,7 @@ public abstract class AbstractDataFileFooterConverter {
     return blockletIndex;
   }
 
-  protected ColumnSchema thriftColumnSchmeaToWrapperColumnSchema(
+  protected ColumnSchema thriftColumnSchemaToWrapperColumnSchema(
       org.apache.carbondata.format.ColumnSchema externalColumnSchema) {
     ColumnSchema wrapperColumnSchema = new ColumnSchema();
     wrapperColumnSchema.setColumnUniqueId(externalColumnSchema.getColumn_id());

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 9ccd772..1526047 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2204,7 +2204,7 @@ public final class CarbonUtil {
     return tableInfo;
   }
 
-  public static ColumnSchema thriftColumnSchmeaToWrapperColumnSchema(
+  public static ColumnSchema thriftColumnSchemaToWrapperColumnSchema(
       org.apache.carbondata.format.ColumnSchema externalColumnSchema) {
     ColumnSchema wrapperColumnSchema = new ColumnSchema();
     wrapperColumnSchema.setColumnUniqueId(externalColumnSchema.getColumn_id());
@@ -2387,7 +2387,7 @@ public final class CarbonUtil {
       List<org.apache.carbondata.format.ColumnSchema> table_columns =
           readIndexHeader.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       // only columnSchema is the valid entry, reset all dummy entries.
       TableSchema tableSchema = getDummyTableSchema(tableName, columnSchemaList);
@@ -2412,7 +2412,7 @@ public final class CarbonUtil {
     TableSchema tableSchema = new TableSchema();
     tableSchema.setTableName(tableName);
     tableSchema.setBucketingInfo(null);
-    tableSchema.setSchemaEvalution(null);
+    tableSchema.setSchemaEvolution(null);
     tableSchema.setTableId(UUID.randomUUID().toString());
     tableSchema.setListOfColumns(columnSchemaList);
 
@@ -2422,7 +2422,7 @@ public final class CarbonUtil {
     List<SchemaEvolutionEntry> schEntryList = new ArrayList<>();
     schEntryList.add(schemaEvolutionEntry);
     schemaEvol.setSchemaEvolutionEntryList(schEntryList);
-    tableSchema.setSchemaEvalution(schemaEvol);
+    tableSchema.setSchemaEvolution(schemaEvol);
     return tableSchema;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
index d665379..670536e 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
@@ -60,7 +60,7 @@ public class DataFileFooterConverter extends AbstractDataFileFooterConverter {
       List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
       List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       dataFileFooter.setColumnInTable(columnSchemaList);
 
@@ -135,7 +135,7 @@ public class DataFileFooterConverter extends AbstractDataFileFooterConverter {
       FileFooter footer = reader.readFooter();
       List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
     } finally {
       if (null != fileReader) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
index 863e1df..07391dc 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
@@ -51,7 +51,7 @@ public class DataFileFooterConverter2 extends AbstractDataFileFooterConverter {
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
     for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
     }
     dataFileFooter.setColumnInTable(columnSchemaList);
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
index 214e217..6a968b4 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
@@ -61,7 +61,7 @@ public class DataFileFooterConverterV3 extends AbstractDataFileFooterConverter {
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
     }
     dataFileFooter.setColumnInTable(columnSchemaList);
     List<org.apache.carbondata.format.BlockletIndex> leaf_node_indices_Thrift =
@@ -91,7 +91,7 @@ public class DataFileFooterConverterV3 extends AbstractDataFileFooterConverter {
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
     }
     return columnSchemaList;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java b/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
index d4bb344..67c7594 100644
--- a/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
@@ -1421,7 +1421,7 @@ public class ThriftWrapperSchemaConverterImplTest {
         return columnSchemas;
       }
 
-      @Mock public SchemaEvolution getSchemaEvalution() {
+      @Mock public SchemaEvolution getSchemaEvolution() {
         return schemaEvolution;
       }
 
@@ -1537,7 +1537,7 @@ public class ThriftWrapperSchemaConverterImplTest {
       final SchemaEvolution schemaEvolution = new SchemaEvolution();
       final Map mapTableProperties = new HashMap<String, String>();
 
-      @Mock public SchemaEvolution getSchemaEvalution() {
+      @Mock public SchemaEvolution getSchemaEvolution() {
         return schemaEvolution;
       }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
index 9fd1812..63acad3 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
@@ -278,7 +278,7 @@ public class StoreCreator {
     tableSchema.setListOfColumns(columnSchemas);
     SchemaEvolution schemaEvol = new SchemaEvolution();
     schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
-    tableSchema.setSchemaEvalution(schemaEvol);
+    tableSchema.setSchemaEvolution(schemaEvol);
     tableSchema.setTableId(UUID.randomUUID().toString());
     tableInfo.setTableUniqueName(
         identifier.getCarbonTableIdentifier().getTableUniqueName()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
----------------------------------------------------------------------
diff --git a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
index 6a8c40d..f4415b8 100644
--- a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
+++ b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
@@ -310,7 +310,7 @@ object CarbonDataStoreCreator {
     val schemaEvol: SchemaEvolution = new SchemaEvolution()
     schemaEvol.setSchemaEvolutionEntryList(
       new util.ArrayList[SchemaEvolutionEntry]())
-    tableSchema.setSchemaEvalution(schemaEvol)
+    tableSchema.setSchemaEvolution(schemaEvol)
     tableSchema.setTableId(UUID.randomUUID().toString)
     tableInfo.setTableUniqueName(
       absoluteTableIdentifier.getCarbonTableIdentifier.getTableUniqueName

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index a830185..aa40a1f 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -694,7 +694,7 @@ class TableNewProcessor(cm: TableModel) {
     }
     tableSchema.setTableName(cm.tableName)
     tableSchema.setListOfColumns(allColumns.asJava)
-    tableSchema.setSchemaEvalution(schemaEvol)
+    tableSchema.setSchemaEvolution(schemaEvol)
     tableInfo.setDatabaseName(cm.databaseNameOp.getOrElse(null))
     tableInfo.setTableUniqueName(CarbonTable.buildUniqueName(cm.databaseNameOp.getOrElse(null),
       cm.tableName))

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
index 8376136..0a23d06 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
@@ -358,7 +358,7 @@ object CarbonSource {
     tableInfo.setDatabaseName(identifier.getDatabaseName)
     val schemaEvolutionEntry = new SchemaEvolutionEntry
     schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
-    tableInfo.getFactTable.getSchemaEvalution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
+    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
     val map = if (!metaStore.isReadFromHiveMetaStore && isTransactionalTable) {
       metaStore.saveToDisk(tableInfo, identifier.getTablePath)
       new java.util.HashMap[String, String]()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
index 1da6507..934f5c7 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
@@ -79,7 +79,7 @@ class SparkCarbonFileFormat extends FileFormat
       .getColumn_schema
     var colArray = ArrayBuffer[StructField]()
     for (i <- 0 to table_columns.size() - 1) {
-      val col = CarbonUtil.thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i))
+      val col = CarbonUtil.thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i))
       colArray += (new StructField(col.getColumnName,
         CarbonScalaUtil.convertCarbonToSparkDataType(col.getDataType), false))
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
index 2d24abf..81a6bed 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
@@ -369,7 +369,7 @@ class CarbonFileMetastore extends CarbonMetaStore {
       absoluteTableIdentifier: AbsoluteTableIdentifier): String = {
     val schemaEvolutionEntry = new schema.SchemaEvolutionEntry
     schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
-    tableInfo.getFactTable.getSchemaEvalution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
+    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
     removeTableFromMetadata(tableInfo.getDatabaseName, tableInfo.getFactTable.getTableName)
     CarbonMetadata.getInstance().loadTableMetadata(tableInfo)
     addTableCache(tableInfo, absoluteTableIdentifier)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
index 1300c22..2e6ebee 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
@@ -186,7 +186,7 @@ class CarbonHiveMetaStore extends CarbonFileMetastore {
       absoluteTableIdentifier: AbsoluteTableIdentifier): String = {
     val schemaEvolutionEntry = new schema.SchemaEvolutionEntry
     schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
-    tableInfo.getFactTable.getSchemaEvalution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
+    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
     CarbonUtil.convertToMultiGsonStrings(tableInfo, " ", "", ",")
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index d85bf4b..9ae940b 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -17,20 +17,30 @@
 
 package org.apache.carbondata.sdk.file;
 
+import java.io.DataInputStream;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.metadata.converter.SchemaConverter;
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.reader.CarbonHeaderReader;
+import org.apache.carbondata.core.reader.CarbonIndexFileReader;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
 
 import org.apache.hadoop.mapreduce.RecordReader;
 
+
 /**
  * Reader for carbondata file
  */
@@ -102,6 +112,59 @@ public class CarbonReader<T> {
   }
 
   /**
+   * Read carbonindex file and return the schema
+   *
+   * @param indexFilePath complete path including index file name
+   * @return null, if the index file is not present in the path.
+   * List<ColumnSchema> from the index file.
+   * @throws IOException
+   */
+  public static List<ColumnSchema> readSchemaInIndexFile(String indexFilePath) throws IOException {
+    CarbonFile indexFile =
+        FileFactory.getCarbonFile(indexFilePath, FileFactory.getFileType(indexFilePath));
+    if (!indexFile.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)) {
+      throw new IOException("Not an index file name");
+    }
+    // read schema from the first index file
+    DataInputStream dataInputStream =
+        FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
+    byte[] bytes = new byte[(int) indexFile.getSize()];
+    try {
+      //get the file in byte buffer
+      dataInputStream.readFully(bytes);
+      CarbonIndexFileReader indexReader = new CarbonIndexFileReader();
+      // read from byte buffer.
+      indexReader.openThriftReader(bytes);
+      // get the index header
+      org.apache.carbondata.format.IndexHeader readIndexHeader = indexReader.readIndexHeader();
+      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
+      List<org.apache.carbondata.format.ColumnSchema> table_columns =
+          readIndexHeader.getTable_columns();
+      for (org.apache.carbondata.format.ColumnSchema columnSchema : table_columns) {
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(columnSchema));
+      }
+      return columnSchemaList;
+    } finally {
+      dataInputStream.close();
+    }
+  }
+
+  /**
+   * Read CarbonData file and return the user schema,
+   * the schema order is the same as user save schema
+   */
+  public static List<ColumnSchema> readUserSchema(String indexFilePath) throws IOException {
+    List<ColumnSchema> columnSchemas = readSchemaInIndexFile(indexFilePath);
+    Collections.sort(columnSchemas, new Comparator<ColumnSchema>() {
+      @Override
+      public int compare(ColumnSchema o1, ColumnSchema o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+    return columnSchemas;
+  }
+
+  /**
    * Read schema file and return table info object
    */
   public static TableInfo readSchemaFile(String schemaFilePath) throws IOException {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 756dbe4..30d4091 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -17,14 +17,15 @@
 
 package org.apache.carbondata.sdk.file;
 
-import java.io.File;
-import java.io.FileFilter;
-import java.io.FilenameFilter;
-import java.io.IOException;
+import java.io.*;
 import java.sql.Date;
 import java.sql.Timestamp;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 
+import org.apache.avro.generic.GenericData;
+import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -36,10 +37,9 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import junit.framework.TestCase;
 import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.apache.commons.lang.CharEncoding;
+import org.junit.*;
+import tech.allegro.schema.json2avro.converter.JsonAvroConverter;
 
 public class CarbonReaderTest extends TestCase {
 
@@ -371,7 +371,8 @@ public class CarbonReaderTest extends TestCase {
     Assert.assertNotNull(dataFiles);
     Assert.assertTrue(dataFiles.length > 0);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp")
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
         .projection(new String[]{
             "stringField"
             , "shortField"
@@ -381,7 +382,343 @@ public class CarbonReaderTest extends TestCase {
             , "boolField"
             , "dateField"
             , "timeField"
-            , "decimalField"}).build();
+            , "decimalField"})
+        .build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
+  @Test
+  public void testReadSchemaFileAndSort() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("shortField", DataTypes.SHORT);
+    fields[2] = new Field("intField", DataTypes.INT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+
+    File[] dataFiles = new File(path + "/Metadata").listFiles(new FilenameFilter() {
+      @Override public boolean accept(File dir, String name) {
+        return name.endsWith("schema");
+      }
+    });
+    TableInfo tableInfo = CarbonReader.readSchemaFile(dataFiles[0].getAbsolutePath());
+
+    List<ColumnSchema> columns = tableInfo.getFactTable().getListOfColumns();
+
+    // sort the schema
+    Collections.sort(tableInfo.getFactTable().getListOfColumns(), new Comparator<ColumnSchema>() {
+      @Override
+      public int compare(ColumnSchema o1, ColumnSchema o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+
+    // Transform the schema
+    String[] strings= new String[columns.size()];
+    for (int i = 0; i < columns.size(); i++) {
+      strings[i]= columns.get(i).getColumnName();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    Assert.assertNotNull(dataFiles);
+    Assert.assertTrue(dataFiles.length > 0);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(strings)
+        .build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
+  @Test
+  public void testReadSchemaInDataFileAndSort() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("shortField", DataTypes.SHORT);
+    fields[2] = new Field("intField", DataTypes.INT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+
+    File[] dataFiles2 = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
+      @Override public boolean accept(File dir, String name) {
+        return name.endsWith("carbondata");
+      }
+    });
+
+    List<ColumnSchema> columns = CarbonReader.readSchemaInDataFile(dataFiles2[0].getAbsolutePath());
+
+    // sort the schema
+    Collections.sort(columns, new Comparator<ColumnSchema>() {
+      @Override
+      public int compare(ColumnSchema o1, ColumnSchema o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+
+    // Transform the schema
+    String[] strings= new String[columns.size()];
+    for (int i = 0; i < columns.size(); i++) {
+      strings[i]= columns.get(i).getColumnName();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(strings)
+        .build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
+  @Test
+  public void testReadUserSchema() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("shortField", DataTypes.SHORT);
+    fields[2] = new Field("intField", DataTypes.INT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+
+    File[] dataFiles2 = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
+      @Override public boolean accept(File dir, String name) {
+        return name.endsWith("carbonindex");
+      }
+    });
+
+    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+
+    // Transform the schema
+    String[] strings= new String[columns.size()];
+    for (int i = 0; i < columns.size(); i++) {
+      strings[i]= columns.get(i).getColumnName();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(strings)
+        .build();
 
     int i = 0;
     while (reader.hasNext()) {
@@ -510,4 +847,101 @@ public class CarbonReaderTest extends TestCase {
       assert(row.length==0);
     }
   }
+
+  private void WriteAvroComplexData(String mySchema, String json, String[] sortColumns, String path)
+      throws IOException, InvalidLoadOptionException {
+
+    // conversion to GenericData.Record
+    org.apache.avro.Schema nn = new org.apache.avro.Schema.Parser().parse(mySchema);
+    JsonAvroConverter converter = new JsonAvroConverter();
+    GenericData.Record record = converter.convertToGenericDataRecord(
+        json.getBytes(CharEncoding.UTF_8), nn);
+
+    try {
+      CarbonWriter writer = CarbonWriter.builder()
+          .outputPath(path)
+          .isTransactionalTable(true)
+          .buildWriterForAvroInput(nn);
+
+      for (int i = 0; i < 100; i++) {
+        writer.write(record);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw e;
+    }
+  }
+
+  // TODO: support get schema of complex data type
+  @Ignore
+  public void testReadUserSchemaOfComplex() throws IOException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    String mySchema =
+        "{" +
+            "  \"name\": \"address\", " +
+            "   \"type\": \"record\", " +
+            "    \"fields\": [  " +
+            "  { \"name\": \"name\", \"type\": \"string\"}, " +
+            "  { \"name\": \"age\", \"type\": \"int\"}, " +
+            "  { " +
+            "    \"name\": \"address\", " +
+            "      \"type\": { " +
+            "    \"type\" : \"record\", " +
+            "        \"name\" : \"my_address\", " +
+            "        \"fields\" : [ " +
+            "    {\"name\": \"street\", \"type\": \"string\"}, " +
+            "    {\"name\": \"city\", \"type\": \"string\"} " +
+            "  ]} " +
+            "  }, " +
+            "  {\"name\" :\"doorNum\", " +
+            "   \"type\" : { " +
+            "   \"type\" :\"array\", " +
+            "   \"items\":{ " +
+            "   \"name\" :\"EachdoorNums\", " +
+            "   \"type\" : \"int\", " +
+            "   \"default\":-1} " +
+            "              } " +
+            "  }] " +
+            "}";
+
+    String json = "{\"name\":\"bob\", \"age\":10, \"address\" : {\"street\":\"abc\", \"city\":\"bang\"}, "
+        + "   \"doorNum\" : [1,2,3,4]}";
+
+    try {
+      WriteAvroComplexData(mySchema, json, null, path);
+    } catch (InvalidLoadOptionException e) {
+      e.printStackTrace();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    File[] dataFiles = segmentFolder.listFiles(new FileFilter() {
+      @Override
+      public boolean accept(File pathname) {
+        return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
+      }
+    });
+    Assert.assertNotNull(dataFiles);
+    Assert.assertEquals(1, dataFiles.length);
+
+
+    File[] dataFiles2 = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
+      @Override
+      public boolean accept(File dir, String name) {
+        return name.endsWith("carbonindex");
+      }
+    });
+
+    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+
+    for (int i = 0; i < columns.size(); i++) {
+      System.out.println(columns.get(i).getColumnName() + "\t" + columns.get(i).getSchemaOrdinal());
+    }
+    FileUtils.deleteDirectory(new File(path));
+  }
+
 }