Posted to commits@carbondata.apache.org by ja...@apache.org on 2017/03/30 05:12:20 UTC

[12/13] incubator-carbondata git commit: Removed Kettle-related code and refactored

Removed Kettle-related code and refactored

Removed carbonplugins

Added back method

Fixed test

Fixed test


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/e6b60907
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/e6b60907
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/e6b60907

Branch: refs/heads/master
Commit: e6b60907f0be2efd89884b81490a112ef71fd9cd
Parents: 3d5cf45
Author: ravipesala <ra...@gmail.com>
Authored: Sun Mar 26 16:10:47 2017 +0530
Committer: jackylk <ja...@huawei.com>
Committed: Thu Mar 30 10:34:14 2017 +0530

----------------------------------------------------------------------
 conf/carbon.properties.template                 |    2 -
 conf/dataload.properties.template               |    7 -
 core/pom.xml                                    |   15 -
 .../core/constants/CarbonCommonConstants.java   |    7 -
 .../dataholder/CarbonWriteDataHolder.java       |    5 +-
 .../executor/impl/AbstractQueryExecutor.java    |    2 +-
 .../core/scan/executor/util/QueryUtil.java      |    2 +-
 .../core/scan/wrappers/ByteArrayWrapper.java    |    9 +
 .../apache/carbondata/core/util/CarbonUtil.java |   39 -
 .../core/writer/HierarchyValueWriterForCSV.java |  318 ---
 .../store/impl/DFSFileHolderImplUnitTest.java   |    2 -
 .../store/impl/FileHolderImplUnitTest.java      |    3 +-
 .../scan/complextypes/ArrayQueryTypeTest.java   |    4 +-
 .../scan/expression/ExpressionResultTest.java   |    4 +-
 .../DriverQueryStatisticsRecorderImplTest.java  |    2 +-
 .../carbondata/core/util/CarbonUtilTest.java    |   56 +-
 .../carbondata/examples/CarbonExample.scala     |    3 -
 .../carbondata/examples/util/ExampleUtils.scala |    7 -
 .../examples/CarbonSessionExample.scala         |    1 -
 .../examples/SparkSessionExample.scala          |    1 -
 .../hadoop/ft/CarbonInputMapperTest.java        |    3 +-
 .../hadoop/test/util/StoreCreator.java          |  242 +--
 integration/spark-common-test/pom.xml           |    1 -
 .../dataload/TestLoadDataGeneral.scala          |    2 +-
 .../allqueries/DoubleDataTypeTest.scala         |    2 +-
 .../dataload/TestLoadDataWithHiveSyntax.scala   |    3 +-
 .../carbondata/spark/load/CarbonLoaderUtil.java |  134 --
 .../spark/merger/RowResultMerger.java           |    2 +-
 .../spark/merger/TupleConversionAdapter.java    |   14 +-
 .../merger/exeception/SliceMergerException.java |   78 +
 .../carbondata/spark/util/LoadMetadataUtil.java |    7 +-
 .../apache/carbondata/spark/CarbonOption.scala  |    2 -
 .../spark/rdd/CarbonDataLoadRDD.scala           |  754 -------
 .../carbondata/spark/rdd/CarbonScanRDD.scala    |    3 -
 .../apache/carbondata/spark/rdd/Compactor.scala |    2 -
 .../spark/rdd/DataManagementFunc.scala          |    4 -
 .../spark/rdd/NewCarbonDataLoadRDD.scala        |   86 +-
 .../carbondata/spark/rdd/UpdateDataLoad.scala   |    5 +-
 .../carbondata/spark/util/CarbonScalaUtil.scala |   52 -
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala |    2 +-
 .../execution/command/carbonTableSchema.scala   |    2 -
 .../spark/sql/test/TestQueryExecutor.scala      |    1 -
 .../scala/org/apache/spark/util/SparkUtil.scala |    2 +-
 integration/spark/pom.xml                       |    1 -
 .../spark/CarbonDataFrameWriter.scala           |   12 +-
 .../spark/rdd/CarbonDataRDDFactory.scala        |  112 +-
 .../execution/command/carbonTableSchema.scala   |   35 +-
 .../spark/sql/test/SparkTestQueryExecutor.scala |    4 +-
 .../BadRecordLoggerSharedDictionaryTest.scala   |   12 -
 .../dataload/TestLoadDataWithSinglePass.scala   |   10 +-
 integration/spark2/pom.xml                      |    1 -
 .../spark/rdd/CarbonDataRDDFactory.scala        |  178 +-
 .../spark/sql/CarbonDataFrameWriter.scala       |    1 -
 .../execution/command/carbonTableSchema.scala   |   34 +-
 .../sql/test/Spark2TestQueryExecutor.scala      |    1 -
 .../org/apache/spark/util/TableLoader.scala     |    6 -
 .../bucketing/TableBucketingTestCase.scala      |   10 +-
 .../vectorreader/VectorReaderTestCase.scala     |    2 +-
 pom.xml                                         |    2 -
 .../carbonplugins/.kettle/kettle.properties     |   10 -
 .../plugin.xml                                  |   28 -
 .../carbonautoagggraphgenerator/plugin.xml      |   28 -
 .../steps/carbonautoaggslicemerger/plugin.xml   |   28 -
 .../steps/carboncsvbasedseqgen/plugin.xml       |   29 -
 .../plugins/steps/carboncsvreader/plugin.xml    |   29 -
 .../steps/carboncsvreaderstrep/plugin.xml       |   29 -
 .../plugins/steps/carbondatawriter/plugin.xml   |   27 -
 .../plugins/steps/carbonfactreader/plugin.xml   |   28 -
 .../plugins/steps/carbongroupby/plugin.xml      |   27 -
 .../steps/carboninmemoryfactreader/plugin.xml   |   27 -
 .../plugins/steps/carbonseqgen/plugin.xml       |   28 -
 .../plugins/steps/carbonslicemerger/plugin.xml  |   28 -
 .../steps/carbonsortkeyandgroupby/plugin.xml    |   28 -
 .../plugins/steps/mdkeygenstep/plugin.xml       |   28 -
 .../plugins/steps/sortkeystep/plugin.xml        |   27 -
 processing/pom.xml                              |   26 -
 .../processing/csvload/BlockDetails.java        |   84 +
 .../processing/csvload/DataGraphExecuter.java   |  475 -----
 .../processing/csvload/GraphExecutionUtil.java  |  242 ---
 .../processing/csvreaderstep/BlockDetails.java  |   85 -
 .../csvreaderstep/BoundedDataStream.java        |  124 --
 .../processing/csvreaderstep/CsvInput.java      |  431 ----
 .../processing/csvreaderstep/CsvInputData.java  |   47 -
 .../processing/csvreaderstep/CsvInputMeta.java  |  971 ---------
 .../processing/csvreaderstep/RddInputUtils.java |   42 -
 .../csvreaderstep/RddInpututilsForUpdate.java   |   41 -
 .../csvreaderstep/UnivocityCsvParser.java       |  224 --
 .../csvreaderstep/UnivocityCsvParserVo.java     |  220 --
 .../csvreaderstep/step-attributes.xml           |  229 --
 .../dataprocessor/DataProcessTaskStatus.java    |  301 ---
 .../dataprocessor/IDataProcessStatus.java       |  194 --
 .../queue/impl/RecordComparator.java            |   43 -
 .../holder/DataProcessorRecordHolder.java       |   38 -
 .../processing/datatypes/ArrayDataType.java     |   29 -
 .../processing/datatypes/GenericDataType.java   |   18 -
 .../processing/datatypes/PrimitiveDataType.java |   25 -
 .../processing/datatypes/StructDataType.java    |   38 -
 .../exception/CarbonDataProcessorException.java |   78 -
 .../graphgenerator/GraphGenerator.java          |  965 ---------
 .../graphgenerator/GraphGeneratorConstants.java |   74 -
 .../graphgenerator/GraphGeneratorException.java |   79 -
 .../configuration/GraphConfigurationInfo.java   |  894 --------
 .../processing/mdkeygen/MDKeyGenStep.java       |  531 -----
 .../processing/mdkeygen/MDKeyGenStepData.java   |   37 -
 .../processing/mdkeygen/MDKeyGenStepMeta.java   |  529 -----
 .../processing/mdkeygen/file/FileData.java      |   72 -
 .../processing/mdkeygen/file/FileManager.java   |   59 -
 .../mdkeygen/file/IFileManagerComposite.java    |   57 -
 .../mdkeygen/messages/messages_en_US.properties |   22 -
 .../merger/exeception/SliceMergerException.java |   78 -
 .../merger/step/CarbonSliceMergerStep.java      |  159 --
 .../merger/step/CarbonSliceMergerStepData.java  |   41 -
 .../merger/step/CarbonSliceMergerStepMeta.java  |  470 ----
 .../sort/impl/ParallelReadMergeSorterImpl.java  |    2 +-
 ...arallelReadMergeSorterWithBucketingImpl.java |    3 +-
 .../steps/DataWriterProcessorStepImpl.java      |    2 +-
 .../schema/metadata/ArrayWrapper.java           |   62 -
 .../schema/metadata/ColumnSchemaDetails.java    |  100 -
 .../metadata/ColumnSchemaDetailsWrapper.java    |  108 -
 .../processing/schema/metadata/ColumnsInfo.java |  430 ----
 .../schema/metadata/HierarchiesInfo.java        |   51 -
 .../processing/schema/metadata/TableOption.java |   80 -
 .../schema/metadata/TableOptionWrapper.java     |  104 -
 .../sortdata/IntermediateFileMerger.java        |   85 +-
 .../sortandgroupby/sortdata/SortDataRows.java   |  104 +-
 .../sortandgroupby/sortdata/SortParameters.java |   15 -
 .../sortdata/SortTempFileChunkHolder.java       |  136 +-
 .../sortdatastep/SortKeyStep.java               |  283 ---
 .../sortdatastep/SortKeyStepData.java           |   51 -
 .../sortdatastep/SortKeyStepMeta.java           |  490 -----
 .../store/CarbonFactDataHandlerColumnar.java    |  296 +--
 .../store/CarbonFactDataHandlerModel.java       |   14 -
 .../store/SingleThreadFinalSortFilesMerger.java |    8 +-
 .../processing/store/file/FileData.java         |   52 +
 .../processing/store/file/FileManager.java      |   59 +
 .../store/file/IFileManagerComposite.java       |   57 +
 .../store/writer/AbstractFactDataWriter.java    |    2 +-
 .../store/writer/CarbonDataWriterVo.java        |    2 +-
 .../CarbonCSVBasedDimSurrogateKeyGen.java       |  495 -----
 .../csvbased/CarbonCSVBasedSeqGenData.java      |  136 --
 .../csvbased/CarbonCSVBasedSeqGenMeta.java      | 1436 -------------
 .../csvbased/CarbonCSVBasedSeqGenStep.java      | 2009 ------------------
 .../FileStoreSurrogateKeyGenForCSV.java         |  339 ---
 .../dbbased/messages/messages_en_US.properties  |   61 -
 .../util/CarbonDataProcessorUtil.java           |   53 -
 .../processing/util/CarbonSchemaParser.java     | 1076 ----------
 .../hadoopfileinput/HadoopFileInputMeta.java    |   27 -
 .../messages/messages_en_US.properties          |   18 -
 .../carbon/datastore/BlockIndexStoreTest.java   |    2 +-
 .../carbondata/lcm/locks/LocalFileLockTest.java |    2 +-
 .../carbondata/processing/StoreCreator.java     |  506 +++++
 .../carbondata/test/util/StoreCreator.java      |  539 -----
 152 files changed, 1164 insertions(+), 18945 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/conf/carbon.properties.template
----------------------------------------------------------------------
diff --git a/conf/carbon.properties.template b/conf/carbon.properties.template
index ac2d20e..b5f5101 100644
--- a/conf/carbon.properties.template
+++ b/conf/carbon.properties.template
@@ -23,8 +23,6 @@ carbon.storelocation=hdfs://hacluster/Opt/CarbonStore
 carbon.ddl.base.hdfs.url=hdfs://hacluster/opt/data
 #Path where the bad records are stored
 carbon.badRecords.location=/opt/Carbon/Spark/badrecords
-#Mandatory. path to kettle home
-carbon.kettle.home=$<SPARK_HOME>/carbonlib/carbonplugins
 
 #################### Performance Configuration ##################
 ######## DataLoading Configuration ########

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/conf/dataload.properties.template
----------------------------------------------------------------------
diff --git a/conf/dataload.properties.template b/conf/dataload.properties.template
index cfafb4c..cab712a 100644
--- a/conf/dataload.properties.template
+++ b/conf/dataload.properties.template
@@ -20,13 +20,6 @@
 # you should change to the code path of your local machine
 carbon.storelocation=/home/david/Documents/incubator-carbondata/examples/spark2/target/store
 
-#true: use kettle to load data
-#false: use new flow to load data
-use_kettle=true
-
-# you should change to the code path of your local machine
-carbon.kettle.home=/home/david/Documents/incubator-carbondata/processing/carbonplugins
-
 #csv delimiter character
 delimiter=,
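
With the kettle flags removed, a minimal dataload.properties needs only the
surviving keys shown above; an illustrative sketch (the store path is a
placeholder, not taken from this commit):

  # store location on the local machine (placeholder path)
  carbon.storelocation=/path/to/carbondata/store

  # csv delimiter character
  delimiter=,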
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index 5e46af3..976f8d2 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -45,21 +45,6 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>pentaho-kettle</groupId>
-      <artifactId>kettle-engine</artifactId>
-      <version>${kettle.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>pentaho-kettle</groupId>
-      <artifactId>kettle-core</artifactId>
-      <version>${kettle.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>pentaho-kettle</groupId>
-      <artifactId>kettle-db</artifactId>
-      <version>${kettle.version}</version>
-    </dependency>
-    <dependency>
       <groupId>com.google.code.gson</groupId>
       <artifactId>gson</artifactId>
       <version>2.3.1</version>

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index d13bc79..789c321 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -69,10 +69,6 @@ public final class CarbonCommonConstants {
    */
   public static final String STORE_LOCATION_DEFAULT_VAL = "../carbon.store";
   /**
-   * the folder name of kettle home path
-   */
-  public static final String KETTLE_HOME_NAME = "carbonplugins";
-  /**
    * CARDINALITY_INCREMENT_DEFAULT_VALUE
    */
   public static final int CARDINALITY_INCREMENT_VALUE_DEFAULT_VAL = 10;
@@ -1174,9 +1170,6 @@ public final class CarbonCommonConstants {
 
   public static final String LOCAL_FILE_PREFIX = "file://";
 
-  public static final String USE_KETTLE = "use_kettle";
-
-  public static final String USE_KETTLE_DEFAULT = "false";
   public static final String CARBON_CUSTOM_BLOCK_DISTRIBUTION = "carbon.custom.block.distribution";
   public static final String CARBON_CUSTOM_BLOCK_DISTRIBUTION_DEFAULT = "false";
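
For reference, the surviving constant pair above is resolved through
CarbonProperties in the usual key/default pattern; a minimal sketch, assuming
the getProperty(key, default) call that appears later in this diff:

  // illustrative only: read the remaining toggle with its shipped default
  String value = CarbonProperties.getInstance().getProperty(
      CarbonCommonConstants.CARBON_CUSTOM_BLOCK_DISTRIBUTION,
      CarbonCommonConstants.CARBON_CUSTOM_BLOCK_DISTRIBUTION_DEFAULT);
  boolean customDistribution = Boolean.parseBoolean(value);  // false by default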
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/main/java/org/apache/carbondata/core/datastore/dataholder/CarbonWriteDataHolder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/dataholder/CarbonWriteDataHolder.java b/core/src/main/java/org/apache/carbondata/core/datastore/dataholder/CarbonWriteDataHolder.java
index e1aa0a0..fb21d95 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/dataholder/CarbonWriteDataHolder.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/dataholder/CarbonWriteDataHolder.java
@@ -34,7 +34,7 @@ public class CarbonWriteDataHolder {
   private byte[][] byteValues;
 
   /**
-   * byteValues for no dictionary and non kettle flow.
+   * byteValues for no dictionary.
    */
   private byte[][][] byteValuesForNonDictionary;
 
@@ -72,7 +72,6 @@ public class CarbonWriteDataHolder {
 
   /**
    * Method to initialise double array
-   * TODO Remove after kettle flow got removed.
    *
    * @param size
    */
@@ -90,7 +89,7 @@ public class CarbonWriteDataHolder {
    *
    * @param size
    */
-  public void initialiseByteArrayValuesWithOutKettle(int size) {
+  public void initialiseByteArrayValuesForKey(int size) {
     if (size < 1) {
       throw new IllegalArgumentException("Invalid array size");
     }
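
Only the method name changes for callers; a minimal sketch of the renamed
entry point, assuming a default-constructed holder and a hypothetical row
count:

  // sketch: same initialisation as before, minus the kettle-era name
  CarbonWriteDataHolder holder = new CarbonWriteDataHolder();
  holder.initialiseByteArrayValuesForKey(1000);  // was initialiseByteArrayValuesWithOutKettle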

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
index 2a5c342..1ddb3e6 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
@@ -345,7 +345,7 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
     blockExecutionInfo
         .setFixedLengthKeySize(getKeySize(currentBlockQueryDimensions, segmentProperties));
     Set<Integer> dictionaryColumnBlockIndex = new HashSet<Integer>();
-    List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
+    Set<Integer> noDictionaryColumnBlockIndex = new HashSet<Integer>();
     // get the block index to be read from file for query dimension
     // for both dictionary columns and no dictionary columns
     QueryUtil.fillQueryDimensionsBlockIndexes(currentBlockQueryDimensions,

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
index 5a98e44..be5e8a4 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
@@ -607,7 +607,7 @@ public class QueryUtil {
    */
   public static void fillQueryDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
       Map<Integer, Integer> columnOrdinalToBlockIndexMapping,
-      Set<Integer> dictionaryDimensionBlockIndex, List<Integer> noDictionaryDimensionBlockIndex) {
+      Set<Integer> dictionaryDimensionBlockIndex, Set<Integer> noDictionaryDimensionBlockIndex) {
     for (QueryDimension queryDimension : queryDimensions) {
       if (CarbonUtil.hasEncoding(queryDimension.getDimension().getEncoder(), Encoding.DICTIONARY)
           && queryDimension.getDimension().numberOfChild() == 0) {
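
Switching the no-dictionary container from List<Integer> to Set<Integer>
makes duplicate block indexes collapse instead of accumulate; a minimal
illustration of the difference (standalone demo, not CarbonData code):

  import java.util.HashSet;
  import java.util.Set;

  public class SetVsListDemo {
    public static void main(String[] args) {
      Set<Integer> noDictionaryIndexes = new HashSet<Integer>();
      noDictionaryIndexes.add(3);
      noDictionaryIndexes.add(3);               // duplicate is ignored
      System.out.println(noDictionaryIndexes);  // prints [3]; a List would keep both
    }
  }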

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/main/java/org/apache/carbondata/core/scan/wrappers/ByteArrayWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/wrappers/ByteArrayWrapper.java b/core/src/main/java/org/apache/carbondata/core/scan/wrappers/ByteArrayWrapper.java
index ff82fc6..93bf8eb 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/wrappers/ByteArrayWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/wrappers/ByteArrayWrapper.java
@@ -82,6 +82,15 @@ public class ByteArrayWrapper implements Comparable<ByteArrayWrapper> {
   /**
    * to get the no dictionary column data
    *
+   * @return no dictionary keys
+   */
+  public byte[][] getNoDictionaryKeys() {
+    return this.noDictionaryKeys;
+  }
+
+  /**
+   * to get the no dictionary column data
+   *
    * @param index of the no dictionary key
    * @return no dictionary key for the index
    */
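
The new bulk accessor sits beside the existing per-index one; a hedged sketch
of a caller walking every no-dictionary key (the populated wrapper is assumed,
since this diff shows no setter):

  // sketch: read all no-dictionary column bytes for a row in one call
  byte[][] keys = wrapper.getNoDictionaryKeys();  // wrapper: an already-populated ByteArrayWrapper
  for (byte[] key : keys) {
    // each entry is one no-dictionary column's raw bytes
  }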

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 6d510f6..bf8c03b 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -23,12 +23,10 @@ import java.io.ByteArrayOutputStream;
 import java.io.Closeable;
 import java.io.DataInputStream;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.ObjectInputStream;
 import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
 import java.nio.charset.Charset;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -83,7 +81,6 @@ import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TCompactProtocol;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TIOStreamTransport;
-import org.pentaho.di.core.exception.KettleException;
 
 public final class CarbonUtil {
 
@@ -631,42 +628,6 @@ public final class CarbonUtil {
     return cardinality;
   }
 
-  public static void writeLevelCardinalityFile(String loadFolderLoc, String tableName,
-      int[] dimCardinality) throws KettleException {
-    String levelCardinalityFilePath =
-        loadFolderLoc + File.separator + CarbonCommonConstants.LEVEL_METADATA_FILE + tableName
-            + CarbonCommonConstants.CARBON_METADATA_EXTENSION;
-    FileOutputStream fileOutputStream = null;
-    FileChannel channel = null;
-    try {
-      int dimCardinalityArrLength = dimCardinality.length;
-
-      // first four bytes for writing the length of array, remaining for array data
-      ByteBuffer buffer = ByteBuffer.allocate(CarbonCommonConstants.INT_SIZE_IN_BYTE
-          + dimCardinalityArrLength * CarbonCommonConstants.INT_SIZE_IN_BYTE);
-
-      fileOutputStream = new FileOutputStream(levelCardinalityFilePath);
-      channel = fileOutputStream.getChannel();
-      buffer.putInt(dimCardinalityArrLength);
-
-      for (int i = 0; i < dimCardinalityArrLength; i++) {
-        buffer.putInt(dimCardinality[i]);
-      }
-
-      buffer.flip();
-      channel.write(buffer);
-      buffer.clear();
-
-      LOGGER.info("Level cardinality file written to : " + levelCardinalityFilePath);
-    } catch (IOException e) {
-      LOGGER.error("Error while writing level cardinality file : " + levelCardinalityFilePath + e
-          .getMessage());
-      throw new KettleException("Not able to write level cardinality file", e);
-    } finally {
-      closeStreams(channel, fileOutputStream);
-    }
-  }
-
   /**
    * From beeline if a delimeter is passed as \001, in code we get it as
    * escaped string as \\001. So this method will unescape the slash again and

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/main/java/org/apache/carbondata/core/writer/HierarchyValueWriterForCSV.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/HierarchyValueWriterForCSV.java b/core/src/main/java/org/apache/carbondata/core/writer/HierarchyValueWriterForCSV.java
deleted file mode 100644
index 1e427aa..0000000
--- a/core/src/main/java/org/apache/carbondata/core/writer/HierarchyValueWriterForCSV.java
+++ /dev/null
@@ -1,318 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.writer;
-
-import java.io.Closeable;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.CarbonUtil;
-
-import org.pentaho.di.core.exception.KettleException;
-
-public class HierarchyValueWriterForCSV {
-
-  /**
-   * Comment for <code>LOGGER</code>
-   */
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(HierarchyValueWriterForCSV.class.getName());
-  /**
-   * hierarchyName
-   */
-  private String hierarchyName;
-
-  /**
-   * bufferedOutStream
-   */
-  private FileChannel outPutFileChannel;
-
-  /**
-   * storeFolderLocation
-   */
-  private String storeFolderLocation;
-
-  /**
-   * intialized
-   */
-  private boolean intialized;
-
-  /**
-   * counter the number of files.
-   */
-  private int counter;
-
-  /**
-   * byteArrayList
-   */
-  private List<ByteArrayHolder> byteArrayholder =
-      new ArrayList<ByteArrayHolder>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-
-  /**
-   * toflush
-   */
-  private int toflush;
-
-  public HierarchyValueWriterForCSV(String hierarchy, String storeFolderLocation) {
-    this.hierarchyName = hierarchy;
-    this.storeFolderLocation = storeFolderLocation;
-
-    CarbonProperties instance = CarbonProperties.getInstance();
-
-    this.toflush = Integer.parseInt(instance
-        .getProperty(CarbonCommonConstants.SORT_SIZE, CarbonCommonConstants.SORT_SIZE_DEFAULT_VAL));
-
-    int rowSetSize = Integer.parseInt(instance.getProperty(CarbonCommonConstants.GRAPH_ROWSET_SIZE,
-        CarbonCommonConstants.GRAPH_ROWSET_SIZE_DEFAULT));
-
-    if (this.toflush > rowSetSize) {
-      this.toflush = rowSetSize;
-    }
-
-    updateCounter(hierarchy, storeFolderLocation);
-  }
-
-  /**
-   * @return Returns the byteArrayList.
-   */
-  public List<ByteArrayHolder> getByteArrayList() {
-    return byteArrayholder;
-  }
-
-  public FileChannel getBufferedOutStream() {
-    return outPutFileChannel;
-  }
-
-  private void updateCounter(final String meString, String storeFolderLocation) {
-    File storeFolder = new File(storeFolderLocation);
-
-    File[] listFiles = storeFolder.listFiles(new FileFilter() {
-
-      @Override public boolean accept(File file) {
-        if (file.getName().indexOf(meString) > -1)
-
-        {
-          return true;
-        }
-        return false;
-      }
-    });
-
-    if (null == listFiles || listFiles.length == 0) {
-      counter = 0;
-      return;
-    }
-
-    for (File hierFile : listFiles) {
-      String hierFileName = hierFile.getName();
-
-      if (hierFileName.endsWith(CarbonCommonConstants.FILE_INPROGRESS_STATUS)) {
-        hierFileName = hierFileName.substring(0, hierFileName.lastIndexOf('.'));
-        try {
-          counter = Integer.parseInt(hierFileName.substring(hierFileName.length() - 1));
-        } catch (NumberFormatException nfe) {
-
-          if (new File(hierFileName + '0' + CarbonCommonConstants.LEVEL_FILE_EXTENSION).exists()) {
-            // Need to skip because the case can come in which server went down while files were
-            // merging and the other hierarchy files were not deleted, and the current file
-            // status is inrogress. so again we will merge the files and rename to normal file
-            LOGGER.info("Need to skip as this can be case in which hierarchy file already renamed");
-            if (hierFile.delete()) {
-              LOGGER.info("Deleted the Inprogress hierarchy Files.");
-            }
-          } else {
-            // levelfileName0.level file not exist that means files is merged and other
-            // files got deleted. while renaming this file from inprogress to normal file,
-            // server got restarted/killed. so we need to rename the file to normal.
-
-            File inprogressFile = new File(storeFolder + File.separator + hierFile.getName());
-            File changetoName = new File(storeFolder + File.separator + hierFileName);
-
-            if (inprogressFile.renameTo(changetoName)) {
-              LOGGER.info(
-                  "Renaming the level Files while creating the new instance on server startup.");
-            }
-
-          }
-
-        }
-      }
-
-      String val = hierFileName.substring(hierFileName.length() - 1);
-
-      int parsedVal = getIntValue(val);
-
-      if (counter < parsedVal) {
-        counter = parsedVal;
-      }
-    }
-    counter++;
-  }
-
-  private int getIntValue(String val) {
-    int parsedVal = 0;
-    try {
-      parsedVal = Integer.parseInt(val);
-    } catch (NumberFormatException nfe) {
-      LOGGER.info("Hierarchy File is already renamed so there will not be"
-              + "any need to keep the counter");
-    }
-    return parsedVal;
-  }
-
-  private void intialize() throws KettleException {
-    intialized = true;
-
-    File f = new File(storeFolderLocation + File.separator + hierarchyName + counter
-        + CarbonCommonConstants.FILE_INPROGRESS_STATUS);
-
-    counter++;
-
-    FileOutputStream fos = null;
-
-    boolean isFileCreated = false;
-    if (!f.exists()) {
-      try {
-        isFileCreated = f.createNewFile();
-
-      } catch (IOException e) {
-        //not required: findbugs fix
-        throw new KettleException("unable to create member mapping file", e);
-      }
-      if (!isFileCreated) {
-        throw new KettleException("unable to create file" + f.getAbsolutePath());
-      }
-    }
-
-    try {
-      fos = new FileOutputStream(f);
-
-      outPutFileChannel = fos.getChannel();
-    } catch (FileNotFoundException e) {
-      closeStreamAndDeleteFile(f, outPutFileChannel, fos);
-      throw new KettleException("member Mapping File not found to write mapping info", e);
-    }
-  }
-
-  public void writeIntoHierarchyFile(byte[] bytes, int primaryKey) throws KettleException {
-    if (!intialized) {
-      intialize();
-    }
-
-    ByteBuffer byteBuffer = storeValueInCache(bytes, primaryKey);
-
-    try {
-      byteBuffer.flip();
-      outPutFileChannel.write(byteBuffer);
-    } catch (IOException e) {
-      throw new KettleException("Error while writting in the hierarchy mapping file", e);
-    }
-  }
-
-  private ByteBuffer storeValueInCache(byte[] bytes, int primaryKey) {
-
-    // adding 4 to store the total length of the row at the beginning
-    ByteBuffer buffer = ByteBuffer.allocate(bytes.length + 4);
-
-    buffer.put(bytes);
-    buffer.putInt(primaryKey);
-
-    return buffer;
-  }
-
-  public void performRequiredOperation() throws KettleException {
-    if (byteArrayholder.size() == 0) {
-      return;
-    }
-    //write to the file and close the stream.
-    Collections.sort(byteArrayholder);
-
-    for (ByteArrayHolder byteArray : byteArrayholder) {
-      writeIntoHierarchyFile(byteArray.getMdKey(), byteArray.getPrimaryKey());
-    }
-
-    CarbonUtil.closeStreams(outPutFileChannel);
-
-    //rename the inprogress file to normal .level file
-    String filePath = this.storeFolderLocation + File.separator + hierarchyName + (counter - 1)
-        + CarbonCommonConstants.FILE_INPROGRESS_STATUS;
-    File inProgressFile = new File(filePath);
-    String inprogressFileName = inProgressFile.getName();
-
-    String changedFileName = inprogressFileName.substring(0, inprogressFileName.lastIndexOf('.'));
-
-    File orgFinalName = new File(this.storeFolderLocation + File.separator + changedFileName);
-
-    if (!inProgressFile.renameTo(orgFinalName)) {
-      LOGGER.error("Not able to rename file : " + inprogressFileName);
-    }
-
-    //create the new outputStream
-    try {
-      intialize();
-    } catch (KettleException e) {
-      LOGGER.error("Not able to create output stream for file:" + hierarchyName + (counter - 1));
-    }
-
-    //clear the byte array holder also.
-    byteArrayholder.clear();
-  }
-
-  private void closeStreamAndDeleteFile(File f, Closeable... streams) {
-    boolean isDeleted = false;
-    for (Closeable stream : streams) {
-      if (null != stream) {
-        try {
-          stream.close();
-        } catch (IOException e) {
-          LOGGER.error(e, "unable to close the stream ");
-        }
-
-      }
-    }
-
-    // delete the file
-    isDeleted = f.delete();
-    if (!isDeleted) {
-      LOGGER.error("Unable to delete the file " + f.getAbsolutePath());
-    }
-
-  }
-
-  public String getHierarchyName() {
-    return hierarchyName;
-  }
-
-  public int getCounter() {
-    return counter;
-  }
-
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/DFSFileHolderImplUnitTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/DFSFileHolderImplUnitTest.java b/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/DFSFileHolderImplUnitTest.java
index fed712e..5c51c87 100644
--- a/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/DFSFileHolderImplUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/DFSFileHolderImplUnitTest.java
@@ -35,9 +35,7 @@ import org.junit.Test;
 
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
-import static org.pentaho.di.core.util.Assert.assertNull;
 
 public class DFSFileHolderImplUnitTest {
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/FileHolderImplUnitTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/FileHolderImplUnitTest.java b/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/FileHolderImplUnitTest.java
index da722cf..ed50d63 100644
--- a/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/FileHolderImplUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/carbon/datastorage/filesystem/store/impl/FileHolderImplUnitTest.java
@@ -35,9 +35,8 @@ import org.junit.Test;
 
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertThat;
-import static org.pentaho.di.core.util.Assert.assertNull;
 
 public class FileHolderImplUnitTest {
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/test/java/org/apache/carbondata/core/scan/complextypes/ArrayQueryTypeTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/complextypes/ArrayQueryTypeTest.java b/core/src/test/java/org/apache/carbondata/core/scan/complextypes/ArrayQueryTypeTest.java
index 9f37f78..e882f4e 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/complextypes/ArrayQueryTypeTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/complextypes/ArrayQueryTypeTest.java
@@ -21,8 +21,8 @@ import java.nio.ByteBuffer;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import static org.pentaho.di.core.util.Assert.assertNotNull;
-import static org.pentaho.di.core.util.Assert.assertNull;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
 
 public class ArrayQueryTypeTest {
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/test/java/org/apache/carbondata/core/scan/expression/ExpressionResultTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/expression/ExpressionResultTest.java b/core/src/test/java/org/apache/carbondata/core/scan/expression/ExpressionResultTest.java
index 30c8236..2cdb46e 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/expression/ExpressionResultTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/expression/ExpressionResultTest.java
@@ -35,8 +35,8 @@ import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.core.Is.is;
 import static org.hamcrest.core.IsEqual.equalTo;
 import static org.junit.Assert.assertEquals;
-import static org.pentaho.di.core.util.Assert.assertFalse;
-import static org.pentaho.di.core.util.Assert.assertNull;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
 
 public class ExpressionResultTest {
   private static final double DELTA = 1e-15;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/test/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImplTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImplTest.java b/core/src/test/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImplTest.java
index 90cae83..bb892a9 100644
--- a/core/src/test/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImplTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/stats/DriverQueryStatisticsRecorderImplTest.java
@@ -23,7 +23,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 import static junit.framework.TestCase.assertNotNull;
-import static org.pentaho.di.core.util.Assert.assertNull;
+import static org.junit.Assert.assertNull;
 
 public class DriverQueryStatisticsRecorderImplTest {
   private static DriverQueryStatisticsRecorderImpl driverQueryStatisticsRecorderImpl = null;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/core/src/test/java/org/apache/carbondata/core/util/CarbonUtilTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/util/CarbonUtilTest.java b/core/src/test/java/org/apache/carbondata/core/util/CarbonUtilTest.java
index 9beaac7..9adf4d4 100644
--- a/core/src/test/java/org/apache/carbondata/core/util/CarbonUtilTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/util/CarbonUtilTest.java
@@ -16,12 +16,26 @@
  */
 package org.apache.carbondata.core.util;
 
-import mockit.Mock;
-import mockit.MockUp;
+import java.io.BufferedReader;
+import java.io.DataInputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
 
-import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
 import org.apache.carbondata.core.datastore.block.TableBlockInfo;
 import org.apache.carbondata.core.datastore.chunk.impl.FixedLengthDimensionDataChunk;
+import org.apache.carbondata.core.datastore.columnar.ColumnGroupModel;
+import org.apache.carbondata.core.datastore.compression.WriterCompressModel;
+import org.apache.carbondata.core.datastore.filesystem.LocalCarbonFile;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
+import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
 import org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk;
 import org.apache.carbondata.core.metadata.datatype.DataType;
@@ -29,29 +43,20 @@ import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.datastore.columnar.ColumnGroupModel;
-import org.apache.carbondata.core.datastore.compression.WriterCompressModel;
-import org.apache.carbondata.core.datastore.filesystem.LocalCarbonFile;
-import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 import org.apache.carbondata.core.scan.model.QueryDimension;
 
+import mockit.Mock;
+import mockit.MockUp;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.pentaho.di.core.exception.KettleException;
-
-import java.io.*;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
 
-import static org.hamcrest.MatcherAssert.assertThat;
-import static junit.framework.TestCase.*;
+import static junit.framework.TestCase.assertEquals;
+import static junit.framework.TestCase.assertTrue;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
 
 public class CarbonUtilTest {
 
@@ -300,23 +305,6 @@ public class CarbonUtilTest {
     assertEquals(result, 5);
   }
 
-  @Test public void testToWriteLevelCardinalityFile() throws KettleException {
-    int[] dimCardinality = { 10, 20, 30, 40 };
-    CarbonUtil.writeLevelCardinalityFile("../core/src/test/resources/testDatabase", "testTable",
-        dimCardinality);
-    assertTrue(new File("../core/src/test/resources/testDatabase/levelmetadata_testTable.metadata")
-        .exists());
-  }
-
-  @Test public void testToGetCardinalityFromLevelMetadataFile()
-      throws IOException, InterruptedException {
-    int[] cardinality = CarbonUtil.getCardinalityFromLevelMetadataFile(
-        "../core/src/test/resources/testDatabase/levelmetadata_testTable.metadata");
-    int[] expectedCardinality = { 10, 20, 30, 40 };
-    for (int i = 0; i < cardinality.length; i++) {
-      assertEquals(cardinality[i], expectedCardinality[i]);
-    }
-  }
 
   @Test public void testToGetCardinalityFromLevelMetadataFileForInvalidPath()
       throws IOException, InterruptedException {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
index b1d9e41..36013a8 100644
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
@@ -41,9 +41,6 @@ object CarbonExample {
            STORED BY 'carbondata'
            """)
 
-    // Currently there are two data loading flows in CarbonData, one uses Kettle as ETL tool
-    // in each node to do data loading, another uses a multi-thread framework without Kettle (See
-    // AbstractDataLoadProcessorStep)
     // Load data
     cc.sql(s"""
            LOAD DATA LOCAL INPATH '$testData' into table t3

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
index 41912bc..f98ec3b 100644
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
@@ -31,7 +31,6 @@ object ExampleUtils {
   def currentPath: String = new File(this.getClass.getResource("/").getPath + "../../")
       .getCanonicalPath
   val storeLocation = currentPath + "/target/store"
-  val kettleHome = new File(currentPath + "/../../processing/carbonplugins").getCanonicalPath
 
   def createCarbonContext(appName: String): CarbonContext = {
     val sc = new SparkContext(new SparkConf()
@@ -44,12 +43,7 @@ object ExampleUtils {
     val cc = new CarbonContext(sc, storeLocation, currentPath + "/target/carbonmetastore")
 
     CarbonProperties.getInstance()
-      .addProperty("carbon.kettle.home", kettleHome)
       .addProperty("carbon.storelocation", storeLocation)
-    // whether use table split partition
-    // true -> use table split partition, support multiple partition loading
-    // false -> use node split partition, support data load by host partition
-    CarbonProperties.getInstance().addProperty("carbon.table.split.partition.enable", "false")
     cc
   }
 
@@ -90,7 +84,6 @@ object ExampleUtils {
       .format("carbondata")
       .option("tableName", tableName)
       .option("compress", "true")
-      .option("use_kettle", "false")
       .option("tempCSV", "false")
       .mode(mode)
       .save()

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
index 1c3b7f0..2ae9341 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
@@ -34,7 +34,6 @@ object CarbonSessionExample {
     val metastoredb = s"$rootPath/examples/spark2/target"
 
     CarbonProperties.getInstance()
-      .addProperty("carbon.kettle.home", s"$rootPath/processing/carbonplugins")
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
 
     import org.apache.spark.sql.CarbonSession._

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
index 381b2e8..63e680a 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
@@ -54,7 +54,6 @@ object SparkSessionExample {
         .getOrCreate()
 
     CarbonProperties.getInstance()
-      .addProperty("carbon.kettle.home", s"$rootPath/processing/carbonplugins")
       .addProperty("carbon.storelocation", storeLocation)
 
     spark.sparkContext.setLogLevel("WARN")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java
----------------------------------------------------------------------
diff --git a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java b/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java
index b55875f..0d751fc 100644
--- a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java
+++ b/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputMapperTest.java
@@ -28,11 +28,11 @@ import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.hadoop.CarbonInputFormat;
 import org.apache.carbondata.hadoop.CarbonProjection;
-import org.apache.carbondata.hadoop.test.util.StoreCreator;
 import org.apache.carbondata.core.scan.expression.ColumnExpression;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.core.scan.expression.LiteralExpression;
 import org.apache.carbondata.core.scan.expression.conditional.EqualToExpression;
+import org.apache.carbondata.hadoop.test.util.StoreCreator;
 
 import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
@@ -87,6 +87,7 @@ public class CarbonInputMapperTest extends TestCase {
       Assert.assertEquals("Count lines are not matching", 1000, countTheLines(outPath));
       Assert.assertEquals("Column count are not matching", 3, countTheColumns(outPath));
     } catch (Exception e) {
+      e.printStackTrace();
       Assert.assertTrue("failed", false);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java
----------------------------------------------------------------------
diff --git a/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java b/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java
index 532cd43..51ce2c5 100644
--- a/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java
+++ b/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java
@@ -16,19 +16,37 @@
  */
 package org.apache.carbondata.hadoop.test.util;
 
-import com.google.gson.Gson;
-import org.apache.hadoop.fs.Path;
-
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.nio.charset.Charset;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
+
+import org.apache.carbondata.common.CarbonIterator;
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
 import org.apache.carbondata.core.cache.CacheType;
 import org.apache.carbondata.core.cache.dictionary.Dictionary;
 import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.fileoperations.AtomicFileOperations;
+import org.apache.carbondata.core.fileoperations.AtomicFileOperationsImpl;
+import org.apache.carbondata.core.fileoperations.FileWriteOperation;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
-import org.apache.carbondata.processing.model.CarbonDataLoadSchema;
+import org.apache.carbondata.core.metadata.CarbonMetadata;
 import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
 import org.apache.carbondata.core.metadata.ColumnIdentifier;
-import org.apache.carbondata.core.metadata.CarbonMetadata;
 import org.apache.carbondata.core.metadata.converter.SchemaConverter;
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.datatype.DataType;
@@ -42,14 +60,11 @@ import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.util.path.CarbonStorePath;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.processing.csvreaderstep.BlockDetails;
 import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.path.CarbonStorePath;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.carbondata.core.writer.CarbonDictionaryWriter;
 import org.apache.carbondata.core.writer.CarbonDictionaryWriterImpl;
 import org.apache.carbondata.core.writer.ThriftWriter;
@@ -57,25 +72,29 @@ import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWrit
 import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWriterImpl;
 import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfo;
 import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfoPreparator;
-import org.apache.carbondata.core.fileoperations.AtomicFileOperations;
-import org.apache.carbondata.core.fileoperations.AtomicFileOperationsImpl;
-import org.apache.carbondata.core.fileoperations.FileWriteOperation;
-import org.apache.carbondata.processing.api.dataloader.DataLoadModel;
 import org.apache.carbondata.processing.api.dataloader.SchemaInfo;
 import org.apache.carbondata.processing.constants.TableOptionConstant;
-import org.apache.carbondata.processing.csvload.DataGraphExecuter;
-import org.apache.carbondata.processing.dataprocessor.DataProcessTaskStatus;
-import org.apache.carbondata.processing.dataprocessor.IDataProcessStatus;
-import org.apache.carbondata.processing.graphgenerator.GraphGenerator;
-import org.apache.carbondata.processing.graphgenerator.GraphGeneratorException;
+import org.apache.carbondata.processing.csvload.BlockDetails;
+import org.apache.carbondata.processing.csvload.CSVInputFormat;
+import org.apache.carbondata.processing.csvload.CSVRecordReaderIterator;
+import org.apache.carbondata.processing.csvload.StringArrayWritable;
+import org.apache.carbondata.processing.model.CarbonDataLoadSchema;
+import org.apache.carbondata.processing.model.CarbonLoadModel;
+import org.apache.carbondata.processing.newflow.DataLoadExecutor;
+import org.apache.carbondata.processing.newflow.constants.DataLoadProcessorConstants;
 
-import java.io.*;
-import java.nio.charset.Charset;
-import java.text.SimpleDateFormat;
-import java.util.*;
+import com.google.gson.Gson;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.TaskAttemptID;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 
 /**
  * This class will create store file based on provided schema
+ *
  */
 public class StoreCreator {
 
@@ -104,26 +123,52 @@ public class StoreCreator {
 
     try {
 
-      String factFilePath = new File("src/test/resources/data.csv").getCanonicalPath();
+      String factFilePath = new File("../hadoop/src/test/resources/data.csv").getCanonicalPath();
       File storeDir = new File(absoluteTableIdentifier.getStorePath());
       CarbonUtil.deleteFoldersAndFiles(storeDir);
       CarbonProperties.getInstance().addProperty(CarbonCommonConstants.STORE_LOCATION_HDFS,
           absoluteTableIdentifier.getStorePath());
 
-      String kettleHomePath = "../processing/carbonplugins";
       CarbonTable table = createTable();
       writeDictionary(factFilePath, table);
       CarbonDataLoadSchema schema = new CarbonDataLoadSchema(table);
-      LoadModel loadModel = new LoadModel();
+      CarbonLoadModel loadModel = new CarbonLoadModel();
       String partitionId = "0";
-      loadModel.setSchema(schema);
+      loadModel.setCarbonDataLoadSchema(schema);
       loadModel.setDatabaseName(absoluteTableIdentifier.getCarbonTableIdentifier().getDatabaseName());
       loadModel.setTableName(absoluteTableIdentifier.getCarbonTableIdentifier().getTableName());
       loadModel.setTableName(absoluteTableIdentifier.getCarbonTableIdentifier().getTableName());
       loadModel.setFactFilePath(factFilePath);
       loadModel.setLoadMetadataDetails(new ArrayList<LoadMetadataDetails>());
+      loadModel.setStorePath(absoluteTableIdentifier.getStorePath());
+      loadModel.setDateFormat(null);
+      loadModel.setDefaultTimestampFormat(CarbonProperties.getInstance().getProperty(
+          CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
+      loadModel.setDefaultDateFormat(CarbonProperties.getInstance().getProperty(
+          CarbonCommonConstants.CARBON_DATE_FORMAT,
+          CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT));
+      loadModel.setSerializationNullFormat(
+          TableOptionConstant.SERIALIZATION_NULL_FORMAT.getName() + ",\\N");
+      loadModel.setBadRecordsLoggerEnable(
+          TableOptionConstant.BAD_RECORDS_LOGGER_ENABLE.getName() + ",false");
+      loadModel.setBadRecordsAction(
+          TableOptionConstant.BAD_RECORDS_ACTION.getName() + ",FORCE");
+      loadModel.setIsEmptyDataBadRecord(
+          DataLoadProcessorConstants.IS_EMPTY_DATA_BAD_RECORD + ",false");
+      loadModel.setCsvHeader("ID,date,country,name,phonetype,serialname,salary");
+      loadModel.setCsvHeaderColumns(loadModel.getCsvHeader().split(","));
+      loadModel.setTaskNo("0");
+      loadModel.setSegmentId("0");
+      loadModel.setPartitionId("0");
+      loadModel.setFactTimeStamp(System.currentTimeMillis());
+
+      executeGraph(loadModel, absoluteTableIdentifier.getStorePath());
 
-      executeGraph(loadModel, absoluteTableIdentifier.getStorePath(), kettleHomePath);
     } catch (Exception e) {
       e.printStackTrace();
     }
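
CarbonLoadModel carries each table option above as a single "optionName,value"
string rather than as a separate name and value. A minimal sketch of that
encoding, assuming only the TableOptionConstant and DataLoadProcessorConstants
imports shown earlier (the local variables are illustrative):

    // Each option travels as "<option name>,<option value>" in one string.
    String nullFormat =
        TableOptionConstant.SERIALIZATION_NULL_FORMAT.getName() + ",\\N";
    String badRecordsAction =
        TableOptionConstant.BAD_RECORDS_ACTION.getName() + ",FORCE";
    // A consumer splits on the first comma to recover name and value.
    String value = badRecordsAction.substring(badRecordsAction.indexOf(',') + 1);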
@@ -311,12 +356,10 @@ public class StoreCreator {
    *
    * @param loadModel
    * @param storeLocation
-   * @param kettleHomePath
    * @throws Exception
    */
-  public static void executeGraph(LoadModel loadModel, String storeLocation, String kettleHomePath)
+  public static void executeGraph(CarbonLoadModel loadModel, String storeLocation)
       throws Exception {
-    System.setProperty("KETTLE_HOME", kettleHomePath);
     new File(storeLocation).mkdirs();
     String outPutLoc = storeLocation + "/etl";
     String databaseName = loadModel.getDatabaseName();
@@ -344,35 +387,39 @@ public class StoreCreator {
       path.delete();
     }
 
-    DataProcessTaskStatus dataProcessTaskStatus = new DataProcessTaskStatus(databaseName, tableName);
-    dataProcessTaskStatus.setCsvFilePath(loadModel.getFactFilePath());
     SchemaInfo info = new SchemaInfo();
     BlockDetails blockDetails = new BlockDetails(new Path(loadModel.getFactFilePath()),
         0, new File(loadModel.getFactFilePath()).length(), new String[] {"localhost"});
-    GraphGenerator.blockInfo.put("qwqwq", new BlockDetails[] { blockDetails });
-    dataProcessTaskStatus.setBlocksID("qwqwq");
-    dataProcessTaskStatus.setEscapeCharacter("\\");
-    dataProcessTaskStatus.setQuoteCharacter("\"");
-    dataProcessTaskStatus.setCommentCharacter("#");
-    dataProcessTaskStatus.setDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    Configuration configuration = new Configuration();
+    CSVInputFormat.setCommentCharacter(configuration, loadModel.getCommentChar());
+    CSVInputFormat.setCSVDelimiter(configuration, loadModel.getCsvDelimiter());
+    CSVInputFormat.setEscapeCharacter(configuration, loadModel.getEscapeChar());
+    CSVInputFormat.setHeaderExtractionEnabled(configuration, true);
+    CSVInputFormat.setQuoteCharacter(configuration, loadModel.getQuoteChar());
+    CSVInputFormat.setReadBufferSize(configuration, CarbonProperties.getInstance()
+        .getProperty(CarbonCommonConstants.CSV_READ_BUFFER_SIZE,
+            CarbonCommonConstants.CSV_READ_BUFFER_SIZE_DEFAULT));
+
+    TaskAttemptContextImpl hadoopAttemptContext = new TaskAttemptContextImpl(
+        configuration, new TaskAttemptID("", 1, TaskType.MAP, 0, 0));
+    CSVInputFormat format = new CSVInputFormat();
+
+    RecordReader<NullWritable, StringArrayWritable> recordReader =
+        format.createRecordReader(blockDetails, hadoopAttemptContext);
+
+    CSVRecordReaderIterator readerIterator =
+        new CSVRecordReaderIterator(recordReader, blockDetails, hadoopAttemptContext);
+    new DataLoadExecutor().execute(loadModel,
+        storeLocation,
+        new CarbonIterator[]{readerIterator});
+
     info.setDatabaseName(databaseName);
     info.setTableName(tableName);
-    info.setSerializationNullFormat(
-        TableOptionConstant.SERIALIZATION_NULL_FORMAT.getName() + "," + "\\N");
-    info.setBadRecordsLoggerEnable(
-        TableOptionConstant.BAD_RECORDS_LOGGER_ENABLE.getName() + "," + "false");
-    info.setBadRecordsLoggerAction(
-        TableOptionConstant.BAD_RECORDS_ACTION.getName() + "," + "force");
-
-    generateGraph(dataProcessTaskStatus, info, loadModel.getTableName(), "0", loadModel.getSchema(), null,
-        loadModel.getLoadMetadataDetails());
-
-    DataGraphExecuter graphExecuter = new DataGraphExecuter(dataProcessTaskStatus);
-    graphExecuter
-        .executeGraph(graphPath, info, loadModel.getSchema());
+
-    writeLoadMetadata(loadModel.schema, loadModel.getTableName(), loadModel.getTableName(),
+    writeLoadMetadata(loadModel.getCarbonDataLoadSchema(), loadModel.getDatabaseName(),
+        loadModel.getTableName(),
         new ArrayList<LoadMetadataDetails>());
 
     String segLocation =
@@ -403,6 +450,7 @@ public class StoreCreator {
   public static void writeLoadMetadata(CarbonDataLoadSchema schema, String databaseName,
       String tableName, List<LoadMetadataDetails> listOfLoadFolderDetails) throws IOException {
     LoadMetadataDetails loadMetadataDetails = new LoadMetadataDetails();
+    loadMetadataDetails.setLoadEndTime(System.currentTimeMillis());
     loadMetadataDetails.setLoadStatus("SUCCESS");
     loadMetadataDetails.setLoadName(String.valueOf(0));
     loadMetadataDetails.setLoadStartTime(loadMetadataDetails.getTimeStamp(readCurrentTime()));
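
The com.google.gson.Gson import added above suggests the load details are
persisted as JSON. A minimal sketch of that idea (the field values are
illustrative and the status-file path is omitted):

    LoadMetadataDetails details = new LoadMetadataDetails();
    details.setLoadName("0");
    details.setLoadStatus("SUCCESS");
    details.setLoadEndTime(System.currentTimeMillis());
    // Serialize the details as a JSON array before writing the status file.
    String json = new Gson().toJson(new LoadMetadataDetails[] { details });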
@@ -442,40 +490,6 @@ public class StoreCreator {
 
   }
 
-  /**
-   * generate graph
-   *
-   * @param dataProcessTaskStatus
-   * @param info
-   * @param tableName
-   * @param partitionID
-   * @param schema
-   * @param factStoreLocation
-   * @param loadMetadataDetails
-   * @throws GraphGeneratorException
-   */
-  private static void generateGraph(IDataProcessStatus dataProcessTaskStatus, SchemaInfo info,
-      String tableName, String partitionID, CarbonDataLoadSchema schema, String factStoreLocation,
-      List<LoadMetadataDetails> loadMetadataDetails)
-      throws GraphGeneratorException {
-    DataLoadModel model = new DataLoadModel();
-    model.setCsvLoad(null != dataProcessTaskStatus.getCsvFilePath() || null != dataProcessTaskStatus.getFilesToProcess());
-    model.setSchemaInfo(info);
-    model.setTableName(dataProcessTaskStatus.getTableName());
-    model.setTaskNo("1");
-    model.setBlocksID(dataProcessTaskStatus.getBlocksID());
-    model.setFactTimeStamp(System.currentTimeMillis() + "");
-    model.setEscapeCharacter(dataProcessTaskStatus.getEscapeCharacter());
-    model.setQuoteCharacter(dataProcessTaskStatus.getQuoteCharacter());
-    model.setCommentCharacter(dataProcessTaskStatus.getCommentCharacter());
-    model.setDateFormat(dataProcessTaskStatus.getDateFormat());
-    String outputLocation = CarbonProperties.getInstance()
-        .getProperty("store_output_location", "../carbon-store/system/carbon/etl");
-    GraphGenerator generator =
-        new GraphGenerator(model, partitionID, factStoreLocation, schema, "0", outputLocation);
-    generator.generateGraph();
-  }
-
   public static String readCurrentTime() {
     SimpleDateFormat sdf = new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP);
     String date = null;
@@ -485,60 +499,6 @@ public class StoreCreator {
     return date;
   }
 
-  /**
-   * This is local model object used inside this class to store information related to data loading
-   */
-  private static class LoadModel {
-
-    private CarbonDataLoadSchema schema;
-    private String tableName;
-    private String databaseName;
-    private List<LoadMetadataDetails> loadMetaDetail;
-    private String factFilePath;
-
-    public void setSchema(CarbonDataLoadSchema schema) {
-      this.schema = schema;
-    }
-
-    public List<LoadMetadataDetails> getLoadMetadataDetails() {
-      return loadMetaDetail;
-    }
-
-    public CarbonDataLoadSchema getSchema() {
-      return schema;
-    }
-
-    public String getFactFilePath() {
-      return factFilePath;
-    }
-
-    public String getTableName() {
-      return tableName;
-    }
-
-    public String getDatabaseName() {
-      return databaseName;
-    }
-
-    public void setLoadMetadataDetails(List<LoadMetadataDetails> loadMetaDetail) {
-      this.loadMetaDetail = loadMetaDetail;
-    }
-
-    public void setFactFilePath(String factFilePath) {
-      this.factFilePath = factFilePath;
-    }
-
-    public void setTableName(String tableName) {
-      this.tableName = tableName;
-    }
-
-    public void setDatabaseName(String databaseName) {
-      this.databaseName = databaseName;
-    }
-
-
-  }
-
   public static void main(String[] args) {
     StoreCreator.createCarbonStore();
   }
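
Taken together, the refactored path above replaces the kettle graph with four
steps: configure CSVInputFormat on a Hadoop Configuration, open a RecordReader
for the input block, wrap it in a CSVRecordReaderIterator, and hand the
iterator to DataLoadExecutor. A condensed sketch of that flow, using only
classes already imported above (the CSV path is illustrative; loadModel and
storeLocation are built as in executeGraph):

    Configuration conf = new Configuration();
    CSVInputFormat.setHeaderExtractionEnabled(conf, true);
    TaskAttemptContextImpl context = new TaskAttemptContextImpl(
        conf, new TaskAttemptID("", 1, TaskType.MAP, 0, 0));
    BlockDetails block = new BlockDetails(new Path("/tmp/data.csv"), 0,
        new File("/tmp/data.csv").length(), new String[] { "localhost" });
    RecordReader<NullWritable, StringArrayWritable> reader =
        new CSVInputFormat().createRecordReader(block, context);
    // One iterator per input split; the executor drives the new flow end to end.
    CSVRecordReaderIterator iterator =
        new CSVRecordReaderIterator(reader, block, context);
    new DataLoadExecutor().execute(loadModel, storeLocation,
        new CarbonIterator[] { iterator });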

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/integration/spark-common-test/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/pom.xml b/integration/spark-common-test/pom.xml
index d92bc94..f9bfaa0 100644
--- a/integration/spark-common-test/pom.xml
+++ b/integration/spark-common-test/pom.xml
@@ -147,7 +147,6 @@
           </environmentVariables>
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
-            <use_kettle>${use.kettle}</use_kettle>
           </systemProperties>
         </configuration>
         <executions>

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
index aa18b8f..58b49d4 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
@@ -136,7 +136,7 @@ class TestLoadDataGeneral extends QueryTest with BeforeAndAfterAll {
         STORED BY 'org.apache.carbondata.format' """)
     val testData = s"$resourcesPath/sample.csv"
     try {
-      sql(s"LOAD DATA LOCAL INPATH '$testData' into table load_test_singlepass options ('USE_KETTLE'='FALSE','SINGLE_PASS'='TRUE')")
+      sql(s"LOAD DATA LOCAL INPATH '$testData' into table load_test_singlepass options ('SINGLE_PASS'='TRUE')")
     } catch {
       case ex: Exception =>
         assert(false)

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
index 052ee45..7df7fcd 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
@@ -25,7 +25,7 @@ class DoubleDataTypeTest extends QueryTest with BeforeAndAfterAll {
   override def beforeAll: Unit = {
     sql("DROP TABLE IF EXISTS oscon_carbon_old")
     sql("""create table oscon_carbon_old (CUST_PRFRD_FLG String,PROD_BRAND_NAME String,PROD_COLOR String,CUST_LAST_RVW_DATE String,CUST_COUNTRY String,CUST_CITY String,PRODUCT_NAME String,CUST_JOB_TITLE String,CUST_STATE String,CUST_BUY_POTENTIAL String,PRODUCT_MODEL String,ITM_ID String,ITM_NAME String,PRMTION_ID String,PRMTION_NAME String,SHP_MODE_ID String,SHP_MODE String,DELIVERY_COUNTRY String,DELIVERY_STATE String,DELIVERY_CITY String,DELIVERY_DISTRICT String,ACTIVE_EMUI_VERSION String,WH_NAME String,STR_ORDER_DATE String,OL_ORDER_NO String,OL_ORDER_DATE String,OL_SITE String,CUST_FIRST_NAME String,CUST_LAST_NAME String,CUST_BIRTH_DY String,CUST_BIRTH_MM String,CUST_BIRTH_YR String,CUST_BIRTH_COUNTRY String,CUST_SEX String,CUST_ADDRESS_ID String,CUST_STREET_NO String,CUST_STREET_NAME String,CUST_AGE String,CUST_SUITE_NO String,CUST_ZIP String,CUST_COUNTY String,PRODUCT_ID String,PROD_SHELL_COLOR String,DEVICE_NAME String,PROD_SHORT_DESC String,PROD_LONG_DESC String,PROD_THUMB 
 String,PROD_IMAGE String,PROD_UPDATE_DATE String,PROD_LIVE String,PROD_LOC String,PROD_RAM String,PROD_ROM String,PROD_CPU_CLOCK String,PROD_SERIES String,ITM_REC_START_DATE String,ITM_REC_END_DATE String,ITM_BRAND_ID String,ITM_BRAND String,ITM_CLASS_ID String,ITM_CLASS String,ITM_CATEGORY_ID String,ITM_CATEGORY String,ITM_MANUFACT_ID String,ITM_MANUFACT String,ITM_FORMULATION String,ITM_COLOR String,ITM_CONTAINER String,ITM_MANAGER_ID String,PRM_START_DATE String,PRM_END_DATE String,PRM_CHANNEL_DMAIL String,PRM_CHANNEL_EMAIL String,PRM_CHANNEL_CAT String,PRM_CHANNEL_TV String,PRM_CHANNEL_RADIO String,PRM_CHANNEL_PRESS String,PRM_CHANNEL_EVENT String,PRM_CHANNEL_DEMO String,PRM_CHANNEL_DETAILS String,PRM_PURPOSE String,PRM_DSCNT_ACTIVE String,SHP_CODE String,SHP_CARRIER String,SHP_CONTRACT String,CHECK_DATE String,CHECK_YR String,CHECK_MM String,CHECK_DY String,CHECK_HOUR String,BOM String,INSIDE_NAME String,PACKING_DATE String,PACKING_YR String,PACKING_MM String,PACKING_DY String,
 PACKING_HOUR String,DELIVERY_PROVINCE String,PACKING_LIST_NO String,ACTIVE_CHECK_TIME String,ACTIVE_CHECK_YR String,ACTIVE_CHECK_MM String,ACTIVE_CHECK_DY String,ACTIVE_CHECK_HOUR String,ACTIVE_AREA_ID String,ACTIVE_COUNTRY String,ACTIVE_PROVINCE String,ACTIVE_CITY String,ACTIVE_DISTRICT String,ACTIVE_NETWORK String,ACTIVE_FIRMWARE_VER String,ACTIVE_OS_VERSION String,LATEST_CHECK_TIME String,LATEST_CHECK_YR String,LATEST_CHECK_MM String,LATEST_CHECK_DY String,LATEST_CHECK_HOUR String,LATEST_AREAID String,LATEST_COUNTRY String,LATEST_PROVINCE String,LATEST_CITY String,LATEST_DISTRICT String,LATEST_FIRMWARE_VER String,LATEST_EMUI_VERSION String,LATEST_OS_VERSION String,LATEST_NETWORK String,WH_ID String,WH_STREET_NO String,WH_STREET_NAME String,WH_STREET_TYPE String,WH_SUITE_NO String,WH_CITY String,WH_COUNTY String,WH_STATE String,WH_ZIP String,WH_COUNTRY String,OL_SITE_DESC String,OL_RET_ORDER_NO String,OL_RET_DATE String,PROD_MODEL_ID String,CUST_ID String,PROD_UNQ_MDL_ID String,CU
 ST_NICK_NAME String,CUST_LOGIN String,CUST_EMAIL_ADDR String,PROD_UNQ_DEVICE_ADDR String,PROD_UQ_UUID String,PROD_BAR_CODE String,TRACKING_NO String,STR_ORDER_NO String,CUST_DEP_COUNT double,CUST_VEHICLE_COUNT double,CUST_ADDRESS_CNT double,CUST_CRNT_CDEMO_CNT double,CUST_CRNT_HDEMO_CNT double,CUST_CRNT_ADDR_DM double,CUST_FIRST_SHIPTO_CNT double,CUST_FIRST_SALES_CNT double,CUST_GMT_OFFSET double,CUST_DEMO_CNT double,CUST_INCOME double,PROD_UNLIMITED double,PROD_OFF_PRICE double,PROD_UNITS double,TOTAL_PRD_COST double,TOTAL_PRD_DISC double,PROD_WEIGHT double,REG_UNIT_PRICE double,EXTENDED_AMT double,UNIT_PRICE_DSCNT_PCT double,DSCNT_AMT double,PROD_STD_CST double,TOTAL_TX_AMT double,FREIGHT_CHRG double,WAITING_PERIOD double,DELIVERY_PERIOD double,ITM_CRNT_PRICE double,ITM_UNITS double,ITM_WSLE_CST double,ITM_SIZE double,PRM_CST double,PRM_RESPONSE_TARGET double,PRM_ITM_DM double,SHP_MODE_CNT double,WH_GMT_OFFSET double,WH_SQ_FT double,STR_ORD_QTY double,STR_WSLE_CST double,STR_LIST_
 PRICE double,STR_SALES_PRICE double,STR_EXT_DSCNT_AMT double,STR_EXT_SALES_PRICE double,STR_EXT_WSLE_CST double,STR_EXT_LIST_PRICE double,STR_EXT_TX double,STR_COUPON_AMT double,STR_NET_PAID double,STR_NET_PAID_INC_TX double,STR_NET_PRFT double,STR_SOLD_YR_CNT double,STR_SOLD_MM_CNT double,STR_SOLD_ITM_CNT double,STR_TOTAL_CUST_CNT double,STR_AREA_CNT double,STR_DEMO_CNT double,STR_OFFER_CNT double,STR_PRM_CNT double,STR_TICKET_CNT double,STR_NET_PRFT_DM_A double,STR_NET_PRFT_DM_B double,STR_NET_PRFT_DM_C double,STR_NET_PRFT_DM_D double,STR_NET_PRFT_DM_E double,STR_RET_STR_ID double,STR_RET_REASON_CNT double,STR_RET_TICKET_NO double,STR_RTRN_QTY double,STR_RTRN_AMT double,STR_RTRN_TX double,STR_RTRN_AMT_INC_TX double,STR_RET_FEE double,STR_RTRN_SHIP_CST double,STR_RFNDD_CSH double,STR_REVERSED_CHRG double,STR_STR_CREDIT double,STR_RET_NET_LOSS double,STR_RTRNED_YR_CNT double,STR_RTRN_MM_CNT double,STR_RET_ITM_CNT double,STR_RET_CUST_CNT double,STR_RET_AREA_CNT double,STR_RET_OFFER_C
 NT double,STR_RET_PRM_CNT double,STR_RET_NET_LOSS_DM_A double,STR_RET_NET_LOSS_DM_B double,STR_RET_NET_LOSS_DM_C double,STR_RET_NET_LOSS_DM_D double,OL_ORD_QTY double,OL_WSLE_CST double,OL_LIST_PRICE double,OL_SALES_PRICE double,OL_EXT_DSCNT_AMT double,OL_EXT_SALES_PRICE double,OL_EXT_WSLE_CST double,OL_EXT_LIST_PRICE double,OL_EXT_TX double,OL_COUPON_AMT double,OL_EXT_SHIP_CST double,OL_NET_PAID double,OL_NET_PAID_INC_TX double,OL_NET_PAID_INC_SHIP double,OL_NET_PAID_INC_SHIP_TX double,OL_NET_PRFT double,OL_SOLD_YR_CNT double,OL_SOLD_MM_CNT double,OL_SHIP_DATE_CNT double,OL_ITM_CNT double,OL_BILL_CUST_CNT double,OL_BILL_AREA_CNT double,OL_BILL_DEMO_CNT double,OL_BILL_OFFER_CNT double,OL_SHIP_CUST_CNT double,OL_SHIP_AREA_CNT double,OL_SHIP_DEMO_CNT double,OL_SHIP_OFFER_CNT double,OL_WEB_PAGE_CNT double,OL_WEB_SITE_CNT double,OL_SHIP_MODE_CNT double,OL_WH_CNT double,OL_PRM_CNT double,OL_NET_PRFT_DM_A double,OL_NET_PRFT_DM_B double,OL_NET_PRFT_DM_C double,OL_NET_PRFT_DM_D double,OL_RE
 T_RTRN_QTY double,OL_RTRN_AMT double,OL_RTRN_TX double,OL_RTRN_AMT_INC_TX double,OL_RET_FEE double,OL_RTRN_SHIP_CST double,OL_RFNDD_CSH double,OL_REVERSED_CHRG double,OL_ACCOUNT_CREDIT double,OL_RTRNED_YR_CNT double,OL_RTRNED_MM_CNT double,OL_RTRITM_CNT double,OL_RFNDD_CUST_CNT double,OL_RFNDD_AREA_CNT double,OL_RFNDD_DEMO_CNT double,OL_RFNDD_OFFER_CNT double,OL_RTRNING_CUST_CNT double,OL_RTRNING_AREA_CNT double,OL_RTRNING_DEMO_CNT double,OL_RTRNING_OFFER_CNT double,OL_RTRWEB_PAGE_CNT double,OL_REASON_CNT double,OL_NET_LOSS double,OL_NET_LOSS_DM_A double,OL_NET_LOSS_DM_B double,OL_NET_LOSS_DM_C double) STORED BY 'org.apache.carbondata.format' tblproperties('DICTIONARY_EXCLUDE'='CUST_ID,CUST_NICK_NAME,CUST_FIRST_NAME,CUST_LAST_NAME,CUST_LOGIN,CUST_EMAIL_ADDR,PROD_UNQ_MDL_ID,PROD_UNQ_DEVICE_ADDR,PROD_UQ_UUID,DEVICE_NAME,PROD_BAR_CODE,ITM_ID,ITM_NAME,ITM_BRAND_ID,ITM_BRAND,BOM,PACKING_LIST_NO,TRACKING_NO,ACTIVE_FIRMWARE_VER,LATEST_FIRMWARE_VER,LATEST_EMUI_VERSION,LATEST_NETWORK,STR_ORD
 ER_NO','table_blocksize'='256')""")
-    sql(s"""load data LOCAL inpath '$resourcesPath/oscon_10.csv' into table oscon_carbon_old options('USE_KETTLE'='true','DELIMITER'=',', 'QUOTECHAR'='\"','FILEHEADER'='ACTIVE_AREA_ID, ACTIVE_CHECK_DY, ACTIVE_CHECK_HOUR, ACTIVE_CHECK_MM, ACTIVE_CHECK_TIME, ACTIVE_CHECK_YR, ACTIVE_CITY, ACTIVE_COUNTRY, ACTIVE_DISTRICT, ACTIVE_EMUI_VERSION, ACTIVE_FIRMWARE_VER, ACTIVE_NETWORK, ACTIVE_OS_VERSION, ACTIVE_PROVINCE, BOM, CHECK_DATE, CHECK_DY, CHECK_HOUR, CHECK_MM, CHECK_YR, CUST_ADDRESS_ID, CUST_AGE, CUST_BIRTH_COUNTRY, CUST_BIRTH_DY, CUST_BIRTH_MM, CUST_BIRTH_YR, CUST_BUY_POTENTIAL, CUST_CITY, CUST_STATE, CUST_COUNTRY, CUST_COUNTY, CUST_EMAIL_ADDR, CUST_LAST_RVW_DATE, CUST_FIRST_NAME, CUST_ID, CUST_JOB_TITLE, CUST_LAST_NAME, CUST_LOGIN, CUST_NICK_NAME, CUST_PRFRD_FLG, CUST_SEX, CUST_STREET_NAME, CUST_STREET_NO, CUST_SUITE_NO, CUST_ZIP, DELIVERY_CITY, DELIVERY_STATE, DELIVERY_COUNTRY, DELIVERY_DISTRICT, DELIVERY_PROVINCE, DEVICE_NAME, INSIDE_NAME, ITM_BRAND, ITM_BRAND_ID, ITM_CATEGORY, IT
 M_CATEGORY_ID, ITM_CLASS, ITM_CLASS_ID, ITM_COLOR, ITM_CONTAINER, ITM_FORMULATION, ITM_MANAGER_ID, ITM_MANUFACT, ITM_MANUFACT_ID, ITM_ID, ITM_NAME, ITM_REC_END_DATE, ITM_REC_START_DATE, LATEST_AREAID, LATEST_CHECK_DY, LATEST_CHECK_HOUR, LATEST_CHECK_MM, LATEST_CHECK_TIME, LATEST_CHECK_YR, LATEST_CITY, LATEST_COUNTRY, LATEST_DISTRICT, LATEST_EMUI_VERSION, LATEST_FIRMWARE_VER, LATEST_NETWORK, LATEST_OS_VERSION, LATEST_PROVINCE, OL_ORDER_DATE, OL_ORDER_NO, OL_RET_ORDER_NO, OL_RET_DATE, OL_SITE, OL_SITE_DESC, PACKING_DATE, PACKING_DY, PACKING_HOUR, PACKING_LIST_NO, PACKING_MM, PACKING_YR, PRMTION_ID, PRMTION_NAME, PRM_CHANNEL_CAT, PRM_CHANNEL_DEMO, PRM_CHANNEL_DETAILS, PRM_CHANNEL_DMAIL, PRM_CHANNEL_EMAIL, PRM_CHANNEL_EVENT, PRM_CHANNEL_PRESS, PRM_CHANNEL_RADIO, PRM_CHANNEL_TV, PRM_DSCNT_ACTIVE, PRM_END_DATE, PRM_PURPOSE, PRM_START_DATE, PRODUCT_ID, PROD_BAR_CODE, PROD_BRAND_NAME, PRODUCT_NAME, PRODUCT_MODEL, PROD_MODEL_ID, PROD_COLOR, PROD_SHELL_COLOR, PROD_CPU_CLOCK, PROD_IMAGE, PROD_
 LIVE, PROD_LOC, PROD_LONG_DESC, PROD_RAM, PROD_ROM, PROD_SERIES, PROD_SHORT_DESC, PROD_THUMB, PROD_UNQ_DEVICE_ADDR, PROD_UNQ_MDL_ID, PROD_UPDATE_DATE, PROD_UQ_UUID, SHP_CARRIER, SHP_CODE, SHP_CONTRACT, SHP_MODE_ID, SHP_MODE, STR_ORDER_DATE, STR_ORDER_NO, TRACKING_NO, WH_CITY, WH_COUNTRY, WH_COUNTY, WH_ID, WH_NAME, WH_STATE, WH_STREET_NAME, WH_STREET_NO, WH_STREET_TYPE, WH_SUITE_NO, WH_ZIP, CUST_DEP_COUNT, CUST_VEHICLE_COUNT, CUST_ADDRESS_CNT, CUST_CRNT_CDEMO_CNT, CUST_CRNT_HDEMO_CNT, CUST_CRNT_ADDR_DM, CUST_FIRST_SHIPTO_CNT, CUST_FIRST_SALES_CNT, CUST_GMT_OFFSET, CUST_DEMO_CNT, CUST_INCOME, PROD_UNLIMITED, PROD_OFF_PRICE, PROD_UNITS, TOTAL_PRD_COST, TOTAL_PRD_DISC, PROD_WEIGHT, REG_UNIT_PRICE, EXTENDED_AMT, UNIT_PRICE_DSCNT_PCT, DSCNT_AMT, PROD_STD_CST, TOTAL_TX_AMT, FREIGHT_CHRG, WAITING_PERIOD, DELIVERY_PERIOD, ITM_CRNT_PRICE, ITM_UNITS, ITM_WSLE_CST, ITM_SIZE, PRM_CST, PRM_RESPONSE_TARGET, PRM_ITM_DM, SHP_MODE_CNT, WH_GMT_OFFSET, WH_SQ_FT, STR_ORD_QTY, STR_WSLE_CST, STR_LIST_PRIC
 E, STR_SALES_PRICE, STR_EXT_DSCNT_AMT, STR_EXT_SALES_PRICE, STR_EXT_WSLE_CST, STR_EXT_LIST_PRICE, STR_EXT_TX, STR_COUPON_AMT, STR_NET_PAID, STR_NET_PAID_INC_TX, STR_NET_PRFT, STR_SOLD_YR_CNT, STR_SOLD_MM_CNT, STR_SOLD_ITM_CNT, STR_TOTAL_CUST_CNT, STR_AREA_CNT, STR_DEMO_CNT, STR_OFFER_CNT, STR_PRM_CNT, STR_TICKET_CNT, STR_NET_PRFT_DM_A, STR_NET_PRFT_DM_B, STR_NET_PRFT_DM_C, STR_NET_PRFT_DM_D, STR_NET_PRFT_DM_E, STR_RET_STR_ID, STR_RET_REASON_CNT, STR_RET_TICKET_NO, STR_RTRN_QTY, STR_RTRN_AMT, STR_RTRN_TX, STR_RTRN_AMT_INC_TX, STR_RET_FEE, STR_RTRN_SHIP_CST, STR_RFNDD_CSH, STR_REVERSED_CHRG, STR_STR_CREDIT, STR_RET_NET_LOSS, STR_RTRNED_YR_CNT, STR_RTRN_MM_CNT, STR_RET_ITM_CNT, STR_RET_CUST_CNT, STR_RET_AREA_CNT, STR_RET_OFFER_CNT, STR_RET_PRM_CNT, STR_RET_NET_LOSS_DM_A, STR_RET_NET_LOSS_DM_B, STR_RET_NET_LOSS_DM_C, STR_RET_NET_LOSS_DM_D, OL_ORD_QTY, OL_WSLE_CST, OL_LIST_PRICE, OL_SALES_PRICE, OL_EXT_DSCNT_AMT, OL_EXT_SALES_PRICE, OL_EXT_WSLE_CST, OL_EXT_LIST_PRICE, OL_EXT_TX, OL_COUPO
 N_AMT, OL_EXT_SHIP_CST, OL_NET_PAID, OL_NET_PAID_INC_TX, OL_NET_PAID_INC_SHIP, OL_NET_PAID_INC_SHIP_TX, OL_NET_PRFT, OL_SOLD_YR_CNT, OL_SOLD_MM_CNT, OL_SHIP_DATE_CNT, OL_ITM_CNT, OL_BILL_CUST_CNT, OL_BILL_AREA_CNT, OL_BILL_DEMO_CNT, OL_BILL_OFFER_CNT, OL_SHIP_CUST_CNT, OL_SHIP_AREA_CNT, OL_SHIP_DEMO_CNT, OL_SHIP_OFFER_CNT, OL_WEB_PAGE_CNT, OL_WEB_SITE_CNT, OL_SHIP_MODE_CNT, OL_WH_CNT, OL_PRM_CNT, OL_NET_PRFT_DM_A, OL_NET_PRFT_DM_B, OL_NET_PRFT_DM_C, OL_NET_PRFT_DM_D, OL_RET_RTRN_QTY, OL_RTRN_AMT, OL_RTRN_TX, OL_RTRN_AMT_INC_TX, OL_RET_FEE, OL_RTRN_SHIP_CST, OL_RFNDD_CSH, OL_REVERSED_CHRG, OL_ACCOUNT_CREDIT, OL_RTRNED_YR_CNT, OL_RTRNED_MM_CNT, OL_RTRITM_CNT, OL_RFNDD_CUST_CNT, OL_RFNDD_AREA_CNT, OL_RFNDD_DEMO_CNT, OL_RFNDD_OFFER_CNT, OL_RTRNING_CUST_CNT, OL_RTRNING_AREA_CNT, OL_RTRNING_DEMO_CNT, OL_RTRNING_OFFER_CNT, OL_RTRWEB_PAGE_CNT, OL_REASON_CNT, OL_NET_LOSS, OL_NET_LOSS_DM_A, OL_NET_LOSS_DM_B, OL_NET_LOSS_DM_C','BAD_RECORDS_ACTION'='FORCE','BAD_RECORDS_LOGGER_ENABLE'='FALSE')""
 ")
+    sql(s"""load data LOCAL inpath '$resourcesPath/oscon_10.csv' into table oscon_carbon_old options('DELIMITER'=',', 'QUOTECHAR'='\"','FILEHEADER'='ACTIVE_AREA_ID, ACTIVE_CHECK_DY, ACTIVE_CHECK_HOUR, ACTIVE_CHECK_MM, ACTIVE_CHECK_TIME, ACTIVE_CHECK_YR, ACTIVE_CITY, ACTIVE_COUNTRY, ACTIVE_DISTRICT, ACTIVE_EMUI_VERSION, ACTIVE_FIRMWARE_VER, ACTIVE_NETWORK, ACTIVE_OS_VERSION, ACTIVE_PROVINCE, BOM, CHECK_DATE, CHECK_DY, CHECK_HOUR, CHECK_MM, CHECK_YR, CUST_ADDRESS_ID, CUST_AGE, CUST_BIRTH_COUNTRY, CUST_BIRTH_DY, CUST_BIRTH_MM, CUST_BIRTH_YR, CUST_BUY_POTENTIAL, CUST_CITY, CUST_STATE, CUST_COUNTRY, CUST_COUNTY, CUST_EMAIL_ADDR, CUST_LAST_RVW_DATE, CUST_FIRST_NAME, CUST_ID, CUST_JOB_TITLE, CUST_LAST_NAME, CUST_LOGIN, CUST_NICK_NAME, CUST_PRFRD_FLG, CUST_SEX, CUST_STREET_NAME, CUST_STREET_NO, CUST_SUITE_NO, CUST_ZIP, DELIVERY_CITY, DELIVERY_STATE, DELIVERY_COUNTRY, DELIVERY_DISTRICT, DELIVERY_PROVINCE, DEVICE_NAME, INSIDE_NAME, ITM_BRAND, ITM_BRAND_ID, ITM_CATEGORY, ITM_CATEGORY_ID, ITM_C
 LASS, ITM_CLASS_ID, ITM_COLOR, ITM_CONTAINER, ITM_FORMULATION, ITM_MANAGER_ID, ITM_MANUFACT, ITM_MANUFACT_ID, ITM_ID, ITM_NAME, ITM_REC_END_DATE, ITM_REC_START_DATE, LATEST_AREAID, LATEST_CHECK_DY, LATEST_CHECK_HOUR, LATEST_CHECK_MM, LATEST_CHECK_TIME, LATEST_CHECK_YR, LATEST_CITY, LATEST_COUNTRY, LATEST_DISTRICT, LATEST_EMUI_VERSION, LATEST_FIRMWARE_VER, LATEST_NETWORK, LATEST_OS_VERSION, LATEST_PROVINCE, OL_ORDER_DATE, OL_ORDER_NO, OL_RET_ORDER_NO, OL_RET_DATE, OL_SITE, OL_SITE_DESC, PACKING_DATE, PACKING_DY, PACKING_HOUR, PACKING_LIST_NO, PACKING_MM, PACKING_YR, PRMTION_ID, PRMTION_NAME, PRM_CHANNEL_CAT, PRM_CHANNEL_DEMO, PRM_CHANNEL_DETAILS, PRM_CHANNEL_DMAIL, PRM_CHANNEL_EMAIL, PRM_CHANNEL_EVENT, PRM_CHANNEL_PRESS, PRM_CHANNEL_RADIO, PRM_CHANNEL_TV, PRM_DSCNT_ACTIVE, PRM_END_DATE, PRM_PURPOSE, PRM_START_DATE, PRODUCT_ID, PROD_BAR_CODE, PROD_BRAND_NAME, PRODUCT_NAME, PRODUCT_MODEL, PROD_MODEL_ID, PROD_COLOR, PROD_SHELL_COLOR, PROD_CPU_CLOCK, PROD_IMAGE, PROD_LIVE, PROD_LOC, PROD
 _LONG_DESC, PROD_RAM, PROD_ROM, PROD_SERIES, PROD_SHORT_DESC, PROD_THUMB, PROD_UNQ_DEVICE_ADDR, PROD_UNQ_MDL_ID, PROD_UPDATE_DATE, PROD_UQ_UUID, SHP_CARRIER, SHP_CODE, SHP_CONTRACT, SHP_MODE_ID, SHP_MODE, STR_ORDER_DATE, STR_ORDER_NO, TRACKING_NO, WH_CITY, WH_COUNTRY, WH_COUNTY, WH_ID, WH_NAME, WH_STATE, WH_STREET_NAME, WH_STREET_NO, WH_STREET_TYPE, WH_SUITE_NO, WH_ZIP, CUST_DEP_COUNT, CUST_VEHICLE_COUNT, CUST_ADDRESS_CNT, CUST_CRNT_CDEMO_CNT, CUST_CRNT_HDEMO_CNT, CUST_CRNT_ADDR_DM, CUST_FIRST_SHIPTO_CNT, CUST_FIRST_SALES_CNT, CUST_GMT_OFFSET, CUST_DEMO_CNT, CUST_INCOME, PROD_UNLIMITED, PROD_OFF_PRICE, PROD_UNITS, TOTAL_PRD_COST, TOTAL_PRD_DISC, PROD_WEIGHT, REG_UNIT_PRICE, EXTENDED_AMT, UNIT_PRICE_DSCNT_PCT, DSCNT_AMT, PROD_STD_CST, TOTAL_TX_AMT, FREIGHT_CHRG, WAITING_PERIOD, DELIVERY_PERIOD, ITM_CRNT_PRICE, ITM_UNITS, ITM_WSLE_CST, ITM_SIZE, PRM_CST, PRM_RESPONSE_TARGET, PRM_ITM_DM, SHP_MODE_CNT, WH_GMT_OFFSET, WH_SQ_FT, STR_ORD_QTY, STR_WSLE_CST, STR_LIST_PRICE, STR_SALES_PRICE, 
 STR_EXT_DSCNT_AMT, STR_EXT_SALES_PRICE, STR_EXT_WSLE_CST, STR_EXT_LIST_PRICE, STR_EXT_TX, STR_COUPON_AMT, STR_NET_PAID, STR_NET_PAID_INC_TX, STR_NET_PRFT, STR_SOLD_YR_CNT, STR_SOLD_MM_CNT, STR_SOLD_ITM_CNT, STR_TOTAL_CUST_CNT, STR_AREA_CNT, STR_DEMO_CNT, STR_OFFER_CNT, STR_PRM_CNT, STR_TICKET_CNT, STR_NET_PRFT_DM_A, STR_NET_PRFT_DM_B, STR_NET_PRFT_DM_C, STR_NET_PRFT_DM_D, STR_NET_PRFT_DM_E, STR_RET_STR_ID, STR_RET_REASON_CNT, STR_RET_TICKET_NO, STR_RTRN_QTY, STR_RTRN_AMT, STR_RTRN_TX, STR_RTRN_AMT_INC_TX, STR_RET_FEE, STR_RTRN_SHIP_CST, STR_RFNDD_CSH, STR_REVERSED_CHRG, STR_STR_CREDIT, STR_RET_NET_LOSS, STR_RTRNED_YR_CNT, STR_RTRN_MM_CNT, STR_RET_ITM_CNT, STR_RET_CUST_CNT, STR_RET_AREA_CNT, STR_RET_OFFER_CNT, STR_RET_PRM_CNT, STR_RET_NET_LOSS_DM_A, STR_RET_NET_LOSS_DM_B, STR_RET_NET_LOSS_DM_C, STR_RET_NET_LOSS_DM_D, OL_ORD_QTY, OL_WSLE_CST, OL_LIST_PRICE, OL_SALES_PRICE, OL_EXT_DSCNT_AMT, OL_EXT_SALES_PRICE, OL_EXT_WSLE_CST, OL_EXT_LIST_PRICE, OL_EXT_TX, OL_COUPON_AMT, OL_EXT_SHIP_C
 ST, OL_NET_PAID, OL_NET_PAID_INC_TX, OL_NET_PAID_INC_SHIP, OL_NET_PAID_INC_SHIP_TX, OL_NET_PRFT, OL_SOLD_YR_CNT, OL_SOLD_MM_CNT, OL_SHIP_DATE_CNT, OL_ITM_CNT, OL_BILL_CUST_CNT, OL_BILL_AREA_CNT, OL_BILL_DEMO_CNT, OL_BILL_OFFER_CNT, OL_SHIP_CUST_CNT, OL_SHIP_AREA_CNT, OL_SHIP_DEMO_CNT, OL_SHIP_OFFER_CNT, OL_WEB_PAGE_CNT, OL_WEB_SITE_CNT, OL_SHIP_MODE_CNT, OL_WH_CNT, OL_PRM_CNT, OL_NET_PRFT_DM_A, OL_NET_PRFT_DM_B, OL_NET_PRFT_DM_C, OL_NET_PRFT_DM_D, OL_RET_RTRN_QTY, OL_RTRN_AMT, OL_RTRN_TX, OL_RTRN_AMT_INC_TX, OL_RET_FEE, OL_RTRN_SHIP_CST, OL_RFNDD_CSH, OL_REVERSED_CHRG, OL_ACCOUNT_CREDIT, OL_RTRNED_YR_CNT, OL_RTRNED_MM_CNT, OL_RTRITM_CNT, OL_RFNDD_CUST_CNT, OL_RFNDD_AREA_CNT, OL_RFNDD_DEMO_CNT, OL_RFNDD_OFFER_CNT, OL_RTRNING_CUST_CNT, OL_RTRNING_AREA_CNT, OL_RTRNING_DEMO_CNT, OL_RTRNING_OFFER_CNT, OL_RTRWEB_PAGE_CNT, OL_REASON_CNT, OL_NET_LOSS, OL_NET_LOSS_DM_A, OL_NET_LOSS_DM_B, OL_NET_LOSS_DM_C','BAD_RECORDS_ACTION'='FORCE','BAD_RECORDS_LOGGER_ENABLE'='FALSE')""")
   }
 
   test("test to check result for double data type") {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e6b60907/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
index 8670d8f..353db9e 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
@@ -93,8 +93,7 @@ class TestLoadDataWithHiveSyntax extends QueryTest with BeforeAndAfterAll {
         "row format delimited fields terminated by ','"
     )
 
-    sql(s"LOAD DATA local inpath '$resourcesPath/data.csv' INTO table smallinttable " +
-      "OPTIONS('USE_KETTLE'='false')")
+    sql(s"LOAD DATA local inpath '$resourcesPath/data.csv' INTO table smallinttable ")
     sql(s"LOAD DATA local inpath '$resourcesPath/datawithoutheader.csv' overwrite " +
       "INTO table smallinthivetable")