Posted to commits@carbondata.apache.org by ja...@apache.org on 2020/02/24 15:54:27 UTC

[carbondata] branch master updated (66a9d3e -> 55a07da)

This is an automated email from the ASF dual-hosted git repository.

jackylk pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git.


    from 66a9d3e  [CARBONDATA-3688] Add compressor name in data file name
     new 85d333c  [Re-factory] Re-factory modules
     new 55a07da  [Re-factory] Re-factory modules

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
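
For anyone who wants to examine this push locally, the revision range in
the subject line can be inspected with standard git commands (a minimal
sketch, assuming a local clone of the gitbox repository above):

    # refresh the local view of master
    git fetch origin master
    # list the two new commits in this push
    git log --oneline 66a9d3e..55a07da
    # reproduce the file-change summary shown below
    git diff --stat 66a9d3e 55a07da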


Summary of changes:
 assembly/pom.xml                                   |    2 +-
 conf/dataload.properties.template                  |    2 +-
 datamap/bloom/pom.xml                              |   82 -
 datamap/examples/pom.xml                           |   83 -
 datamap/lucene/pom.xml                             |  145 --
 datamap/mv/core/pom.xml                            |  253 ---
 .../carbondata/mv/plans/ModularToSQLSuite.scala    |  148 --
 .../carbondata/mv/plans/SignatureSuite.scala       |   77 -
 .../carbondata/mv/rewrite/MVCreateTestCase.scala   | 1295 -----------
 .../mv/rewrite/MVFilterAndJoinTest.scala           |   70 -
 .../mv/rewrite/MVIncrementalLoadingTestcase.scala  |  642 ------
 .../carbondata/mv/rewrite/MVSampleTestCase.scala   |  156 --
 .../carbondata/mv/rewrite/MVTPCDSTestCase.scala    |  132 --
 .../carbondata/mv/rewrite/MVTpchTestCase.scala     |  222 --
 .../mv/rewrite/TestAllOperationsOnMV.scala         |  625 ------
 .../mv/rewrite/TestPartitionWithMV.scala           |  703 ------
 .../carbondata/mv/rewrite/TestSQLSuite.scala       |   98 -
 .../carbondata/mv/rewrite/Tpcds_1_4_Suite.scala    |   84 -
 datamap/mv/plan/pom.xml                            |  161 --
 docs/alluxio-guide.md                              |   10 +-
 docs/datamap-developer-guide.md                    |   34 -
 docs/datamap/datamap-management.md                 |  141 --
 docs/ddl-of-carbondata.md                          |    4 +-
 docs/documentation.md                              |    2 +-
 docs/index-developer-guide.md                      |   34 +
 docs/index-server.md                               |    4 +-
 .../bloomfilter-index-guide.md}                    |    0
 docs/index/index-management.md                     |  141 ++
 .../lucene-index-guide.md}                         |    0
 .../mv-datamap-guide.md => index/mv-guide.md}      |    0
 docs/introduction.md                               |    6 +-
 docs/language-manual.md                            |    6 +-
 docs/performance-tuning.md                         |    2 +-
 docs/sdk-guide.md                                  |   12 +-
 docs/streaming-guide.md                            |    4 +-
 examples/flink/pom.xml                             |    2 +-
 examples/spark/pom.xml                             |  256 +++
 .../examples/sdk/CarbonReaderExample.java          |    0
 .../carbondata/examples/sdk/SDKS3Example.java      |    0
 .../carbondata/examples/sdk/SDKS3ReadExample.java  |    0
 .../examples/sdk/SDKS3SchemaReadExample.java       |    0
 .../examples/sql/JavaCarbonSessionExample.java     |   92 +
 .../src/main/resources/Test_Data1.csv              |    0
 .../src/main/resources/complexdata.csv             |    0
 .../{spark2 => spark}/src/main/resources/data.csv  |    0
 .../{spark2 => spark}/src/main/resources/data1.csv |    0
 .../src/main/resources/dataSample.csv              |    0
 .../spark/src/main}/resources/dimSample.csv        |    0
 .../src/main/resources/factSample.csv              |    0
 .../src/main/resources/log4j.properties            |    0
 .../src/main/resources/sample.csv                  |    0
 .../src/main/resources/streamSample.csv            |    0
 .../benchmark/ConcurrentQueryBenchmark.scala       |  580 +++++
 .../org/apache/carbondata/benchmark/Query.scala    |    0
 .../carbondata/benchmark/SCDType2Benchmark.scala   |  300 +++
 .../benchmark/SimpleQueryBenchmark.scala           |  346 +++
 .../carbondata/examples/AlluxioExample.scala       |    0
 .../carbondata/examples/AlterTableExample.scala    |    0
 .../examples/CarbonDataFrameExample.scala          |    0
 .../carbondata/examples/CarbonSessionExample.scala |  147 ++
 .../examples/CarbonSortColumnsExample.scala        |  122 ++
 .../examples/CaseClassDataFrameAPIExample.scala    |    0
 .../examples/CustomCompactionExample.scala         |  101 +
 .../examples/DataFrameComplexTypeExample.scala     |    0
 .../examples/DataManagementExample.scala           |  114 +
 .../examples/DataUpdateDeleteExample.scala         |    0
 .../carbondata/examples/DirectSQLExample.scala     |  113 +
 .../carbondata/examples/ExternalTableExample.scala |  104 +
 .../carbondata/examples/HadoopFileExample.scala    |   77 +
 .../apache/carbondata/examples/HiveExample.scala   |  303 +++
 .../carbondata/examples/LuceneDataMapExample.scala |    0
 .../org/apache/carbondata/examples/MVExample.scala |    0
 .../carbondata/examples/QuerySegmentExample.scala  |  150 ++
 .../apache/carbondata/examples/S3CsvExample.scala  |   99 +
 .../org/apache/carbondata/examples/S3Example.scala |  141 ++
 .../carbondata/examples/S3UsingSDkExample.scala    |    0
 .../carbondata/examples/SparkSessionExample.scala  |  179 ++
 .../examples/SparkStreamingExample.scala           |  210 ++
 .../examples/StandardPartitionExample.scala        |  199 ++
 .../carbondata/examples/StreamSQLExample.scala     |    0
 .../examples/StreamingUsingBatchLoadExample.scala  |  208 ++
 .../examples/StreamingWithRowParserExample.scala   |  214 ++
 .../examples/StructuredStreamingExample.scala      |  209 ++
 .../TableLevelCompactionOptionExample.scala        |  125 ++
 .../carbondata/examples/util/ExampleUtils.scala    |  149 ++
 .../apache/carbondata/examplesCI/RunExamples.scala |  129 ++
 examples/spark2/pom.xml                            |  256 ---
 .../examples/sql/JavaCarbonSessionExample.java     |   92 -
 .../benchmark/ConcurrentQueryBenchmark.scala       |  580 -----
 .../carbondata/benchmark/SCDType2Benchmark.scala   |  300 ---
 .../benchmark/SimpleQueryBenchmark.scala           |  346 ---
 .../carbondata/examples/CarbonSessionExample.scala |  147 --
 .../examples/CarbonSortColumnsExample.scala        |  122 --
 .../examples/CustomCompactionExample.scala         |  101 -
 .../examples/DataManagementExample.scala           |  114 -
 .../carbondata/examples/DirectSQLExample.scala     |  113 -
 .../carbondata/examples/ExternalTableExample.scala |  104 -
 .../carbondata/examples/HadoopFileExample.scala    |   77 -
 .../apache/carbondata/examples/HiveExample.scala   |  303 ---
 .../carbondata/examples/QuerySegmentExample.scala  |  150 --
 .../apache/carbondata/examples/S3CsvExample.scala  |   99 -
 .../org/apache/carbondata/examples/S3Example.scala |  141 --
 .../carbondata/examples/SparkSessionExample.scala  |  179 --
 .../examples/SparkStreamingExample.scala           |  210 --
 .../examples/StandardPartitionExample.scala        |  199 --
 .../examples/StreamingUsingBatchLoadExample.scala  |  208 --
 .../examples/StreamingWithRowParserExample.scala   |  214 --
 .../examples/StructuredStreamingExample.scala      |  209 --
 .../TableLevelCompactionOptionExample.scala        |  125 --
 .../carbondata/examples/util/ExampleUtils.scala    |  149 --
 .../apache/carbondata/examplesCI/RunExamples.scala |  129 --
 index/bloom/pom.xml                                |   82 +
 .../datamap/bloom/AbstractBloomDataMapWriter.java  |    0
 .../datamap/bloom/BloomCacheKeyValue.java          |    0
 .../datamap/bloom/BloomCoarseGrainDataMap.java     |    0
 .../bloom/BloomCoarseGrainDataMapFactory.java      |    0
 .../datamap/bloom/BloomDataMapBuilder.java         |    0
 .../datamap/bloom/BloomDataMapCache.java           |    0
 .../datamap/bloom/BloomDataMapDistributable.java   |    0
 .../datamap/bloom/BloomDataMapModel.java           |    0
 .../datamap/bloom/BloomDataMapWriter.java          |    0
 .../datamap/bloom/BloomIndexFileStore.java         |    0
 .../carbondata/datamap/bloom/DataConvertUtil.java  |    0
 .../hadoop/util/bloom/CarbonBloomFilter.java       |    0
 index/examples/pom.xml                             |   83 +
 .../datamap/examples/BlockletMinMax.java           |    0
 .../datamap/examples/MinMaxDataWriter.java         |    0
 .../datamap/examples/MinMaxIndexBlockDetails.java  |    0
 .../datamap/examples/MinMaxIndexDataMap.java       |    0
 .../examples/MinMaxIndexDataMapFactory.java        |    0
 .../datamap/examples/MinMaxDataMapSuite.scala      |    0
 index/lucene/pom.xml                               |  145 ++
 .../datamap/lucene/LuceneDataMapBuilder.java       |    0
 .../datamap/lucene/LuceneDataMapDistributable.java |    0
 .../datamap/lucene/LuceneDataMapFactoryBase.java   |    0
 .../datamap/lucene/LuceneDataMapWriter.java        |    0
 .../datamap/lucene/LuceneFineGrainDataMap.java     |    0
 .../lucene/LuceneFineGrainDataMapFactory.java      |    0
 index/secondary-index/pom.xml                      |  244 +++
 .../CarbonDataFileMergeTestCaseOnSI.scala          |    0
 .../CarbonIndexFileMergeTestCaseWithSI.scala       |    0
 .../testsuite/secondaryindex/DropTableTest.scala   |    0
 .../InsertIntoCarbonTableTestCase.scala            |    0
 ...tAlterTableColumnRenameWithSecondaryIndex.scala |    0
 .../TestBroadCastSIFilterPushJoinWithUDF.scala     |    0
 .../TestCTASWithSecondaryIndex.scala               |    0
 .../secondaryindex/TestCacheOperationsForSI.scala  |    0
 .../testsuite/secondaryindex/TestCarbonJoin.scala  |    0
 .../TestCreateIndexForCleanAndDeleteSegment.scala  |    0
 .../secondaryindex/TestCreateIndexTable.scala      |    0
 .../TestCreateIndexWithLoadAndCompaction.scala     |  267 +++
 .../TestLikeQueryWithSecondaryIndex.scala          |    0
 .../TestNIQueryWithSecondaryIndex.scala            |    0
 .../TestRegisterIndexCarbonTable.scala             |    0
 .../secondaryindex/TestSIWithAddSegment.scala      |    0
 .../secondaryindex/TestSIWithSecondryIndex.scala   |    0
 .../TestSecondaryIndexForORFilterPushDown.scala    |    0
 .../TestSecondaryIndexWithAggQueries.scala         |    0
 .../secondaryindex/TestSecondaryIndexWithIUD.scala |    0
 ...IndexWithIndexOnFirstColumnAndSortColumns.scala |    0
 .../TestSecondaryIndexWithLocalDictionary.scala    |    0
 .../TestSecondaryIndexWithUnsafeColumnPage.scala   |    0
 .../apache/spark/util/TestCarbonSegmentUtil.scala  |  313 +++
 integration/flink/pom.xml                          |    6 +-
 .../carbondata/hive/CarbonObjectInspector.java     |    2 +
 integration/presto/pom.xml                         |    2 +-
 integration/spark-common-cluster-test/pom.xml      |   16 +-
 .../sdv/generated/ComplexDataTypeTestCase.scala    |    2 +-
 .../datasource/SparkCarbonDataSourceTestCase.scala |    8 +-
 .../spark/sql/common/util/DataSourceTestUtil.scala |    6 +-
 integration/spark-common-test/pom.xml              |  443 ----
 .../src/test/resources/structofarray.csv           |   10 -
 .../spark/testsuite/bigdecimal/TestBigInt.scala    |  102 -
 .../TestDimensionWithDecimalDataType.scala         |   63 -
 .../testsuite/binary/TestBinaryDataType.scala      | 1669 --------------
 .../complexType/TestAdaptiveComplexType.scala      |  570 -----
 .../TestAdaptiveEncodingForNullValues.scala        |  173 --
 .../complexType/TestCompactionComplexType.scala    | 1138 ----------
 .../complexType/TestComplexDataType.scala          | 1173 ----------
 .../complexType/TestComplexTypeQuery.scala         |  301 ---
 .../dataload/TestLoadDataWithBlankLine.scala       |   63 -
 .../dataload/TestLoadDataWithCompression.scala     |  645 ------
 .../TestLoadDataWithEmptyArrayColumns.scala        |   63 -
 .../dataload/TestLoadDataWithJunkChars.scala       |   57 -
 .../dataload/TestNoInvertedIndexLoadAndQuery.scala |  309 ---
 .../emptyrow/TestCSVHavingOnlySpaceChar.scala      |   59 -
 .../spark/testsuite/emptyrow/TestEmptyRows.scala   |   89 -
 .../TestAdaptiveEncodingForPrimitiveTypes.scala    |  412 ----
 .../testsuite/addsegment/AddSegmentTestCase.scala  |  859 --------
 .../aggquery/AllDataTypesTestCaseAggregate.scala   |  106 -
 .../testsuite/aggquery/AverageQueryTestCase.scala  |  115 -
 .../allqueries/AllDataTypesTestCase.scala          | 1172 ----------
 .../allqueries/MeasureOnlyTableTestCases.scala     |  396 ----
 .../TestAlterTableSortColumnsProperty.scala        |  580 -----
 .../badrecordloger/BadRecordActionTest.scala       |  277 ---
 .../badrecordloger/BadRecordEmptyDataTest.scala    |  187 --
 .../badrecordloger/BadRecordLoggerTest.scala       |  359 ---
 .../testsuite/bigdecimal/TestBigDecimal.scala      |  223 --
 .../bigdecimal/TestNullAndEmptyFields.scala        |  116 -
 .../bigdecimal/TestNullAndEmptyFieldsUnsafe.scala  |  117 -
 .../CarbonCustomBlockDistributionTest.scala        |  113 -
 .../compaction/TestHybridCompaction.scala          |  234 --
 .../TestCreateDDLForComplexMapType.scala           |  531 -----
 .../createTable/TestCreateTableAsSelect.scala      |  458 ----
 .../TestNonTransactionalCarbonTableForBinary.scala |  162 --
 ...nTransactionalCarbonTableWithAvroDataType.scala | 1264 -----------
 .../CompactionSupportGlobalSortParameterTest.scala |  582 -----
 .../DataCompactionBlockletBoundryTest.scala        |   92 -
 .../DataCompactionBoundaryConditionsTest.scala     |  105 -
 .../DataCompactionCardinalityBoundryTest.scala     |  120 --
 .../datacompaction/DataCompactionLockTest.scala    |  120 --
 .../MajorCompactionIgnoreInMinorTest.scala         |  186 --
 .../MajorCompactionStopsAfterCompaction.scala      |  140 --
 .../TestDataWithDicExcludeAndInclude.scala         |   97 -
 .../dataload/TestGlobalSortDataLoad.scala          |  491 -----
 .../TestLoadDataWithDiffTimestampFormat.scala      |  133 --
 .../TestLoadDataWithHiveSyntaxDefaultFormat.scala  |  763 -------
 .../TestLoadDataWithHiveSyntaxUnsafe.scala         |  716 ------
 .../dataload/TestRangeColumnDataLoad.scala         |  949 --------
 .../dataload/TestTableLevelBlockSize.scala         |  148 --
 .../testsuite/dataload/TestTableLoadMinSize.scala  |  169 --
 .../testsuite/datamap/CGDataMapTestCase.scala      |  571 -----
 .../testsuite/datamap/FGDataMapTestCase.scala      |  583 -----
 .../dataretention/DataRetentionTestCase.scala      |  288 ---
 .../deleteTable/TestDeleteTableNewDDL.scala        |  244 ---
 .../testsuite/detailquery/CastColumnTestCase.scala |  970 ---------
 .../HighCardinalityDataTypesTestCase.scala         |  248 ---
 .../RangeFilterAllDataTypesTestCases.scala         |  675 ------
 .../detailquery/RangeFilterTestCase.scala          |  636 ------
 .../DateDataTypeDirectDictionaryTest.scala         |  159 --
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |  102 -
 ...rectDictionaryWithOffHeapSortDisabledTest.scala |   81 -
 .../DateDataTypeNullDataTest.scala                 |   82 -
 ...TimestampDataTypeDirectDictionaryTestCase.scala |  182 --
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |   99 -
 .../TimestampNoDictionaryColumnTestCase.scala      |   91 -
 .../testsuite/filterexpr/CountStarTestCase.scala   |   74 -
 .../filterexpr/FilterProcessorTestCase.scala       |  402 ----
 .../filterexpr/GrtLtFilterProcessorTestCase.scala  |  187 --
 .../NullMeasureValueTestCaseFilter.scala           |   55 -
 .../TestAndEqualFilterEmptyOperandValue.scala      |   76 -
 .../testsuite/filterexpr/TestGrtLessFilter.scala   |   88 -
 .../spark/testsuite/filterexpr/TestInFilter.scala  |  173 --
 .../testsuite/filterexpr/TestNotNullFilter.scala   |   59 -
 .../FlatFolderTableLoadingTestCase.scala           |  182 --
 .../testsuite/iud/DeleteCarbonTableTestCase.scala  |  382 ----
 .../testsuite/iud/UpdateCarbonTableTestCase.scala  |  899 --------
 .../UpdateCarbonTableTestCaseWithBadRecord.scala   |   69 -
 .../LocalDictionarySupportLoadTableTest.scala      |  338 ---
 .../NullMeasureValueTestCaseAggregate.scala        |   81 -
 .../TestNullValueSerialization.scala               |  100 -
 .../testsuite/partition/TestShowPartitions.scala   |   94 -
 .../sdk/TestSDKWithTransactionalTable.scala        |  114 -
 .../testsuite/sortcolumns/TestSortColumns.scala    |  394 ----
 .../StandardPartitionGlobalSortTestCase.scala      | 1069 ---------
 .../StandardPartitionTableCleanTestCase.scala      |  187 --
 .../StandardPartitionTableCompactionTestCase.scala |  224 --
 .../StandardPartitionTableDropTestCase.scala       |  228 --
 .../StandardPartitionTableLoadingTestCase.scala    |  588 -----
 .../StandardPartitionTableOverwriteTestCase.scala  |  321 ---
 .../StandardPartitionTableQueryTestCase.scala      |  542 -----
 .../windowsexpr/WindowsExprTestCase.scala          |   66 -
 integration/spark-common/pom.xml                   |  298 ---
 .../carbondata/spark/rdd/SparkReadSupport.scala    |   28 -
 .../apache/spark/sql/profiler/ProfilerLogger.scala |  367 ----
 .../apache/spark/sql/test/TestQueryExecutor.scala  |  187 --
 .../org/apache/spark/sql/test/util/QueryTest.scala |  201 --
 integration/spark-datasource/pom.xml               |  214 --
 ...org.apache.spark.sql.sources.DataSourceRegister |   17 -
 .../org/apache/carbondata/sdk/util/BinaryUtil.java |   89 -
 .../src/test/resources/structofarray.csv           |   21 -
 .../SparkCarbonDataSourceBinaryTest.scala          |  729 -------
 .../datasource/SparkCarbonDataSourceTest.scala     | 1983 -----------------
 ...TestCreateTableUsingSparkCarbonFileFormat.scala |  507 -----
 .../spark/sql/carbondata/datasource/TestUtil.scala |  181 --
 integration/spark/pom.xml                          |  589 +++++
 .../apache/carbondata/datamap/DataMapManager.java  |    0
 .../carbondata/datamap/IndexDataMapProvider.java   |    0
 .../spark/exception/ProcessMetaDataException.java  |    0
 .../spark/load/DecimalSerializableComparator.java  |    0
 .../SparkGenericRowReadSupportImpl.java            |    0
 .../spark/readsupport/SparkRowReadSupportImpl.java |    0
 .../org/apache/carbondata/spark/util/Util.java     |    0
 .../org/apache/carbondata/api/CarbonStore.scala    |    0
 .../converter/SparkDataTypeConverterImpl.java      |    0
 .../datamap/CarbonMergeBloomIndexFilesRDD.scala    |    0
 .../datamap/IndexDataMapRebuildRDD.scala           |    0
 .../apache/carbondata/datamap/TextMatchUDF.scala   |    0
 .../carbondata/events/AlterTableEvents.scala       |    0
 .../org/apache/carbondata/events/CacheEvents.scala |    0
 .../carbondata/events/CarbonInitEvents.scala       |    0
 .../carbondata/events/CleanFilesEvents.scala       |    0
 .../events/CreateCarbonRelationEvent.scala         |    0
 .../carbondata/events/CreateDatabaseEvents.scala   |    0
 .../carbondata/events/CreateTableEvents.scala      |    0
 .../apache/carbondata/events/DataMapEvents.scala   |    0
 .../carbondata/events/DeleteSegmentEvents.scala    |    0
 .../carbondata/events/DropDataMapEvents.scala      |    0
 .../apache/carbondata/events/DropTableEvents.scala |    0
 .../org/apache/carbondata/events/Events.scala      |    0
 .../org/apache/carbondata/events/IUDEvents.scala   |    0
 .../carbondata/events/IndexServerEvents.scala      |    0
 .../carbondata/events/LookupRelationEvents.scala   |    0
 .../carbondata/events/RefreshTableEvents.scala     |    0
 .../events/exception/EventExceptions.scala         |    0
 .../scala/org/apache/carbondata/geo/GeoUtils.scala |    0
 .../org/apache/carbondata/geo/InPolygonUDF.scala   |    0
 .../carbondata/indexserver/DataMapJobs.scala       |    0
 .../indexserver/DistributedCountRDD.scala          |    0
 .../indexserver/DistributedPruneRDD.scala          |    0
 .../indexserver/DistributedRDDUtils.scala          |    0
 .../indexserver/DistributedShowCacheRDD.scala      |    0
 .../carbondata/indexserver/IndexServer.scala       |    0
 .../indexserver/InvalidateSegmentCacheRDD.scala    |    0
 .../carbondata/indexserver/SegmentPruneRDD.scala   |    0
 .../carbondata/spark/CarbonColumnValidator.scala   |    0
 .../org/apache/carbondata/spark/CarbonOption.scala |    0
 .../carbondata/spark/CarbonSparkFactory.scala      |    0
 .../apache/carbondata/spark/InitInputMetrics.java  |    0
 .../scala/org/apache/carbondata/spark/KeyVal.scala |    0
 .../apache/carbondata/spark/StreamingOption.scala  |    0
 .../carbondata/spark/load/CsvRDDHelper.scala       |    0
 .../spark/load/DataLoadProcessBuilderOnSpark.scala |    0
 .../spark/load/DataLoadProcessorStepOnSpark.scala  |    0
 .../carbondata/spark/load/GlobalSortHelper.scala   |    0
 .../spark/rdd/CarbonDataRDDFactory.scala           |    0
 .../spark/rdd/CarbonDeltaRowScanRDD.scala          |    0
 .../spark/rdd/CarbonDropPartitionRDD.scala         |    0
 .../spark/rdd/CarbonGlobalDictionaryRDD.scala      |    0
 .../carbondata/spark/rdd/CarbonIUDMergerRDD.scala  |    0
 .../carbondata/spark/rdd/CarbonMergerRDD.scala     |    0
 .../apache/carbondata/spark/rdd/CarbonRDD.scala    |    0
 .../carbondata/spark/rdd/CarbonScanRDD.scala       |    0
 .../spark/rdd/CarbonSparkPartition.scala           |    0
 .../spark/rdd/CarbonTableCompactor.scala           |    0
 .../carbondata/spark/rdd/CompactionFactory.scala   |    0
 .../apache/carbondata/spark/rdd/Compactor.scala    |    0
 .../spark/rdd/InsertTaskCompletionListener.scala   |    0
 .../spark/rdd/NewCarbonDataLoadRDD.scala           |    0
 .../spark/rdd/QueryTaskCompletionListener.scala    |    0
 .../carbondata/spark/rdd/SparkReadSupport.scala    |   29 +
 .../carbondata/spark/rdd/StreamHandoffRDD.scala    |    0
 .../carbondata/spark/rdd/UpdateDataLoad.scala      |    0
 .../spark/thriftserver/CarbonThriftServer.scala    |    0
 .../carbondata/spark/util/CarbonScalaUtil.scala    |    0
 .../carbondata/spark/util/CarbonSparkUtil.scala    |    0
 .../apache/carbondata/spark/util/CommonUtil.scala  |    0
 .../carbondata/spark/util/DataGenerator.scala      |    0
 .../spark/util/DataTypeConverterUtil.scala         |    0
 .../spark/vectorreader/ColumnarVectorWrapper.java  |    0
 .../vectorreader/ColumnarVectorWrapperDirect.java  |    0
 .../vectorreader/VectorizedCarbonRecordReader.java |    0
 .../apache/carbondata/store/SparkCarbonStore.scala |    0
 .../stream/CarbonStreamRecordReader.java           |    0
 .../carbondata/stream/StreamJobManager.scala       |    0
 .../streaming/CarbonSparkStreamingListener.scala   |    0
 .../streaming/CarbonStreamSparkStreaming.scala     |    0
 .../streaming/CarbonStreamingQueryListener.scala   |    0
 .../carbondata/streaming/StreamSinkFactory.scala   |    0
 .../org/apache/spark/CarbonInputMetrics.scala      |    0
 .../apache/spark/DataSkewRangePartitioner.scala    |    0
 .../org/apache/spark/rdd/CarbonMergeFilesRDD.scala |    0
 .../apache/spark/rdd/DataLoadCoalescedRDD.scala    |    0
 .../spark/rdd/DataLoadPartitionCoalescer.scala     |    0
 .../apache/spark/sql/CarbonBoundReference.scala    |    0
 .../apache/spark/sql/CarbonCatalystOperators.scala |    0
 .../org/apache/spark/sql/CarbonCountStar.scala     |    0
 .../apache/spark/sql/CarbonDataFrameWriter.scala   |    0
 .../spark/sql/CarbonDatasourceHadoopRelation.scala |    0
 .../apache/spark/sql/CarbonDictionaryWrapper.java  |    0
 .../scala/org/apache/spark/sql/CarbonEnv.scala     |  443 ++++
 .../org/apache/spark/sql/CarbonExpressions.scala   |    0
 .../org/apache/spark/sql/CarbonExtensions.scala    |    0
 .../scala/org/apache/spark/sql/CarbonSession.scala |    0
 .../scala/org/apache/spark/sql/CarbonSource.scala  |    0
 .../spark/sql/CarbonSparkStreamingFactory.scala    |    0
 .../scala/org/apache/spark/sql/CarbonUtils.scala   |    0
 .../org/apache/spark/sql/CarbonVectorProxy.java    |    0
 .../org/apache/spark/sql/ColumnVectorFactory.java  |    0
 .../spark/sql/CustomDeterministicExpression.scala  |    0
 .../scala/org/apache/spark/sql/EnvHelper.scala     |    0
 .../main/scala/org/apache/spark/sql/SQLConf.scala  |    0
 .../apache/spark/sql/SparkUnknownExpression.scala  |    0
 .../execution/datasources/CarbonFileIndex.scala    |    0
 .../datasources/CarbonFileIndexReplaceRule.scala   |    0
 .../datasources/CarbonSparkDataSourceUtil.scala    |    0
 .../datasources/SparkCarbonFileFormat.scala        |    0
 .../readsupport/SparkUnsafeRowReadSuport.scala     |    0
 .../CarbonTaskCompletionListener.scala             |    0
 .../catalyst/AbstractCarbonSparkSQLParser.scala    |    0
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala    |    0
 .../spark/sql/catalyst/CarbonParserUtil.scala      |    0
 .../catalyst/CarbonTableIdentifierImplicit.scala   |    0
 .../spark/sql/catalyst/analysis/EmptyRule.scala    |    0
 .../sql/events/MergeBloomIndexEventListener.scala  |    0
 .../spark/sql/events/MergeIndexEventListener.scala |    0
 .../sql/execution/CastExpressionOptimization.scala |    0
 .../sql/execution/command/cache/CacheUtil.scala    |    0
 .../command/cache/CarbonDropCacheCommand.scala     |    0
 .../command/cache/CarbonShowCacheCommand.scala     |    0
 .../command/carbonTableSchemaCommon.scala          |    0
 .../datamap/CarbonCreateDataMapCommand.scala       |    0
 .../datamap/CarbonDataMapRebuildCommand.scala      |    0
 .../command/datamap/CarbonDataMapShowCommand.scala |    0
 .../command/datamap/CarbonDropDataMapCommand.scala |    0
 .../command/management/CarbonAddLoadCommand.scala  |    0
 .../CarbonAlterTableCompactionCommand.scala        |    0
 .../CarbonAlterTableFinishStreaming.scala          |    0
 .../management/CarbonCleanFilesCommand.scala       |    0
 .../command/management/CarbonCliCommand.scala      |    0
 .../management/CarbonDeleteLoadByIdCommand.scala   |    0
 .../CarbonDeleteLoadByLoadDateCommand.scala        |    0
 .../management/CarbonDeleteStageFilesCommand.scala |    0
 .../management/CarbonInsertFromStageCommand.scala  |    0
 .../management/CarbonInsertIntoCommand.scala       |    0
 .../CarbonInsertIntoHadoopFsRelationCommand.scala  |    0
 .../management/CarbonInsertIntoWithDf.scala        |    0
 .../command/management/CarbonLoadDataCommand.scala |    0
 .../command/management/CarbonLoadParams.scala      |    0
 .../management/CarbonShowLoadsCommand.scala        |    0
 .../command/management/CommonLoadUtils.scala       |    0
 .../management/RefreshCarbonTableCommand.scala     |    0
 .../mutation/CarbonProjectForDeleteCommand.scala   |    0
 .../mutation/CarbonProjectForUpdateCommand.scala   |    0
 .../command/mutation/CarbonTruncateCommand.scala   |    0
 .../command/mutation/DeleteExecution.scala         |    0
 .../command/mutation/HorizontalCompaction.scala    |    0
 .../mutation/HorizontalCompactionException.scala   |    0
 .../execution/command/mutation/IUDCommonUtil.scala |    0
 .../mutation/merge/CarbonMergeDataSetCommand.scala |    0
 .../merge/CarbonMergeDataSetException.scala        |    0
 .../mutation/merge/HistoryTableLoadHelper.scala    |    0
 .../mutation/merge/MergeDataSetBuilder.scala       |    0
 .../command/mutation/merge/MergeProjection.scala   |    0
 .../command/mutation/merge/MutationAction.scala    |    0
 .../command/mutation/merge/TranxManager.scala      |    0
 .../command/mutation/merge/interfaces.scala        |    0
 .../spark/sql/execution/command/package.scala      |    0
 .../CarbonAlterTableAddHivePartitionCommand.scala  |    0
 .../CarbonAlterTableDropHivePartitionCommand.scala |    0
 .../schema/CarbonAlterTableAddColumnCommand.scala  |    0
 ...nAlterTableColRenameDataTypeChangeCommand.scala |    0
 .../schema/CarbonAlterTableDropColumnCommand.scala |    0
 .../schema/CarbonAlterTableRenameCommand.scala     |    0
 .../schema/CarbonAlterTableSetCommand.scala        |    0
 .../schema/CarbonAlterTableUnsetCommand.scala      |    0
 .../command/stream/CarbonCreateStreamCommand.scala |    0
 .../command/stream/CarbonDropStreamCommand.scala   |    0
 .../command/stream/CarbonShowStreamsCommand.scala  |    0
 .../table/CarbonCreateDataSourceTableCommand.scala |    0
 .../table/CarbonCreateTableAsSelectCommand.scala   |    0
 .../command/table/CarbonCreateTableCommand.scala   |    0
 .../table/CarbonCreateTableLikeCommand.scala       |    0
 .../table/CarbonDescribeFormattedCommand.scala     |    0
 .../command/table/CarbonDropTableCommand.scala     |    0
 .../command/table/CarbonExplainCommand.scala       |    0
 .../table/CarbonShowCreateTableCommand.scala       |    0
 .../command/table/CarbonShowTablesCommand.scala    |    0
 .../datasources/SparkCarbonTableFormat.scala       |    0
 .../strategy/CarbonLateDecodeStrategy.scala        |    0
 .../sql/execution/strategy/CarbonPlanHelper.scala  |    0
 .../spark/sql/execution/strategy/DDLHelper.scala   |  477 ++++
 .../spark/sql/execution/strategy/DDLStrategy.scala |  277 +++
 .../spark/sql/execution/strategy/DMLHelper.scala   |    0
 .../execution/strategy/MixedFormatHandler.scala    |    0
 .../sql/execution/strategy/PushDownHelper.scala    |    0
 .../strategy/StreamingTableStrategy.scala          |    0
 .../streaming/CarbonAppendableStreamSink.scala     |    0
 .../spark/sql/hive/CarbonAnalysisRules.scala       |    0
 .../org/apache/spark/sql/hive/CarbonAnalyzer.scala |    0
 .../spark/sql/hive/CarbonFileMetastore.scala       |    0
 .../spark/sql/hive/CarbonHiveMetaStore.scala       |    0
 .../spark/sql/hive/CarbonHiveMetadataUtil.scala    |    0
 .../org/apache/spark/sql/hive/CarbonMVRules.scala  |    0
 .../apache/spark/sql/hive/CarbonMetaStore.scala    |    0
 .../org/apache/spark/sql/hive/CarbonRelation.scala |    0
 .../spark/sql/hive/CarbonSessionCatalog.scala      |    0
 .../spark/sql/hive/CarbonSessionCatalogUtil.scala  |    0
 .../apache/spark/sql/hive/CarbonSessionUtil.scala  |    0
 .../spark/sql/hive/CarbonSqlAstBuilder.scala       |    0
 .../org/apache/spark/sql/hive/CarbonSqlConf.scala  |    0
 .../CreateCarbonSourceTableAsSelectCommand.scala   |    0
 .../apache/spark/sql/hive/DistributionUtil.scala   |    0
 .../spark/sql/hive/SqlAstBuilderHelper.scala       |    0
 .../spark/sql/hive/cli/CarbonSQLCLIDriver.scala    |    0
 .../execution/command/CarbonHiveCommands.scala     |    0
 .../execution/command/CarbonResetCommand.scala     |    0
 .../sql/listeners/DropCacheEventListeners.scala    |    0
 .../apache/spark/sql/listeners/MVListeners.scala   |    0
 .../spark/sql/listeners/PrePrimingListener.scala   |    0
 .../sql/listeners/ShowCacheEventListeners.scala    |    0
 .../apache/spark/sql/optimizer/CarbonFilters.scala |    0
 .../apache/spark/sql/optimizer/CarbonIUDRule.scala |    0
 .../sql/optimizer/CarbonUDFTransformRule.scala     |    0
 .../parser/CarbonExtensionSpark2SqlParser.scala    |   66 +
 .../sql/parser/CarbonExtensionSqlParser.scala      |    0
 .../spark/sql/parser/CarbonSpark2SqlParser.scala   |    0
 .../spark/sql/parser/CarbonSparkSqlParser.scala    |    0
 .../sql/parser/CarbonSparkSqlParserUtil.scala      |  747 +++++++
 .../org/apache/spark/sql/profiler/Profiler.scala   |    0
 .../spark/sql/profiler/ProfilerListener.scala      |    0
 .../apache/spark/sql/profiler/ProfilerLogger.scala |  367 ++++
 .../Jobs/BlockletDataMapDetailsWithSchema.java     |    0
 .../Jobs/CarbonBlockLoaderHelper.java              |    0
 .../Jobs/DistributableBlockletDataMapLoader.java   |    0
 .../Jobs/SparkBlockletDataMapLoaderJob.scala       |    0
 .../secondaryindex/command/DropIndexCommand.scala  |    0
 .../command/RegisterIndexTableCommand.scala        |    0
 .../secondaryindex/command/SICreationCommand.scala |    0
 .../sql/secondaryindex/command/SILoadCommand.scala |    0
 .../command/SIRebuildSegmentCommand.scala          |    0
 .../command/ShowIndexesCommand.scala               |    0
 .../AlterTableColumnRenameEventListener.scala      |    0
 .../AlterTableCompactionPostEventListener.scala    |    0
 .../events/AlterTableDropColumnEventListener.scala |    0
 .../AlterTableMergeIndexSIEventListener.scala      |    0
 .../events/AlterTableRenameEventListener.scala     |    0
 .../events/CleanFilesPostEventListener.scala       |    0
 .../events/CreateCarbonRelationEventListener.scala |    0
 .../events/DeleteFromTableEventListener.scala      |    0
 .../events/DeleteSegmentByDateListener.scala       |    0
 .../events/DeleteSegmentByIdListener.scala         |    0
 .../events/DropCacheSIEventListener.scala          |    0
 .../sql/secondaryindex/events/LoadSIEvents.scala   |    0
 .../events/SIDropEventListener.scala               |    0
 .../events/SILoadEventListener.scala               |    0
 .../SILoadEventListenerForFailedSegments.scala     |    0
 .../events/SIRefreshEventListener.scala            |    0
 .../events/ShowCacheSIEventListener.scala          |    0
 .../events/UpdateTablePreEventListener.scala       |    0
 .../exception/IndexTableExistException.java        |    0
 .../exception/SecondaryIndexException.java         |    0
 .../hive/CarbonInternalMetastore.scala             |  267 +++
 .../joins/BroadCastSIFilterPushJoin.scala          |    0
 .../load/CarbonInternalLoaderUtil.java             |    0
 .../spark/sql/secondaryindex/load/Compactor.scala  |    0
 .../load/RowComparatorWithOutKettle.java           |    0
 .../optimizer/CarbonCostBasedOptimizer.java        |    0
 .../optimizer/CarbonSITransformationRule.scala     |    0
 .../optimizer/CarbonSecondaryIndexOptimizer.scala  |    0
 .../query/CarbonSecondaryIndexExecutor.java        |    0
 .../query/SecondaryIndexQueryResultProcessor.java  |    0
 .../secondaryindex/rdd/CarbonSIRebuildRDD.scala    |    0
 .../rdd/CarbonSecondaryIndexRDD.scala              |    0
 .../secondaryindex/rdd/SecondaryIndexCreator.scala |    0
 .../util/CarbonInternalScalaUtil.scala             |    0
 .../sql/secondaryindex/util/FileInternalUtil.scala |    0
 .../sql/secondaryindex/util/IndexTableUtil.java    |    0
 .../sql/secondaryindex/util/InternalKeyVal.scala   |    0
 .../secondaryindex/util/SecondaryIndexUtil.scala   |    0
 .../spark/sql/test/ResourceRegisterAndCopier.scala |    0
 .../spark/sql/test/SparkTestQueryExecutor.scala    |   88 +
 .../apache/spark/sql/test/TestQueryExecutor.scala  |  187 ++
 .../spark/sql/test/util/CarbonFunSuite.scala       |    0
 .../org/apache/spark/sql/test/util/PlanTest.scala  |    0
 .../org/apache/spark/sql/test/util/QueryTest.scala |  283 +++
 .../apache/spark/sql/util/CarbonException.scala    |    0
 .../spark/sql/util/CarbonMetastoreTypes.scala      |    0
 .../org/apache/spark/sql/util/SparkSQLUtil.scala   |    0
 .../apache/spark/sql/util/SparkTypeConverter.scala |    0
 .../org/apache/spark/util/AlterTableUtil.scala     |    0
 .../apache/spark/util/CarbonReflectionUtils.scala  |    0
 .../scala/org/apache/spark/util/CleanFiles.scala   |    0
 .../scala/org/apache/spark/util/Compaction.scala   |    0
 .../apache/spark/util/DeleteSegmentByDate.scala    |    0
 .../org/apache/spark/util/DeleteSegmentById.scala  |    0
 .../scala/org/apache/spark/util/FileUtils.scala    |    0
 .../org/apache/spark/util/MergeIndexUtil.scala     |    0
 .../org/apache/spark/util/ScalaCompilerUtil.scala  |    0
 .../scala/org/apache/spark/util/SparkUtil.scala    |    0
 .../scala/org/apache/spark/util/TableAPIUtil.scala |    0
 .../scala/org/apache/spark/util/TableLoader.scala  |    0
 .../spark/adapter/CarbonToSparkAdapter.scala       |    0
 .../apache/spark/sql/CarbonBoundReference.scala    |    0
 .../apache/spark/sql/CarbonToSparkAdapter.scala    |  166 ++
 .../apache/spark/sql/MixedFormatHandlerUtil.scala  |    0
 .../execution/strategy/CarbonDataSourceScan.scala  |    0
 .../spark/sql/hive/CarbonSessionStateBuilder.scala |    0
 .../spark/adapter/CarbonToSparkAdapter.scala       |    0
 .../apache/spark/sql/CarbonBoundReference.scala    |    0
 .../apache/spark/sql/CarbonToSparkAdapter.scala    |  200 ++
 .../apache/spark/sql/MixedFormatHandlerUtil.scala  |    0
 .../execution/strategy/CarbonDataSourceScan.scala  |    0
 .../spark/sql/hive/CarbonSessionStateBuilder.scala |    0
 ...org.apache.spark.sql.sources.DataSourceRegister |   18 +
 ...apache.spark.sql.test.TestQueryExecutorRegister |   17 +
 .../org/apache/carbondata/sdk/util/BinaryUtil.java |    0
 .../stream/CarbonStreamRecordReaderTest.java       |    0
 .../src/test/resources/100_olap.csv                |    0
 .../src/test/resources/10dim_4msr.csv              |    0
 .../src/test/resources/32000char.csv               |    0
 .../src/test/resources/Array.csv                   |    0
 .../src/test/resources/IUD/T_Hive1.csv             |    0
 .../src/test/resources/IUD/bad_record.csv          |    0
 .../src/test/resources/IUD/badrecord.csv           |    0
 .../src/test/resources/IUD/comp1.csv               |    0
 .../src/test/resources/IUD/comp2.csv               |    0
 .../src/test/resources/IUD/comp3.csv               |    0
 .../src/test/resources/IUD/comp4.csv               |    0
 .../src/test/resources/IUD/dest.csv                |    0
 .../src/test/resources/IUD/negativevalue.csv       |    0
 .../src/test/resources/IUD/other.csv               |    0
 .../src/test/resources/IUD/sample.csv              |    0
 .../src/test/resources/IUD/sample_updated.csv      |    0
 .../src/test/resources/IUD/source2.csv             |    0
 .../src/test/resources/IUD}/source3.csv            |    0
 .../src/test/resources/IUD/update01.csv            |    0
 .../src/test/resources/OLDFORMATTABLE.csv          |    0
 .../src/test/resources/OLDFORMATTABLEHIVE.csv      |    0
 .../src/test/resources/Struct.csv                  |    0
 .../src/test/resources/StructofStruct.csv          |    0
 .../src/test/resources/Test_Data1_Logrithmic.csv   |    0
 .../src/test/resources/adap.csv                    |    0
 .../src/test/resources/adap_double1.csv            |    0
 .../src/test/resources/adap_double2.csv            |    0
 .../src/test/resources/adap_double3.csv            |    0
 .../src/test/resources/adap_double4.csv            |    0
 .../src/test/resources/adap_int1.csv               |    0
 .../src/test/resources/adap_int2.csv               |    0
 .../src/test/resources/adap_int3.csv               |    0
 .../src/test/resources/alldatatypeforpartition.csv |    0
 .../complex/20160423/1400_1405/complex.dictionary  |    0
 .../sample/20160423/1400_1405/sample.dictionary    |    0
 .../src/test/resources/array1.csv                  |    0
 .../src/test/resources/arrayColumnEmpty.csv        |    0
 .../src/test/resources/avgTest.csv                 |    0
 .../src/test/resources/badrecords/bigtab.csv       |    0
 .../src/test/resources/badrecords/bigtabbad.csv    |    0
 .../src/test/resources/badrecords/complexdata.csv  |    0
 .../src/test/resources/badrecords/datasample.csv   |    0
 .../src/test/resources/badrecords/dummy.csv        |    0
 .../src/test/resources/badrecords/dummy2.csv       |    0
 .../resources/badrecords/emptyTimeStampValue.csv   |    0
 .../src/test/resources/badrecords/emptyValues.csv  |    0
 .../resources/badrecords/insufficientColumns.csv   |    0
 .../test/resources/badrecords/seriazableValue.csv  |    0
 .../src/test/resources/bigIntData.csv              |    0
 .../src/test/resources/bigIntDataWithHeader.csv    |    0
 .../src/test/resources/bigIntDataWithoutHeader.csv |    0
 .../test/resources/big_decimal_without_header.csv  |    0
 .../src/test/resources/big_int_Decimal.csv         |    0
 .../src/test/resources/binaryDataBase64.csv        |    0
 .../src/test/resources/binaryDataHex.csv           |    0
 .../src/test/resources/binaryStringNullData.csv    |    0
 .../src/test/resources/binarystringdata.csv        |    0
 .../src/test/resources/binarystringdata2.csv       |    0
 .../test/resources/binarystringdatawithHead.csv    |    0
 .../src/test/resources/bool/supportBoolean.csv     |    0
 .../resources/bool/supportBooleanBadRecords.csv    |    0
 .../bool/supportBooleanDifferentFormat.csv         |    0
 .../resources/bool/supportBooleanOnlyBoolean.csv   |    0
 .../bool/supportBooleanTwoBooleanColumns.csv       |    0
 .../bool/supportBooleanWithFileHeader.csv          |    0
 .../src/test/resources/channelsId.csv              |    0
 .../src/test/resources/character_carbon.csv        |    0
 .../src/test/resources/character_hive.csv          |    0
 .../test/resources/columndictionary/country.csv    |    0
 .../src/test/resources/columndictionary/name.csv   |    0
 .../src/test/resources/comment.csv                 |    0
 .../src/test/resources/compaction/compaction1.csv  |    0
 .../resources/compaction/compaction1_forhive.csv   |    0
 .../src/test/resources/compaction/compaction2.csv  |    0
 .../src/test/resources/compaction/compaction3.csv  |    0
 .../test/resources/compaction/compactionIUD1.csv   |    0
 .../test/resources/compaction/compactionIUD2.csv   |    0
 .../test/resources/compaction/compactionIUD3.csv   |    0
 .../test/resources/compaction/compactionIUD4.csv   |    0
 .../test/resources/compaction/compactioncard2.csv  |    0
 .../compaction/compactioncard2_forhive.csv         |    0
 .../compaction/nodictionary_compaction.csv         |    0
 .../src/test/resources/complexTypeDecimal.csv      |    0
 .../test/resources/complexTypeDecimalNested.csv    |    0
 .../resources/complexTypeDecimalNestedHive.csv     |    0
 .../src/test/resources/complexbinary.csv           |    0
 .../src/test/resources/complexdata.csv             |    0
 .../src/test/resources/complexdata1.csv            |    0
 .../src/test/resources/complexdata2.csv            |    0
 .../src/test/resources/complexdata3.csv            |    0
 .../src/test/resources/complexdatareordered.csv    |    0
 .../src/test/resources/complexdatastructextra.csv  |    0
 .../resources/complextypediffentcolheaderorder.csv |    0
 .../src/test/resources/complextypesample.csv       |    0
 .../resources/complextypespecialchardelimiter.csv  |    0
 .../src/test/resources/data.csv                    |    0
 .../src/test/resources/data1.csv                   |    0
 .../src/test/resources/data2.csv                   |    0
 .../src/test/resources/data2_DiffTimeFormat.csv    |    0
 .../src/test/resources/dataIncrement.csv           |    0
 .../src/test/resources/dataWithEmptyRows.csv       |    0
 .../src/test/resources/dataWithNegativeValues.csv  |    0
 .../src/test/resources/dataWithNullFirstLine.csv   |    0
 .../src/test/resources/dataWithSingleQuote.csv     |    0
 .../src/test/resources/data_alltypes.csv           |    0
 .../src/test/resources/data_beyond68yrs.csv        |    0
 .../src/test/resources/data_big.csv                |    0
 .../test/resources/data_partition_badrecords.csv   |    0
 .../src/test/resources/data_sort.csv               |    0
 .../src/test/resources/data_timestamp.csv          |    0
 .../src/test/resources/data_withCAPSHeader.csv     |    0
 .../src/test/resources/data_withMixedHeader.csv    |    0
 .../src/test/resources/data_with_all_types.csv     |    0
 .../src/test/resources/data_with_special_char.csv  |    0
 .../src/test/resources/datadelimiter.csv           |    0
 .../src/test/resources/datanullmeasurecol.csv      |    0
 .../src/test/resources/dataretention1.csv          |    0
 .../src/test/resources/dataretention11.csv         |    0
 .../src/test/resources/dataretention2.csv          |    0
 .../src/test/resources/dataretention3.csv          |    0
 .../src/test/resources/datasample.csv              |    0
 .../src/test/resources/datasamplecomplex.csv       |    0
 .../src/test/resources/datasamplefordate.csv       |    0
 .../src/test/resources/datasamplenull.csv          |    0
 .../src/test/resources/datasingleCol.csv           |    0
 .../src/test/resources/datasingleComplexCol.csv    |    0
 .../resources/datawithNegeativewithoutHeader.csv   |    0
 .../src/test/resources/datawithNegtiveNumber.csv   |    0
 .../src/test/resources/datawithbackslash.csv       |    0
 .../src/test/resources/datawithblanklines.csv      |    0
 .../test/resources/datawithcomplexspecialchar.csv  |    0
 .../src/test/resources/datawithescapecharacter.csv |    0
 .../src/test/resources/datawithmaxbigint.csv       |    0
 .../src/test/resources/datawithmaxinteger.csv      |    0
 .../src/test/resources/datawithmaxminbigint.csv    |    0
 .../src/test/resources/datawithmaxmininteger.csv   |    0
 .../src/test/resources/datawithminbigint.csv       |    0
 .../src/test/resources/datawithmininteger.csv      |    0
 .../src/test/resources/datawithnullmeasure.csv     |    0
 .../src/test/resources/datawithnullmsrs.csv        |    0
 .../src/test/resources/datawithoutheader.csv       |    0
 .../test/resources/datawithspecialcharacter.csv    |    0
 .../src/test/resources/datedatafile.csv            |    0
 .../src/test/resources/dblocation/test.csv         |    0
 .../test/resources/decimalBoundaryDataCarbon.csv   |    0
 .../src/test/resources/decimalBoundaryDataHive.csv |    0
 .../src/test/resources/decimalData.csv             |    0
 .../src/test/resources/decimalDataWithHeader.csv   |    0
 .../test/resources/decimalDataWithoutHeader.csv    |    0
 .../src/test/resources/decimal_int_range.csv       |    0
 .../src/test/resources/deviceInformationId.csv     |    0
 .../src/test/resources/deviceInformationId2.csv    |    0
 .../spark/src/test}/resources/dimSample.csv        |    0
 .../src/test/resources/dimTableSample.csv          |    0
 .../src/test/resources/double.csv                  |    0
 .../double/data_notitle_AdaptiveFloating_byte.csv  |    0
 .../double/data_notitle_AdaptiveFloating_int.csv   |    0
 .../double/data_notitle_AdaptiveFloating_short.csv |    0
 .../data_notitle_AdaptiveFloating_short_int.csv    |    0
 .../test/resources/double/data_notitle_byte.csv    |    0
 .../src/test/resources/double/data_notitle_int.csv |    0
 .../test/resources/double/data_notitle_long.csv    |    0
 .../test/resources/double/data_notitle_short.csv   |    0
 .../resources/double/data_notitle_short_int.csv    |    0
 .../src/test/resources/emp.csv                     |    0
 .../src/test/resources/emptyDimensionData.csv      |    0
 .../src/test/resources/emptyDimensionDataHive.csv  |    0
 .../src/test/resources/emptylines.csv              |    0
 .../resources/emptyrow/csvwithonlyspacechar.csv    |    0
 .../src/test/resources/emptyrow/emptyRows.csv      |    0
 .../src/test/resources/encoding_types.csv          |    0
 .../src/test/resources/filter/betweenFilter.csv    |    0
 .../src/test/resources/filter/datagrtlrt.csv       |    0
 .../src/test/resources/filter/datawithnull.csv     |    0
 .../src/test/resources/filter/datawithoutnull.csv  |    0
 .../src/test/resources/filter/emp2.csv             |    0
 .../src/test/resources/filter/emp2allnull.csv      |    0
 .../src/test/resources/filter/emp2nonull.csv       |    0
 .../src/test/resources/filter/notEqualToFilter.csv |    0
 .../src/test/resources/filter/notNullFilter.csv    |    0
 .../src/test/resources/floatSample.csv             |    0
 .../src/test/resources/geodata.csv                 |    0
 .../src/test/resources/globalsort/sample1.csv      |    0
 .../src/test/resources/globalsort/sample2.csv      |    0
 .../src/test/resources/globalsort/sample3.csv      |    0
 .../resources/hiverangenodictionarycompare.csv     |    0
 .../src/test/resources/invalidMeasures.csv         |    0
 .../src/test/resources/j2.csv                      |    0
 .../src/test/resources/join/data1.csv              |    0
 .../src/test/resources/join/data2.csv              |    0
 .../src/test/resources/join/emp.csv                |    0
 .../src/test/resources/join/employee.csv           |    0
 .../src/test/resources/join/mgr.csv                |    0
 .../src/test/resources/join/mobile.csv             |    0
 .../jsonFiles/data/PrimitiveTypeWithNull.json      |    0
 .../resources/jsonFiles/data/StructOfAllTypes.json |    0
 .../resources/jsonFiles/data/allPrimitiveType.json |    0
 .../jsonFiles/data/allPrimitiveTypeBadRecord.json  |    0
 .../jsonFiles/data/arrayOfStructOfStruct.json      |    0
 .../data/arrayOfarrayOfarrayOfStruct.json          |    0
 .../JsonReaderTest/MultipleRowSingleLineJson.json  |    0
 .../JsonReaderTest/SingleRowSingleLineJson.json    |    0
 ...pleRowMultipleLineJsonWithRecordIdentifier.json |    0
 ...gleRowMultipleLineJsonWithRecordIdentifier.json |    0
 ...ingleRowSingleLineJsonWithRecordIdentifier.json |    0
 .../allPrimitiveTypeMultipleRows.json              |    0
 .../allPrimitiveTypeSingleArray.json               |    0
 .../jsonFiles/schema/StructOfAllTypes.avsc         |    0
 .../jsonFiles/schema/arrayOfStructOfStruct.avsc    |    0
 .../schema/arrayOfarrayOfarrayOfStruct.avsc        |    0
 .../src/test/resources/lessthandatacolumndata.csv  |    0
 .../resources/loadMultiFiles/.invisibilityfile     |    0
 .../src/test/resources/loadMultiFiles/_SUCCESS     |    0
 .../src/test/resources/loadMultiFiles/data.csv     |    0
 .../test/resources/loadMultiFiles/emptyfile.csv    |    0
 .../loadMultiFiles/nestedfolder1/data.csv          |    0
 .../loadMultiFiles/nestedfolder1/data1.csv         |    0
 .../nestedfolder1/nestedfolder2/data.csv           |    0
 .../src/test/resources/loadMultiFiles/non-csv      |    0
 .../src/test/resources/localdictionary.csv         |    0
 .../test/resources/locationInfoActiveCountry.csv   |    0
 .../src/test/resources/mac.csv                     |    0
 .../src/test/resources/measureinsertintotest.csv   |    0
 .../src/test/resources/mobileimei.csv              |    0
 .../src/test/resources/mv_sampledata.csv           |    0
 .../src/test/resources/newsample.csv               |    0
 .../src/test/resources/noneCsvFormat.cs            |    0
 .../src/test/resources/nontransactional.csv        |    0
 .../src/test/resources/nontransactional1.csv       |    0
 .../src/test/resources/nullSample.csv              |    0
 .../src/test/resources/nullandnonparsableValue.csv |    0
 .../src/test/resources/nullmeasurevalue.csv        |    0
 .../src/test/resources/nullvalueserialization.csv  |    0
 .../resources/numeric_column_invalid_values.csv    |    0
 .../src/test/resources/oscon_10.csv                |    0
 .../src/test/resources/outofrange.csv              |    0
 .../test/resources/overwriteTable1_noRecord.csv    |    0
 .../test/resources/overwriteTable1_someRecord.csv  |    0
 .../test/resources/overwriteTable2_noRecord.csv    |    0
 .../test/resources/overwriteTable2_someRecord.csv  |    0
 .../src/test/resources/partData.csv                |    0
 .../src/test/resources/partition_data.csv          |    0
 .../src/test/resources/partition_data_example.csv  |    0
 .../resources/predefdic/allpredefdictionary.csv    |    0
 .../src/test/resources/predefdic/data3.csv         |    0
 .../src/test/resources/predefdic/dicfilepath.csv   |    0
 .../src/test/resources/products.csv                |    0
 .../src/test/resources/range_column/dataskew.csv   |    0
 .../src/test/resources/rangedata.csv               |    0
 .../src/test/resources/rangedatasample.csv         |    0
 .../test/resources/rangenodictionarycompare.csv    |    0
 .../src/test/resources/restructure/data1.csv       |    0
 .../src/test/resources/restructure/data2.csv       |    0
 .../src/test/resources/restructure/data3.csv       |    0
 .../src/test/resources/restructure/data4.csv       |    0
 .../src/test/resources/restructure/data5.csv       |    0
 .../src/test/resources/restructure/data6.csv       |    0
 .../src/test/resources/restructure/data7.csv       |    0
 .../src/test/resources/restructure/data_2000.csv   |    0
 .../src/test/resources/sales_data.csv              |    0
 .../src/test/resources/sample                      |    0
 .../src/test/resources/sample.csv                  |    0
 .../src/test/resources/sample.csv.bz2              |  Bin
 .../src/test/resources/sample.csv.gz               |  Bin
 .../src/test/resources/sampleComplex.csv           |    0
 .../src/test/resources/sample_withDelimiter017.csv |    0
 .../src/test/resources/secindex/IUD/sample_1.csv   |    0
 .../src/test/resources/secindex/IUD/sample_2.csv   |    0
 .../src/test/resources/secindex/data_10000.csv     |    0
 .../src/test/resources/secindex/datafile_100.csv   |    0
 .../src/test/resources/secindex/dest.csv           |    0
 .../src/test/resources/secindex/dest1.csv          |    0
 .../src/test/resources/secindex/dest2.csv          |    0
 .../src/test/resources/secindex/dest3.csv          |    0
 .../src/test/resources/secindex/firstunique.csv    |    0
 .../src/test/resources/secindex/index.csv          |    0
 .../resources/secindex/secondaryIndexLikeTest.csv  |    0
 .../src/test/resources/secindex/secondunique.csv   |    0
 .../src/test/resources/secindex}/source3.csv       |    0
 .../src/test/resources/seq_20Records.csv           |    0
 .../src/test/resources/shortintboundary.csv        |    0
 .../src/test/resources/shortolap.csv               |    0
 .../test/resources/sort_columns/alldatatype1.csv   |    0
 .../test/resources/sort_columns/alldatatype2.csv   |    0
 .../src/test/resources/source.csv                  |    0
 .../src/test/resources/source_without_header.csv   |    0
 .../src/test/resources/streamSample.csv            |    0
 .../resources/streamSample_with_long_string.csv    |    0
 .../src/test/resources/struct_all.csv              |    0
 .../spark/src/test/resources/structofarray.csv     |   21 +
 .../src/test/resources/structusingstruct.csv       |    0
 .../src/test/resources/temp/data1.csv              |    0
 .../src/test/resources/test.json                   |    0
 .../test/resources/testBigInt_boundary_value.csv   |    0
 .../src/test/resources/testShortAndIntDataType.csv |    0
 .../src/test/resources/test_json.json              |    0
 .../src/test/resources/timeStampFormatData1.csv    |    0
 .../src/test/resources/timeStampFormatData2.csv    |    0
 .../src/test/resources/timeseriestest.csv          |    0
 .../src/test/resources/timestamp.csv               |    0
 .../src/test/resources/timestampdata.csv           |    0
 .../src/test/resources/timestampdatafile.csv       |    0
 .../src/test/resources/tpch/customers.csv          |    0
 .../src/test/resources/tpch/lineitem.csv           |    0
 .../src/test/resources/tpch/nation.csv             |    0
 .../src/test/resources/tpch/orders.csv             |    0
 .../src/test/resources/tpch/region.csv             |    0
 .../src/test/resources/tpch/supplier.csv           |    0
 .../src/test/resources/unicodechar.csv             |    0
 .../src/test/resources/uniq.csv                    |    0
 .../src/test/resources/uniqwithoutheader.csv       |    0
 .../src/test/resources/vardhandaterestruct.csv     |    0
 .../src/test/resources/verticalDelimitedData.csv   |    0
 .../BloomCoarseGrainDataMapFunctionSuite.scala     |  838 +++++++
 .../bloom/BloomCoarseGrainDataMapSuite.scala       |    0
 .../bloom/BloomCoarseGrainDataMapTestUtil.scala    |    0
 .../lucene/LuceneCoarseGrainDataMapSuite.scala     |    0
 .../lucene/LuceneFineGrainDataMapSuite.scala       |    0
 .../scala/org/apache/carbondata/geo/GeoTest.scala  |    0
 .../aggquery/IntegerDataTypeTestCase.scala         |    0
 .../spark/testsuite/bigdecimal/TestBigInt.scala    |  103 +
 .../TestDimensionWithDecimalDataType.scala         |   64 +
 .../testsuite/binary/TestBinaryDataType.scala      | 1670 ++++++++++++++
 .../complexType/TestAdaptiveComplexType.scala      |  577 +++++
 .../TestAdaptiveEncodingForNullValues.scala        |  175 ++
 ...eEncodingSafeColumnPageForComplexDataType.scala |    0
 ...ncodingUnsafeColumnPageForComplexDataType.scala |    0
 ...ingUnsafeHeapColumnPageForComplexDataType.scala |    0
 .../complexType/TestAllComplexDataType.scala       |    0
 .../complexType/TestCompactionComplexType.scala    | 1140 ++++++++++
 .../complexType/TestComplexDataType.scala          | 1165 ++++++++++
 .../complexType/TestComplexTypeQuery.scala         |  301 +++
 .../complexType/TestComplexTypeWithBigArray.scala  |    0
 .../complexType/TestCreateTableWithDouble.scala    |    0
 .../dataload/MultiFilesDataLoagdingTestCase.scala  |    0
 .../testsuite/dataload/TestLoadDataGeneral.scala   |    0
 .../dataload/TestLoadDataWithAutoLoadMerge.scala   |    0
 .../dataload/TestLoadDataWithBlankLine.scala       |   62 +
 .../dataload/TestLoadDataWithCompression.scala     |  645 ++++++
 .../TestLoadDataWithEmptyArrayColumns.scala        |   65 +
 .../dataload/TestLoadDataWithJunkChars.scala       |   57 +
 .../dataload/TestLoadDataWithMaxMinBigInt.scala    |    0
 .../dataload/TestLoadDataWithMaxMinInteger.scala   |    0
 .../dataload/TestLoadDataWithNullMeasures.scala    |    0
 .../TestLoadDataWithSortColumnBounds.scala         |    0
 .../dataload/TestLoadDataWithUnsafeMemory.scala    |    0
 .../dataload/TestLoadDataWithYarnLocalDirs.scala   |    0
 .../dataload/TestNoInvertedIndexLoadAndQuery.scala |  312 +++
 .../emptyrow/TestCSVHavingOnlySpaceChar.scala      |   60 +
 .../spark/testsuite/emptyrow/TestEmptyRows.scala   |   90 +
 .../testsuite/emptyrow/TestSkipEmptyLines.scala    |    0
 .../primitiveTypes/ArrayDataTypeTestCase.scala     |    0
 .../primitiveTypes/DoubleDataTypeTestCase.scala    |    0
 .../primitiveTypes/FloatDataTypeTestCase.scala     |    0
 .../primitiveTypes/MapDataTypeTestCase.scala       |    0
 .../TestAdaptiveEncodingForPrimitiveTypes.scala    |  412 ++++
 .../carbondata/spark/testsuite/TestCarbonCli.scala |   82 +
 .../testsuite/addsegment/AddSegmentTestCase.scala  |  862 ++++++++
 .../aggquery/AllDataTypesTestCaseAggregate.scala   |  109 +
 .../testsuite/aggquery/AverageQueryTestCase.scala  |  116 +
 .../allqueries/AllDataTypesTestCase.scala          | 1170 ++++++++++
 .../testsuite/allqueries/DoubleDataTypeTest.scala  |    0
 .../InsertIntoCarbonTableSpark2TestCase.scala      |   37 +
 .../allqueries/InsertIntoCarbonTableTestCase.scala |    0
 .../allqueries/MeasureOnlyTableTestCases.scala     |  394 ++++
 ...ryWithColumnMetCacheAndCacheLevelProperty.scala |    0
 .../allqueries/TestQueryWithoutDataLoad.scala      |    0
 .../allqueries/TestTableNameHasDbName.scala        |    0
 .../alterTable/TestAlterTableAddColumns.scala      |    0
 .../TestAlterTableCompactionLevelThreshold.scala   |    0
 .../TestAlterTableSortColumnsProperty.scala        |  586 +++++
 ...leWithColumnMetCacheAndCacheLevelProperty.scala |    0
 .../badrecordloger/BadRecordActionTest.scala       |  278 +++
 .../badrecordloger/BadRecordEmptyDataTest.scala    |  182 ++
 .../badrecordloger/BadRecordLoggerTest.scala       |  361 ++++
 .../testsuite/bigdecimal/TestAvgForBigInt.scala    |    0
 .../testsuite/bigdecimal/TestBigDecimal.scala      |  224 ++
 .../bigdecimal/TestNullAndEmptyFields.scala        |  114 +
 .../bigdecimal/TestNullAndEmptyFieldsUnsafe.scala  |  116 +
 .../blockprune/BlockPruneQueryTestCase.scala       |    0
 .../CarbonCustomBlockDistributionTest.scala        |  115 +
 .../booleantype/BooleanDataTypesBaseTest.scala     |  174 ++
 .../booleantype/BooleanDataTypesBigFileTest.scala  |  729 +++++++
 .../booleantype/BooleanDataTypesFilterTest.scala   |  436 ++++
 .../booleantype/BooleanDataTypesInsertTest.scala   |  997 +++++++++
 .../booleantype/BooleanDataTypesLoadTest.scala     |  781 +++++++
 .../BooleanDataTypesParameterTest.scala            |  288 +++
 .../booleantype/BooleanDataTypesSortTest.scala     |  145 ++
 .../compress/TestBooleanCompressSuite.scala        |  105 +
 .../compaction/TestHybridCompaction.scala          |  235 ++
 .../TestAlterTableWithTableComment.scala           |    0
 ...bonFileInputFormatWithExternalCarbonTable.scala |    0
 .../TestCreateDDLForComplexMapType.scala           |  531 +++++
 .../createTable/TestCreateExternalTable.scala      |    0
 .../TestCreateHiveTableWithCarbonDS.scala          |    0
 .../createTable/TestCreateTableAsSelect.scala      |  458 ++++
 .../createTable/TestCreateTableIfNotExists.scala   |    0
 .../createTable/TestCreateTableLike.scala          |    0
 .../TestCreateTableWithBlockletSize.scala          |    0
 .../TestCreateTableWithColumnComment.scala         |    0
 ...leWithColumnMetCacheAndCacheLevelProperty.scala |    0
 .../TestCreateTableWithCompactionOptions.scala     |    0
 ...TestCreateTableWithDatabaseNameCaseChange.scala |    0
 .../TestCreateTableWithPageSizeInMb.scala          |    0
 .../createTable/TestCreateTableWithSortScope.scala |    0
 .../TestCreateTableWithSpaceInColumnName.scala     |    0
 .../TestCreateTableWithTableComment.scala          |    0
 .../TestNonTransactionalCarbonTable.scala          |    0
 .../TestNonTransactionalCarbonTableForBinary.scala |  162 ++
 ...TestNonTransactionalCarbonTableForMapType.scala |    0
 ...TestNonTransactionalCarbonTableJsonWriter.scala |    0
 ...nTransactionalCarbonTableWithAvroDataType.scala | 1258 +++++++++++
 ...onTransactionalCarbonTableWithComplexType.scala |    0
 .../createTable/TestRenameTableWithDataMap.scala   |    0
 .../CarbonIndexFileMergeTestCase.scala             |    0
 .../CompactionSupportGlobalSortBigFileTest.scala   |    0
 .../CompactionSupportGlobalSortFunctionTest.scala  |    0
 .../CompactionSupportGlobalSortParameterTest.scala |  582 +++++
 .../CompactionSupportSpecifiedSegmentsTest.scala   |    0
 .../DataCompactionBlockletBoundryTest.scala        |   92 +
 .../DataCompactionBoundaryConditionsTest.scala     |  104 +
 .../DataCompactionCardinalityBoundryTest.scala     |  121 ++
 .../datacompaction/DataCompactionLockTest.scala    |  121 ++
 .../MajorCompactionIgnoreInMinorTest.scala         |  187 ++
 .../MajorCompactionStopsAfterCompaction.scala      |  141 ++
 .../MajorCompactionWithMeasureSortColumns.scala    |    0
 .../TableLevelCompactionOptionTest.scala           |    0
 .../dataload/TestDataLoadPartitionCoalescer.scala  |    0
 .../TestDataLoadWithColumnsMoreThanSchema.scala    |    0
 .../dataload/TestDataLoadWithFileName.scala        |    0
 .../TestDataWithDicExcludeAndInclude.scala         |   98 +
 .../dataload/TestGlobalSortDataLoad.scala          |  491 +++++
 .../testsuite/dataload/TestLoadDataFrame.scala     |    0
 .../dataload/TestLoadDataUseAllDictionary.scala    |    0
 .../TestLoadDataWithDiffTimestampFormat.scala      |  128 ++
 .../TestLoadDataWithFileHeaderException.scala      |    0
 .../TestLoadDataWithHiveSyntaxDefaultFormat.scala  |  761 +++++++
 .../TestLoadDataWithHiveSyntaxUnsafe.scala         |  717 ++++++
 ...adDataWithMalformedCarbonCommandException.scala |    0
 .../dataload/TestLoadDataWithNoMeasure.scala       |    0
 .../TestLoadDataWithNotProperInputFile.scala       |    0
 .../spark/testsuite/dataload/TestLoadOptions.scala |    0
 .../dataload/TestLoadTblNameIsKeyword.scala        |    0
 .../dataload/TestLoadWithSortTempCompressed.scala  |    0
 .../dataload/TestRangeColumnDataLoad.scala         |  951 ++++++++
 .../dataload/TestTableLevelBlockSize.scala         |  149 ++
 .../testsuite/dataload/TestTableLoadMinSize.scala  |  170 ++
 .../testsuite/datamap/CGDataMapTestCase.scala      |  572 +++++
 .../testsuite/datamap/DataMapWriterSuite.scala     |    0
 .../testsuite/datamap/FGDataMapTestCase.scala      |  584 +++++
 .../testsuite/datamap/TestDataMapCommand.scala     |    0
 .../testsuite/datamap/TestDataMapStatus.scala      |    0
 .../dataretention/DataRetentionTestCase.scala      |  287 +++
 .../dblocation/DBLocationCarbonTableTestCase.scala |    0
 .../deleteTable/TestDeleteTableNewDDL.scala        |  245 +++
 .../describeTable/TestDescribeTable.scala          |    0
 .../detailquery/AllQueriesSpark2TestCase.scala     |    0
 .../testsuite/detailquery/CastColumnTestCase.scala |  970 +++++++++
 .../ColumnPropertyValidationTestCase.scala         |    0
 .../detailquery/ExpressionWithNullTestCase.scala   |    0
 .../HighCardinalityDataTypesTestCase.scala         |  249 +++
 .../detailquery/IntegerDataTypeTestCase.scala      |    0
 .../detailquery/NoDictionaryColumnTestCase.scala   |    0
 .../RangeFilterAllDataTypesTestCases.scala         |  678 ++++++
 .../detailquery/RangeFilterTestCase.scala          |  634 ++++++
 .../SubqueryWithFilterAndSortTestCase.scala        |    0
 .../ValueCompressionDataTypeTestCase.scala         |    0
 .../DateDataTypeDirectDictionaryTest.scala         |  154 ++
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |  102 +
 ...rectDictionaryWithOffHeapSortDisabledTest.scala |   76 +
 .../DateDataTypeNullDataTest.scala                 |   82 +
 ...TimestampDataTypeDirectDictionaryTestCase.scala |  172 ++
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |   87 +
 .../TimestampDataTypeNullDataTest.scala            |    0
 .../TimestampNoDictionaryColumnCastTestCase.scala  |    0
 .../TimestampNoDictionaryColumnTestCase.scala      |   89 +
 .../filterexpr/AllDataTypesTestCaseFilter.scala    |    0
 .../testsuite/filterexpr/CountStarTestCase.scala   |   75 +
 .../filterexpr/FilterProcessorTestCase.scala       |  403 ++++
 .../filterexpr/GrtLtFilterProcessorTestCase.scala  |  185 ++
 .../filterexpr/IntegerDataTypeTestCase.scala       |    0
 .../NullMeasureValueTestCaseFilter.scala           |   56 +
 .../TestAndEqualFilterEmptyOperandValue.scala      |   74 +
 .../testsuite/filterexpr/TestBetweenFilter.scala   |    0
 .../testsuite/filterexpr/TestGrtLessFilter.scala   |   86 +
 .../filterexpr/TestImplicitFilterExpression.scala  |    0
 .../spark/testsuite/filterexpr/TestInFilter.scala  |  174 ++
 .../testsuite/filterexpr/TestIsNullFilter.scala    |    0
 .../testsuite/filterexpr/TestNotNullFilter.scala   |   60 +
 .../FlatFolderTableLoadingTestCase.scala           |  182 ++
 .../InsertIntoNonCarbonTableTestCase.scala         |    0
 .../testsuite/iud/DeleteCarbonTableTestCase.scala  |  382 ++++
 .../iud/HorizontalCompactionTestCase.scala         |    0
 .../iud/TestInsertAndOtherCommandConcurrent.scala  |    0
 .../iud/TestUpdateAndDeleteWithLargeData.scala     |    0
 .../testsuite/iud/UpdateCarbonTableTestCase.scala  |  898 ++++++++
 .../UpdateCarbonTableTestCaseWithBadRecord.scala   |   68 +
 .../joinquery/AllDataTypesTestCaseJoin.scala       |    0
 .../joinquery/IntegerDataTypeTestCase.scala        |    0
 .../joinquery/JoinWithoutDictionaryColumn.scala    |    0
 .../testsuite/joinquery/OrderByLimitTestCase.scala |    0
 .../LocalDictionarySupportAlterTableTest.scala     |    0
 .../LocalDictionarySupportCreateTableTest.scala    |    0
 .../LocalDictionarySupportLoadTableTest.scala      |  339 +++
 .../longstring/VarcharDataTypesBasicTestCase.scala |    0
 .../NullMeasureValueTestCaseAggregate.scala        |   82 +
 .../spark/testsuite/merge/MergeTestCase.scala      |    0
 .../TestNullValueSerialization.scala               |   98 +
 .../testsuite/partition/TestShowPartitions.scala   |   98 +
 .../partition/TestUpdateForPartitionTable.scala    |    0
 .../sdk/TestSDKWithTransactionalTable.scala        |  114 +
 .../segmentreading/TestSegmentReading.scala        |  387 ++++
 .../TestSegmentReadingForMultiThreading.scala      |    0
 .../testsuite/sortcolumns/TestSortColumns.scala    |  394 ++++
 .../sortcolumns/TestSortColumnsWithUnsafe.scala    |    0
 .../sortexpr/AllDataTypesTestCaseSort.scala        |    0
 .../sortexpr/IntegerDataTypeTestCase.scala         |    0
 .../StandardPartitionBadRecordLoggerTest.scala     |    0
 .../StandardPartitionComplexDataTypeTestCase.scala |    0
 .../StandardPartitionGlobalSortTestCase.scala      | 1070 +++++++++
 .../StandardPartitionTableCleanTestCase.scala      |  191 ++
 .../StandardPartitionTableCompactionTestCase.scala |  229 ++
 .../StandardPartitionTableDropTestCase.scala       |  233 ++
 .../StandardPartitionTableLoadingTestCase.scala    |  588 +++++
 .../StandardPartitionTableOverwriteTestCase.scala  |  326 +++
 .../StandardPartitionTableQueryTestCase.scala      |  547 +++++
 .../windowsexpr/WindowsExprTestCase.scala          |   66 +
 .../spark/util/DataTypeConverterUtilSuite.scala    |    0
 .../util/ExternalColumnDictionaryTestCase.scala    |    0
 .../sql/commands/StoredAsCarbondataSuite.scala     |    0
 .../sql/commands/TestCarbonDropCacheCommand.scala  |    0
 .../sql/commands/TestCarbonShowCacheCommand.scala  |    0
 .../sql/commands/UsingCarbondataSuite.scala        |    0
 .../carbondata/store/SparkCarbonStoreTest.scala    |    0
 .../indexserver/DistributedRDDUtilsTest.scala      |    0
 .../scala/org/apache/spark/SparkCommandSuite.scala |   38 +
 .../carbondata/BadRecordPathLoadOptionTest.scala   |   83 +
 .../spark/carbondata/CarbonDataSourceSuite.scala   |  274 +++
 .../carbondata/DataLoadFailAllTypeSortTest.scala   |  225 ++
 .../spark/carbondata/TableStatusBackupTest.scala   |    0
 .../carbondata/TestStreamingTableOpName.scala      | 2277 ++++++++++++++++++++
 .../carbondata/TestStreamingTableQueryFilter.scala |    0
 .../TestStreamingTableWithLongString.scala         |  649 ++++++
 .../TestStreamingTableWithRowParser.scala          |    0
 .../bucketing/TableBucketingTestCase.scala         |  252 +++
 .../carbondata/commands/SetCommandTestCase.scala   |  138 ++
 .../datatype/NumericDimensionBadRecordTest.scala   |  177 ++
 .../deletetable/DeleteTableTestCase.scala          |   62 +
 .../iud/DeleteCarbonTableSubqueryTestCase.scala    |   63 +
 .../carbondata/query/SubQueryJoinTestSuite.scala   |   75 +
 .../spark/carbondata/query/SubQueryTestSuite.scala |   65 +
 .../carbondata/query/TestNotEqualToFilter.scala    |   90 +
 .../register/TestRegisterCarbonTable.scala         |    0
 .../restructure/AlterTableRevertTestCase.scala     |  106 +
 .../restructure/AlterTableUpgradeSegmentTest.scala |    0
 .../restructure/AlterTableValidationTestCase.scala |  849 ++++++++
 .../vectorreader/AddColumnTestCases.scala          |  697 ++++++
 .../AlterTableColumnRenameTestCase.scala           |  391 ++++
 .../vectorreader/ChangeDataTypeTestCases.scala     |  181 ++
 .../vectorreader/DropColumnTestCases.scala         |  115 +
 .../vectorreader/VectorReaderTestCase.scala        |   79 +
 .../apache/spark/sql/CarbonExtensionSuite.scala    |    0
 .../sql/CarbonGetTableDetailComandTestCase.scala   |    0
 .../spark/sql/GetDataSizeAndIndexSizeTest.scala    |    0
 .../SparkCarbonDataSourceBinaryTest.scala          |  743 +++++++
 .../datasource/SparkCarbonDataSourceTest.scala     | 2237 +++++++++++++++++++
 ...TestCreateTableUsingSparkCarbonFileFormat.scala |  502 +++++
 .../org/apache/spark/sql/common/util/Tags.scala    |    0
 .../command/CarbonTableSchemaCommonSuite.scala     |    0
 .../mutation/CarbonTruncateCommandTest.scala       |   49 +
 .../apache/spark/sql/profiler/ProfilerSuite.scala  |    0
 .../org/apache/spark/util/CarbonCommandSuite.scala |  205 ++
 .../org/apache/spark/util/SparkUtil4Test.scala     |    0
 .../org/apache/spark/util/SparkUtilTest.scala      |    0
 integration/spark2/pom.xml                         |  339 ---
 .../scala/org/apache/spark/sql/CarbonEnv.scala     |  444 ----
 .../spark/sql/execution/strategy/DDLHelper.scala   |  470 ----
 .../spark/sql/execution/strategy/DDLStrategy.scala |  274 ---
 .../parser/CarbonExtensionSpark2SqlParser.scala    |   77 -
 .../sql/parser/CarbonSparkSqlParserUtil.scala      |  747 -------
 .../hive/CarbonInternalMetastore.scala             |  267 ---
 .../spark/sql/test/Spark2TestQueryExecutor.scala   |   89 -
 .../apache/spark/sql/CarbonToSparkAdapter.scala    |  165 --
 .../apache/spark/sql/CarbonToSparkAdapter.scala    |  199 --
 ...org.apache.spark.sql.sources.DataSourceRegister |   17 -
 ...apache.spark.sql.test.TestQueryExecutorRegister |   17 -
 .../BloomCoarseGrainDataMapFunctionSuite.scala     |  841 --------
 .../carbondata/spark/testsuite/TestCarbonCli.scala |   82 -
 .../InsertIntoCarbonTableSpark2TestCase.scala      |   37 -
 .../booleantype/BooleanDataTypesBaseTest.scala     |  174 --
 .../booleantype/BooleanDataTypesBigFileTest.scala  |  729 -------
 .../booleantype/BooleanDataTypesFilterTest.scala   |  435 ----
 .../booleantype/BooleanDataTypesInsertTest.scala   |  997 ---------
 .../booleantype/BooleanDataTypesLoadTest.scala     |  778 -------
 .../BooleanDataTypesParameterTest.scala            |  288 ---
 .../booleantype/BooleanDataTypesSortTest.scala     |  145 --
 .../compress/TestBooleanCompressSuite.scala        |  111 -
 .../segmentreading/TestSegmentReading.scala        |  386 ----
 .../scala/org/apache/spark/SparkCommandSuite.scala |   38 -
 .../carbondata/BadRecordPathLoadOptionTest.scala   |   81 -
 .../spark/carbondata/CarbonDataSourceSuite.scala   |  275 ---
 .../carbondata/DataLoadFailAllTypeSortTest.scala   |  222 --
 .../carbondata/TestStreamingTableOpName.scala      | 2275 -------------------
 .../TestStreamingTableWithLongString.scala         |  649 ------
 .../bucketing/TableBucketingTestCase.scala         |  249 ---
 .../carbondata/commands/SetCommandTestCase.scala   |  138 --
 .../datatype/NumericDimensionBadRecordTest.scala   |  175 --
 .../deletetable/DeleteTableTestCase.scala          |   62 -
 .../iud/DeleteCarbonTableSubqueryTestCase.scala    |   63 -
 .../carbondata/query/SubQueryJoinTestSuite.scala   |   75 -
 .../spark/carbondata/query/SubQueryTestSuite.scala |   65 -
 .../carbondata/query/TestNotEqualToFilter.scala    |   90 -
 .../restructure/AlterTableRevertTestCase.scala     |  106 -
 .../restructure/AlterTableValidationTestCase.scala |  844 --------
 .../vectorreader/AddColumnTestCases.scala          |  695 ------
 .../AlterTableColumnRenameTestCase.scala           |  391 ----
 .../vectorreader/ChangeDataTypeTestCases.scala     |  180 --
 .../vectorreader/DropColumnTestCases.scala         |  113 -
 .../vectorreader/VectorReaderTestCase.scala        |   74 -
 .../spark/sql/common/util/Spark2QueryTest.scala    |   27 -
 .../mutation/CarbonTruncateCommandTest.scala       |   49 -
 .../org/apache/spark/util/CarbonCommandSuite.scala |  212 --
 mv/core/pom.xml                                    |  253 +++
 .../carbondata/mv/extension/MVAnalyzerRule.scala   |    0
 .../mv/extension/MVDataMapProvider.scala           |    0
 .../carbondata/mv/extension/MVExtension.scala      |    0
 .../mv/extension/MVExtensionSqlParser.scala        |    0
 .../apache/carbondata/mv/extension/MVHelper.scala  |    0
 .../apache/carbondata/mv/extension/MVParser.scala  |    0
 .../apache/carbondata/mv/extension/MVUtil.scala    |    0
 .../command/CreateMaterializedViewCommand.scala    |    0
 .../command/DropMaterializedViewCommand.scala      |    0
 .../command/RefreshMaterializedViewCommand.scala   |    0
 .../command/ShowMaterializedViewCommand.scala      |    0
 .../carbondata/mv/rewrite/DefaultMatchMaker.scala  |    0
 .../org/apache/carbondata/mv/rewrite/MVUdf.scala   |    0
 .../apache/carbondata/mv/rewrite/MatchMaker.scala  |    0
 .../apache/carbondata/mv/rewrite/Navigator.scala   |    0
 .../carbondata/mv/rewrite/QueryRewrite.scala       |    0
 .../mv/rewrite/SummaryDatasetCatalog.scala         |    0
 .../org/apache/carbondata/mv/rewrite/Utils.scala   |    0
 .../apache/carbondata/mv/session/MVSession.scala   |    0
 .../mv/session/internal/SessionState.scala         |    0
 .../carbondata/mv/timeseries/Granularity.java      |    0
 .../mv/timeseries/TimeSeriesFunction.scala         |    0
 .../carbondata/mv/timeseries/TimeSeriesUtil.scala  |    0
 .../carbondata/mv/extension/MVOptimizer.scala      |    0
 .../carbondata/mv/extension/MVOptimizer.scala      |    0
 .../mv/plans/ExtractJoinConditionsSuite.scala      |    0
 .../apache/carbondata/mv/plans/IsSPJGHSuite.scala  |    0
 .../mv/plans/LogicalToModularPlanSuite.scala       |    0
 .../carbondata/mv/plans/ModularToSQLSuite.scala    |  147 ++
 .../carbondata/mv/plans/SignatureSuite.scala       |   76 +
 .../mv/plans/Tpcds_1_4_BenchmarkSuite.scala        |    0
 .../carbondata/mv/rewrite/MVCoalesceTestCase.scala |    0
 .../mv/rewrite/MVCountAndCaseTestCase.scala        |    0
 .../carbondata/mv/rewrite/MVCreateTestCase.scala   | 1295 +++++++++++
 .../mv/rewrite/MVExceptionTestCase.scala           |    0
 .../mv/rewrite/MVFilterAndJoinTest.scala           |   71 +
 .../mv/rewrite/MVIncrementalLoadingTestcase.scala  |  650 ++++++
 .../carbondata/mv/rewrite/MVInvalidTestCase.scala  |    0
 .../mv/rewrite/MVMultiJoinTestCase.scala           |    0
 .../carbondata/mv/rewrite/MVRewriteTestCase.scala  |    0
 .../carbondata/mv/rewrite/MVSampleTestCase.scala   |  156 ++
 .../carbondata/mv/rewrite/MVTPCDSTestCase.scala    |  132 ++
 .../carbondata/mv/rewrite/MVTpchTestCase.scala     |  222 ++
 .../mv/rewrite/SelectAllColumnsSuite.scala         |    0
 .../rewrite/SelectSelectExactChildrenSuite.scala   |    0
 .../mv/rewrite/TestAllOperationsOnMV.scala         |  624 ++++++
 .../mv/rewrite/TestPartitionWithMV.scala           |  704 ++++++
 .../carbondata/mv/rewrite/TestSQLSuite.scala       |   97 +
 .../carbondata/mv/rewrite/Tpcds_1_4_Suite.scala    |   83 +
 .../mv/rewrite/matching/TestSQLBatch.scala         |    0
 .../mv/rewrite/matching/TestTPCDS_1_4_Batch.scala  |    0
 .../carbondata/mv/testutil/ModularPlanTest.scala   |    0
 .../carbondata/mv/testutil/TestSQLBatch.scala      |    0
 .../carbondata/mv/testutil/TestSQLBatch2.scala     |    0
 .../mv/testutil/Tpcds_1_4_QueryBatch.scala         |    0
 .../carbondata/mv/testutil/Tpcds_1_4_Tables.scala  |    0
 .../TestMVTimeSeriesCreateDataMapCommand.scala     |    0
 .../timeseries/TestMVTimeSeriesLoadAndQuery.scala  |    0
 .../timeseries/TestMVTimeSeriesQueryRollUp.scala   |    0
 mv/plan/pom.xml                                    |  161 ++
 .../org/apache/carbondata/mv/dsl/package.scala     |    0
 .../mv/expressions/modular/subquery.scala          |    0
 .../mv/plans/modular/AggregatePushDown.scala       |    0
 .../apache/carbondata/mv/plans/modular/Flags.scala |    0
 .../carbondata/mv/plans/modular/Harmonizer.scala   |    0
 .../mv/plans/modular/ModularPatterns.scala         |    0
 .../carbondata/mv/plans/modular/ModularPlan.scala  |    0
 .../modular/ModularPlanSignatureGenerator.scala    |    0
 .../mv/plans/modular/ModularRelation.scala         |    0
 .../carbondata/mv/plans/modular/Modularizer.scala  |    0
 .../mv/plans/modular/basicOperators.scala          |    0
 .../carbondata/mv/plans/modular/queryGraph.scala   |    0
 .../org/apache/carbondata/mv/plans/package.scala   |    0
 .../mv/plans/util/BirdcageOptimizer.scala          |    0
 .../mv/plans/util/Logical2ModularExtractions.scala |    0
 .../plans/util/LogicalPlanSignatureGenerator.scala |    0
 .../apache/carbondata/mv/plans/util/Printers.scala |    0
 .../apache/carbondata/mv/plans/util/SQLBuild.scala |    0
 .../carbondata/mv/plans/util/SQLBuildDSL.scala     |    0
 .../carbondata/mv/plans/util/SQLBuilder.scala      |    0
 .../carbondata/mv/plans/util/Signature.scala       |    0
 .../carbondata/mv/plans/util/TableCluster.scala    |    0
 pom.xml                                            |   71 +-
 .../carbondata/lcm/locks/LocalFileLockTest.java    |    1 -
 python/README.md                                   |    6 +-
 python/pycarbon/tests/__init__.py                  |    2 +-
 .../pycarbon/tests/sdk/test_read_write_carbon.py   |    2 +-
 {store => sdk}/CSDK/CMakeLists.txt                 |    0
 {store => sdk}/CSDK/src/CarbonProperties.cpp       |    0
 {store => sdk}/CSDK/src/CarbonProperties.h         |    0
 {store => sdk}/CSDK/src/CarbonReader.cpp           |    0
 {store => sdk}/CSDK/src/CarbonReader.h             |    0
 {store => sdk}/CSDK/src/CarbonRow.cpp              |    0
 {store => sdk}/CSDK/src/CarbonRow.h                |    0
 {store => sdk}/CSDK/src/CarbonSchemaReader.cpp     |    0
 {store => sdk}/CSDK/src/CarbonSchemaReader.h       |    0
 {store => sdk}/CSDK/src/CarbonWriter.cpp           |    0
 {store => sdk}/CSDK/src/CarbonWriter.h             |    0
 {store => sdk}/CSDK/src/Configuration.cpp          |    0
 {store => sdk}/CSDK/src/Configuration.h            |    0
 {store => sdk}/CSDK/src/Schema.cpp                 |    0
 {store => sdk}/CSDK/src/Schema.h                   |    0
 sdk/CSDK/test/main.cpp                             | 1057 +++++++++
 sdk/sdk/pom.xml                                    |  230 ++
 .../carbondata/sdk/file/ArrowCarbonReader.java     |    0
 .../carbondata/sdk/file/AvroCarbonWriter.java      |    0
 .../carbondata/sdk/file/CSVCarbonWriter.java       |    0
 .../apache/carbondata/sdk/file/CarbonReader.java   |    0
 .../carbondata/sdk/file/CarbonReaderBuilder.java   |    0
 .../carbondata/sdk/file/CarbonSchemaReader.java    |    0
 .../apache/carbondata/sdk/file/CarbonWriter.java   |    0
 .../carbondata/sdk/file/CarbonWriterBuilder.java   |    0
 .../java/org/apache/carbondata/sdk/file/Field.java |    0
 .../carbondata/sdk/file/JsonCarbonWriter.java      |    0
 .../org/apache/carbondata/sdk/file/RowUtil.java    |    0
 .../org/apache/carbondata/sdk/file/Schema.java     |    0
 .../org/apache/carbondata/sdk/file/TestUtil.java   |    0
 .../carbondata/sdk/file/arrow/ArrowConverter.java  |    0
 .../sdk/file/arrow/ArrowFieldWriter.java           |    0
 .../carbondata/sdk/file/arrow/ArrowUtils.java      |    0
 .../carbondata/sdk/file/arrow/ArrowWriter.java     |    0
 .../apache/carbondata/sdk/file/utils/SDKUtil.java  |    0
 .../carbondata/store/CarbonRowReadSupport.java     |    0
 .../org/apache/carbondata/store/CarbonStore.java   |    0
 .../apache/carbondata/store/LocalCarbonStore.java  |    0
 .../carbondata/store/MetaCachedCarbonStore.java    |    0
 .../sdk/src/main/resources/log4j.properties        |    0
 .../carbondata/sdk/file/ArrowCarbonReaderTest.java |    0
 .../carbondata/sdk/file/AvroCarbonWriterTest.java  |    0
 .../carbondata/sdk/file/CSVCarbonWriterTest.java   |    0
 .../carbondata/sdk/file/CarbonReaderTest.java      |    0
 .../sdk/file/CarbonSchemaReaderTest.java           |    0
 .../sdk/file/ConcurrentAvroSdkWriterTest.java      |    0
 .../sdk/file/ConcurrentSdkReaderTest.java          |    0
 .../sdk/file/ConcurrentSdkWriterTest.java          |    0
 .../org/apache/carbondata/sdk/file/ImageTest.java  |    0
 .../org/apache/carbondata/sdk/file/MinMaxTest.java |    0
 .../sdk/file/MultithreadSDKBlockletReaderTest.java |    0
 .../carbondata/store/LocalCarbonStoreTest.java     |    0
 .../org/apache/carbondata/util/BinaryUtil.java     |    0
 .../src/test/resources/image/carbondatalogo.jpg    |  Bin
 .../image/flowers/10686568196_b1915544a8.jpg       |  Bin
 .../image/flowers/10686568196_b1915544a8.txt       |    0
 .../image/flowers/10712722853_5632165b04.jpg       |  Bin
 .../image/flowers/10712722853_5632165b04.txt       |    0
 .../flowers/subfolder/10841136265_af473efc60.jpg   |  Bin
 .../flowers/subfolder/10841136265_af473efc60.txt   |    0
 .../src/test/resources/image/voc/2007_000027.jpg   |  Bin
 .../src/test/resources/image/voc/2007_000027.xml   |    0
 .../src/test/resources/image/voc}/2007_000032.jpg  |  Bin
 .../src/test/resources/image/voc/2007_000032.xml   |    0
 .../src/test/resources/image/voc}/2007_000033.jpg  |  Bin
 .../src/test/resources/image/voc/2007_000033.xml   |    0
 .../src/test/resources/image/voc/2007_000039.jpg   |  Bin
 .../src/test/resources/image/voc/2007_000039.xml   |    0
 .../src/test/resources/image/voc/2009_001444.jpg   |  Bin
 .../src/test/resources/image/voc/2009_001444.xml   |    0
 .../image/vocForSegmentationClass}/2007_000032.jpg |  Bin
 .../image/vocForSegmentationClass/2007_000032.png  |  Bin
 .../image/vocForSegmentationClass}/2007_000033.jpg |  Bin
 .../image/vocForSegmentationClass/2007_000033.png  |  Bin
 .../image/vocForSegmentationClass/2007_000042.jpg  |  Bin
 .../image/vocForSegmentationClass/2007_000042.png  |  Bin
 secondary_index/pom.xml                            |  244 ---
 .../TestCreateIndexWithLoadAndCompaction.scala     |  267 ---
 .../apache/spark/util/TestCarbonSegmentUtil.scala  |  313 ---
 store/CSDK/test/main.cpp                           | 1057 ---------
 store/sdk/pom.xml                                  |  230 --
 tools/cli/pom.xml                                  |    2 +-
 1380 files changed, 62857 insertions(+), 63532 deletions(-)
 delete mode 100644 datamap/bloom/pom.xml
 delete mode 100644 datamap/examples/pom.xml
 delete mode 100644 datamap/lucene/pom.xml
 delete mode 100644 datamap/mv/core/pom.xml
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
 delete mode 100644 datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
 delete mode 100644 datamap/mv/plan/pom.xml
 delete mode 100644 docs/datamap-developer-guide.md
 delete mode 100644 docs/datamap/datamap-management.md
 create mode 100644 docs/index-developer-guide.md
 rename docs/{datamap/bloomfilter-datamap-guide.md => index/bloomfilter-index-guide.md} (100%)
 create mode 100644 docs/index/index-management.md
 rename docs/{datamap/lucene-datamap-guide.md => index/lucene-index-guide.md} (100%)
 rename docs/{datamap/mv-datamap-guide.md => index/mv-guide.md} (100%)
 create mode 100644 examples/spark/pom.xml
 rename examples/{spark2 => spark}/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java (100%)
 rename examples/{spark2 => spark}/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java (100%)
 rename examples/{spark2 => spark}/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java (100%)
 rename examples/{spark2 => spark}/src/main/java/org/apache/carbondata/examples/sdk/SDKS3SchemaReadExample.java (100%)
 create mode 100644 examples/spark/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
 rename examples/{spark2 => spark}/src/main/resources/Test_Data1.csv (100%)
 rename examples/{spark2 => spark}/src/main/resources/complexdata.csv (100%)
 rename examples/{spark2 => spark}/src/main/resources/data.csv (100%)
 rename examples/{spark2 => spark}/src/main/resources/data1.csv (100%)
 rename examples/{spark2 => spark}/src/main/resources/dataSample.csv (100%)
 rename {integration/spark-common-test/src/test => examples/spark/src/main}/resources/dimSample.csv (100%)
 rename examples/{spark2 => spark}/src/main/resources/factSample.csv (100%)
 rename examples/{spark2 => spark}/src/main/resources/log4j.properties (100%)
 rename examples/{spark2 => spark}/src/main/resources/sample.csv (100%)
 rename examples/{spark2 => spark}/src/main/resources/streamSample.csv (100%)
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/benchmark/ConcurrentQueryBenchmark.scala
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/benchmark/Query.scala (100%)
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/benchmark/SCDType2Benchmark.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala (100%)
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/AlterTableExample.scala (100%)
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala (100%)
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala (100%)
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/DataFrameComplexTypeExample.scala (100%)
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala (100%)
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala (100%)
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/MVExample.scala (100%)
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/S3Example.scala
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala (100%)
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
 rename examples/{spark2 => spark}/src/main/scala/org/apache/carbondata/examples/StreamSQLExample.scala (100%)
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
 create mode 100644 examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
 create mode 100644 examples/spark/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
 delete mode 100644 examples/spark2/pom.xml
 delete mode 100644 examples/spark2/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/benchmark/ConcurrentQueryBenchmark.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SCDType2Benchmark.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
 delete mode 100644 examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
 delete mode 100644 examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
 create mode 100644 index/bloom/pom.xml
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCacheKeyValue.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapDistributable.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapModel.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/carbondata/datamap/bloom/DataConvertUtil.java (100%)
 rename {datamap => index}/bloom/src/main/java/org/apache/hadoop/util/bloom/CarbonBloomFilter.java (100%)
 create mode 100644 index/examples/pom.xml
 rename {datamap => index}/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/BlockletMinMax.java (100%)
 rename {datamap => index}/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java (100%)
 rename {datamap => index}/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexBlockDetails.java (100%)
 rename {datamap => index}/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java (100%)
 rename {datamap => index}/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java (100%)
 rename {datamap => index}/examples/src/minmaxdatamap/test/scala/org/apache/carbondata/datamap/examples/MinMaxDataMapSuite.scala (100%)
 create mode 100644 index/lucene/pom.xml
 rename {datamap => index}/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java (100%)
 rename {datamap => index}/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapDistributable.java (100%)
 rename {datamap => index}/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java (100%)
 rename {datamap => index}/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java (100%)
 rename {datamap => index}/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java (100%)
 rename {datamap => index}/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java (100%)
 create mode 100644 index/secondary-index/pom.xml
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/mergeindex/CarbonIndexFileMergeTestCaseWithSI.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/InsertIntoCarbonTableTestCase.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestAlterTableColumnRenameWithSecondaryIndex.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestBroadCastSIFilterPushJoinWithUDF.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCTASWithSecondaryIndex.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCacheOperationsForSI.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonJoin.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexForCleanAndDeleteSegment.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala (100%)
 create mode 100644 index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestLikeQueryWithSecondaryIndex.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestNIQueryWithSecondaryIndex.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestRegisterIndexCarbonTable.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithAddSegment.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondryIndex.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexForORFilterPushDown.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithAggQueries.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIUD.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIndexOnFirstColumnAndSortColumns.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithLocalDictionary.scala (100%)
 rename {secondary_index => index/secondary-index}/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithUnsafeColumnPage.scala (100%)
 create mode 100644 index/secondary-index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
 delete mode 100644 integration/spark-common-test/pom.xml
 delete mode 100644 integration/spark-common-test/src/test/resources/structofarray.csv
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestBigInt.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/binary/TestBinaryDataType.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCompactionComplexType.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexDataType.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithBlankLine.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithEmptyArrayColumns.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithJunkChars.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestNoInvertedIndexLoadAndQuery.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestCSVHavingOnlySpaceChar.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestEmptyRows.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/TestAdaptiveEncodingForPrimitiveTypes.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AverageQueryTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableSortColumnsProperty.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFieldsUnsafe.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/compaction/TestHybridCompaction.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateDDLForComplexMapType.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableForBinary.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithAvroDataType.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortParameterTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBoundaryConditionsTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionLockTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionStopsAfterCompaction.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataWithDicExcludeAndInclude.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestGlobalSortDataLoad.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithDiffTimestampFormat.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxUnsafe.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestRangeColumnDataLoad.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestTableLevelBlockSize.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestTableLoadMinSize.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/CastColumnTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/HighCardinalityDataTypesTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryWithNoDictTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryWithOffHeapSortDisabledTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeNullDataTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/CountStarTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/FilterProcessorTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/GrtLtFilterProcessorTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/NullMeasureValueTestCaseFilter.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestAndEqualFilterEmptyOperandValue.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestGrtLessFilter.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestInFilter.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestNotNullFilter.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/flatfolder/FlatFolderTableLoadingTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCaseWithBadRecord.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportLoadTableTest.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/measurenullvalue/NullMeasureValueTestCaseAggregate.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/nullvalueserialization/TestNullValueSerialization.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestShowPartitions.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sdk/TestSDKWithTransactionalTable.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionGlobalSortTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableCleanTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableCompactionTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableDropTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableOverwriteTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableQueryTestCase.scala
 delete mode 100644 integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/windowsexpr/WindowsExprTestCase.scala
 delete mode 100644 integration/spark-common/pom.xml
 delete mode 100644 integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/SparkReadSupport.scala
 delete mode 100644 integration/spark-common/src/main/scala/org/apache/spark/sql/profiler/ProfilerLogger.scala
 delete mode 100644 integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
 delete mode 100644 integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
 delete mode 100644 integration/spark-datasource/pom.xml
 delete mode 100644 integration/spark-datasource/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
 delete mode 100644 integration/spark-datasource/src/test/java/org/apache/carbondata/sdk/util/BinaryUtil.java
 delete mode 100644 integration/spark-datasource/src/test/resources/structofarray.csv
 delete mode 100644 integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceBinaryTest.scala
 delete mode 100644 integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceTest.scala
 delete mode 100644 integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestCreateTableUsingSparkCarbonFileFormat.scala
 delete mode 100644 integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
 create mode 100644 integration/spark/pom.xml
 rename integration/{spark2 => spark}/src/main/java/org/apache/carbondata/datamap/DataMapManager.java (100%)
 rename integration/{spark2 => spark}/src/main/java/org/apache/carbondata/datamap/IndexDataMapProvider.java (100%)
 rename integration/{spark-common => spark}/src/main/java/org/apache/carbondata/spark/exception/ProcessMetaDataException.java (100%)
 rename integration/{spark-common => spark}/src/main/java/org/apache/carbondata/spark/load/DecimalSerializableComparator.java (100%)
 rename integration/{spark2 => spark}/src/main/java/org/apache/carbondata/spark/readsupport/SparkGenericRowReadSupportImpl.java (100%)
 rename integration/{spark2 => spark}/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java (100%)
 rename integration/{spark-common => spark}/src/main/java/org/apache/carbondata/spark/util/Util.java (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/api/CarbonStore.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/carbondata/converter/SparkDataTypeConverterImpl.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/datamap/CarbonMergeBloomIndexFilesRDD.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/datamap/TextMatchUDF.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/CacheEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/CarbonInitEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/CleanFilesEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/CreateCarbonRelationEvent.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/CreateDatabaseEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/DataMapEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/DeleteSegmentEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/DropDataMapEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/DropTableEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/Events.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/IUDEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/IndexServerEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/LookupRelationEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/RefreshTableEvents.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/events/exception/EventExceptions.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/geo/GeoUtils.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/geo/InPolygonUDF.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/indexserver/DataMapJobs.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/indexserver/DistributedCountRDD.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/indexserver/DistributedPruneRDD.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/indexserver/DistributedRDDUtils.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/indexserver/DistributedShowCacheRDD.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/indexserver/InvalidateSegmentCacheRDD.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/indexserver/SegmentPruneRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/CarbonColumnValidator.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/InitInputMetrics.java (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/KeyVal.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/StreamingOption.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/load/CsvRDDHelper.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessBuilderOnSpark.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/load/GlobalSortHelper.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeltaRowScanRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropPartitionRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonIUDMergerRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonSparkPartition.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/spark/rdd/CompactionFactory.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala (100%)
 create mode 100644 integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/SparkReadSupport.scala
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/spark/thriftserver/CarbonThriftServer.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/spark/util/CarbonSparkUtil.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/spark/util/DataGenerator.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/spark/util/DataTypeConverterUtil.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapperDirect.java (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/streaming/CarbonSparkStreamingListener.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/streaming/CarbonStreamSparkStreaming.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/streaming/CarbonStreamingQueryListener.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/carbondata/streaming/StreamSinkFactory.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/CarbonInputMetrics.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/DataSkewRangePartitioner.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/rdd/DataLoadCoalescedRDD.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/rdd/DataLoadPartitionCoalescer.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonDataFrameWriter.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/CarbonDictionaryWrapper.java (100%)
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonExpressions.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonExtensions.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonSession.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonSource.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonSparkStreamingFactory.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CarbonUtils.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/CarbonVectorProxy.java (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/ColumnVectorFactory.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/CustomDeterministicExpression.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/EnvHelper.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/SQLConf.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndex.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndexReplaceRule.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/SparkCarbonFileFormat.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/readsupport/SparkUnsafeRowReadSuport.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/catalyst/AbstractCarbonSparkSQLParser.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/catalyst/CarbonParserUtil.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/catalyst/analysis/EmptyRule.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/CastExpressionOptimization.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/cache/CacheUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonDropCacheCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonShowCacheCommand.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapRebuildCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCliCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByIdCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByLoadDateCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteStageFilesCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertFromStageCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoHadoopFsRelationCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoWithDf.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadParams.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonShowLoadsCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/CommonLoadUtils.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonTruncateCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompactionException.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/IUDCommonUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetException.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/HistoryTableLoadHelper.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeDataSetBuilder.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeProjection.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MutationAction.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/TranxManager.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/interfaces.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/package.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableAddHivePartitionCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropHivePartitionCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableColRenameDataTypeChangeCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableSetCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableUnsetCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonCreateStreamCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonDropStreamCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonShowStreamsCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateDataSourceTableCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableLikeCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonExplainCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowCreateTableCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonPlanHelper.scala (100%)
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/strategy/DMLHelper.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/strategy/MixedFormatHandler.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/strategy/PushDownHelper.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/execution/strategy/StreamingTableStrategy.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonAnalyzer.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonMVRules.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonRelation.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalog.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalogUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonSessionUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonSqlAstBuilder.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CarbonSqlConf.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/CreateCarbonSourceTableAsSelectCommand.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/hive/DistributionUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/SqlAstBuilderHelper.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/cli/CarbonSQLCLIDriver.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonResetCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/listeners/DropCacheEventListeners.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/listeners/MVListeners.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/listeners/PrePrimingListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/listeners/ShowCacheEventListeners.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/optimizer/CarbonIUDRule.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/optimizer/CarbonUDFTransformRule.scala (100%)
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSpark2SqlParser.scala
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSqlParser.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala (100%)
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/profiler/Profiler.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/profiler/ProfilerListener.scala (100%)
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/profiler/ProfilerLogger.scala
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/BlockletDataMapDetailsWithSchema.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/CarbonBlockLoaderHelper.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/DistributableBlockletDataMapLoader.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/SparkBlockletDataMapLoaderJob.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/command/DropIndexCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/command/RegisterIndexTableCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/command/SICreationCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/command/SILoadCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/command/SIRebuildSegmentCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/command/ShowIndexesCommand.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableColumnRenameEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableCompactionPostEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableDropColumnEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableMergeIndexSIEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableRenameEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/CleanFilesPostEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/CreateCarbonRelationEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteFromTableEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByDateListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByIdListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/DropCacheSIEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/LoadSIEvents.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIDropEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListenerForFailedSegments.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIRefreshEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/ShowCacheSIEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/events/UpdateTablePreEventListener.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/exception/IndexTableExistException.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/exception/SecondaryIndexException.java (100%)
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/joins/BroadCastSIFilterPushJoin.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/load/CarbonInternalLoaderUtil.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/load/Compactor.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/load/RowComparatorWithOutKettle.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonCostBasedOptimizer.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSITransformationRule.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSecondaryIndexOptimizer.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/query/CarbonSecondaryIndexExecutor.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/query/SecondaryIndexQueryResultProcessor.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSIRebuildRDD.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSecondaryIndexRDD.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/SecondaryIndexCreator.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/util/CarbonInternalScalaUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/util/FileInternalUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/util/IndexTableUtil.java (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/util/InternalKeyVal.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/sql/secondaryindex/util/SecondaryIndexUtil.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala (100%)
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/test/SparkTestQueryExecutor.scala
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/test/util/CarbonFunSuite.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/test/util/PlanTest.scala (100%)
 create mode 100644 integration/spark/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/util/CarbonException.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/util/CarbonMetastoreTypes.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/sql/util/SparkSQLUtil.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/sql/util/SparkTypeConverter.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/util/AlterTableUtil.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/util/CarbonReflectionUtils.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/util/CleanFiles.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/util/Compaction.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/util/FileUtils.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/util/MergeIndexUtil.scala (100%)
 rename integration/{spark-common => spark}/src/main/scala/org/apache/spark/util/ScalaCompilerUtil.scala (100%)
 rename integration/{spark-datasource => spark}/src/main/scala/org/apache/spark/util/SparkUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/util/TableAPIUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/scala/org/apache/spark/util/TableLoader.scala (100%)
 rename integration/{spark-common => spark}/src/main/spark2.3/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala (100%)
 rename integration/{spark2 => spark}/src/main/spark2.3/org/apache/spark/sql/CarbonBoundReference.scala (100%)
 create mode 100644 integration/spark/src/main/spark2.3/org/apache/spark/sql/CarbonToSparkAdapter.scala
 rename integration/{spark2 => spark}/src/main/spark2.3/org/apache/spark/sql/MixedFormatHandlerUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/spark2.3/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala (100%)
 rename integration/{spark2 => spark}/src/main/spark2.3/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala (100%)
 rename integration/{spark-common => spark}/src/main/spark2.4/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala (100%)
 rename integration/{spark2 => spark}/src/main/spark2.4/org/apache/spark/sql/CarbonBoundReference.scala (100%)
 create mode 100644 integration/spark/src/main/spark2.4/org/apache/spark/sql/CarbonToSparkAdapter.scala
 rename integration/{spark2 => spark}/src/main/spark2.4/org/apache/spark/sql/MixedFormatHandlerUtil.scala (100%)
 rename integration/{spark2 => spark}/src/main/spark2.4/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala (100%)
 rename integration/{spark2 => spark}/src/main/spark2.4/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala (100%)
 create mode 100644 integration/spark/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
 create mode 100644 integration/spark/src/resources/META-INF/services/org.apache.spark.sql.test.TestQueryExecutorRegister
 rename integration/{spark-common-test => spark}/src/test/java/org/apache/carbondata/sdk/util/BinaryUtil.java (100%)
 rename integration/{spark2 => spark}/src/test/java/org/apache/carbondata/stream/CarbonStreamRecordReaderTest.java (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/100_olap.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/10dim_4msr.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/32000char.csv (100%)
 rename integration/{spark-datasource => spark}/src/test/resources/Array.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/T_Hive1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/bad_record.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/badrecord.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/comp1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/comp2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/comp3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/comp4.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/dest.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/negativevalue.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/other.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/sample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/sample_updated.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/source2.csv (100%)
 rename integration/{spark-common-test/src/test/resources/secindex => spark/src/test/resources/IUD}/source3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/IUD/update01.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/OLDFORMATTABLE.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/OLDFORMATTABLEHIVE.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/Struct.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/StructofStruct.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/Test_Data1_Logrithmic.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/adap.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/adap_double1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/adap_double2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/adap_double3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/adap_double4.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/adap_int1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/adap_int2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/adap_int3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/alldatatypeforpartition.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/alldictionary/complex/20160423/1400_1405/complex.dictionary (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/alldictionary/sample/20160423/1400_1405/sample.dictionary (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/array1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/arrayColumnEmpty.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/avgTest.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/bigtab.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/bigtabbad.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/complexdata.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/datasample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/dummy.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/dummy2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/emptyTimeStampValue.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/emptyValues.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/insufficientColumns.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/badrecords/seriazableValue.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/bigIntData.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/bigIntDataWithHeader.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/bigIntDataWithoutHeader.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/big_decimal_without_header.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/big_int_Decimal.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/binaryDataBase64.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/binaryDataHex.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/binaryStringNullData.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/binarystringdata.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/binarystringdata2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/binarystringdatawithHead.csv (100%)
 rename integration/{spark2 => spark}/src/test/resources/bool/supportBoolean.csv (100%)
 rename integration/{spark2 => spark}/src/test/resources/bool/supportBooleanBadRecords.csv (100%)
 rename integration/{spark2 => spark}/src/test/resources/bool/supportBooleanDifferentFormat.csv (100%)
 rename integration/{spark2 => spark}/src/test/resources/bool/supportBooleanOnlyBoolean.csv (100%)
 rename integration/{spark2 => spark}/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv (100%)
 rename integration/{spark2 => spark}/src/test/resources/bool/supportBooleanWithFileHeader.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/channelsId.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/character_carbon.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/character_hive.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/columndictionary/country.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/columndictionary/name.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/comment.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compaction1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compaction1_forhive.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compaction2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compaction3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compactionIUD1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compactionIUD2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compactionIUD3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compactionIUD4.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compactioncard2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/compactioncard2_forhive.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/compaction/nodictionary_compaction.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexTypeDecimal.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexTypeDecimalNested.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexTypeDecimalNestedHive.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexbinary.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexdata.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexdata1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexdata2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexdata3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexdatareordered.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complexdatastructextra.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complextypediffentcolheaderorder.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complextypesample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/complextypespecialchardelimiter.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data2_DiffTimeFormat.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dataIncrement.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dataWithEmptyRows.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dataWithNegativeValues.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dataWithNullFirstLine.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dataWithSingleQuote.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_alltypes.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_beyond68yrs.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_big.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_partition_badrecords.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_sort.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_timestamp.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_withCAPSHeader.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_withMixedHeader.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_with_all_types.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/data_with_special_char.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datadelimiter.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datanullmeasurecol.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dataretention1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dataretention11.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dataretention2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dataretention3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datasample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datasamplecomplex.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datasamplefordate.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datasamplenull.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datasingleCol.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datasingleComplexCol.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithNegeativewithoutHeader.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithNegtiveNumber.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithbackslash.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithblanklines.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithcomplexspecialchar.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithescapecharacter.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithmaxbigint.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithmaxinteger.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithmaxminbigint.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithmaxmininteger.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithminbigint.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithmininteger.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithnullmeasure.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithnullmsrs.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithoutheader.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datawithspecialcharacter.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/datedatafile.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dblocation/test.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/decimalBoundaryDataCarbon.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/decimalBoundaryDataHive.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/decimalData.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/decimalDataWithHeader.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/decimalDataWithoutHeader.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/decimal_int_range.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/deviceInformationId.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/deviceInformationId2.csv (100%)
 rename {examples/spark2/src/main => integration/spark/src/test}/resources/dimSample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/dimTableSample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double/data_notitle_AdaptiveFloating_byte.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double/data_notitle_AdaptiveFloating_int.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double/data_notitle_AdaptiveFloating_short.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double/data_notitle_AdaptiveFloating_short_int.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double/data_notitle_byte.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double/data_notitle_int.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double/data_notitle_long.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double/data_notitle_short.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/double/data_notitle_short_int.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/emp.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/emptyDimensionData.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/emptyDimensionDataHive.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/emptylines.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/emptyrow/csvwithonlyspacechar.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/emptyrow/emptyRows.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/encoding_types.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/filter/betweenFilter.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/filter/datagrtlrt.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/filter/datawithnull.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/filter/datawithoutnull.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/filter/emp2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/filter/emp2allnull.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/filter/emp2nonull.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/filter/notEqualToFilter.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/filter/notNullFilter.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/floatSample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/geodata.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/globalsort/sample1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/globalsort/sample2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/globalsort/sample3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/hiverangenodictionarycompare.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/invalidMeasures.csv (100%)
 rename integration/{spark-datasource => spark}/src/test/resources/j2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/join/data1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/join/data2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/join/emp.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/join/employee.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/join/mgr.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/join/mobile.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/PrimitiveTypeWithNull.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/StructOfAllTypes.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/allPrimitiveType.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/allPrimitiveTypeBadRecord.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/arrayOfStructOfStruct.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/arrayOfarrayOfarrayOfStruct.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/MultipleRowSingleLineJson.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/SingleRowSingleLineJson.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/MultipleRowMultipleLineJsonWithRecordIdentifier.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowMultipleLineJsonWithRecordIdentifier.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowSingleLineJsonWithRecordIdentifier.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeMultipleRows.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeSingleArray.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/schema/StructOfAllTypes.avsc (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/schema/arrayOfStructOfStruct.avsc (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/jsonFiles/schema/arrayOfarrayOfarrayOfStruct.avsc (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/lessthandatacolumndata.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/loadMultiFiles/.invisibilityfile (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/loadMultiFiles/_SUCCESS (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/loadMultiFiles/data.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/loadMultiFiles/emptyfile.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/loadMultiFiles/nestedfolder1/data.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/loadMultiFiles/nestedfolder1/data1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/loadMultiFiles/nestedfolder1/nestedfolder2/data.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/loadMultiFiles/non-csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/localdictionary.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/locationInfoActiveCountry.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/mac.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/measureinsertintotest.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/mobileimei.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/mv_sampledata.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/newsample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/noneCsvFormat.cs (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/nontransactional.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/nontransactional1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/nullSample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/nullandnonparsableValue.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/nullmeasurevalue.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/nullvalueserialization.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/numeric_column_invalid_values.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/oscon_10.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/outofrange.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/overwriteTable1_noRecord.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/overwriteTable1_someRecord.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/overwriteTable2_noRecord.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/overwriteTable2_someRecord.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/partData.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/partition_data.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/partition_data_example.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/predefdic/allpredefdictionary.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/predefdic/data3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/predefdic/dicfilepath.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/products.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/range_column/dataskew.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/rangedata.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/rangedatasample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/rangenodictionarycompare.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/restructure/data1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/restructure/data2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/restructure/data3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/restructure/data4.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/restructure/data5.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/restructure/data6.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/restructure/data7.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/restructure/data_2000.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/sales_data.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/sample (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/sample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/sample.csv.bz2 (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/sample.csv.gz (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/sampleComplex.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/sample_withDelimiter017.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/IUD/sample_1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/IUD/sample_2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/data_10000.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/datafile_100.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/dest.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/dest1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/dest2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/dest3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/firstunique.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/index.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/secondaryIndexLikeTest.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/secindex/secondunique.csv (100%)
 rename integration/{spark-common-test/src/test/resources/IUD => spark/src/test/resources/secindex}/source3.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/seq_20Records.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/shortintboundary.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/shortolap.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/sort_columns/alldatatype1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/sort_columns/alldatatype2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/source.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/source_without_header.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/streamSample.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/streamSample_with_long_string.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/struct_all.csv (100%)
 create mode 100644 integration/spark/src/test/resources/structofarray.csv
 rename integration/{spark-common-test => spark}/src/test/resources/structusingstruct.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/temp/data1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/test.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/testBigInt_boundary_value.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/testShortAndIntDataType.csv (100%)
 rename integration/{spark-datasource => spark}/src/test/resources/test_json.json (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/timeStampFormatData1.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/timeStampFormatData2.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/timeseriestest.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/timestamp.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/timestampdata.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/timestampdatafile.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/tpch/customers.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/tpch/lineitem.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/tpch/nation.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/tpch/orders.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/tpch/region.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/tpch/supplier.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/unicodechar.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/uniq.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/uniqwithoutheader.csv (100%)
 rename integration/{spark-datasource => spark}/src/test/resources/vardhandaterestruct.csv (100%)
 rename integration/{spark-common-test => spark}/src/test/resources/verticalDelimitedData.csv (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFunctionSuite.scala
 rename integration/{spark2 => spark}/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapSuite.scala (100%)
 rename integration/{spark2 => spark}/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapTestUtil.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMapSuite.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/geo/GeoTest.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestBigInt.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/binary/TestBinaryDataType.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingSafeColumnPageForComplexDataType.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeColumnPageForComplexDataType.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeHeapColumnPageForComplexDataType.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAllComplexDataType.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCompactionComplexType.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexDataType.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeWithBigArray.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCreateTableWithDouble.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/MultiFilesDataLoagdingTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithAutoLoadMerge.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithBlankLine.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithEmptyArrayColumns.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithJunkChars.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinBigInt.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithNullMeasures.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithUnsafeMemory.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithYarnLocalDirs.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestNoInvertedIndexLoadAndQuery.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestCSVHavingOnlySpaceChar.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestEmptyRows.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestSkipEmptyLines.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/ArrayDataTypeTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/DoubleDataTypeTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/FloatDataTypeTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/MapDataTypeTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/TestAdaptiveEncodingForPrimitiveTypes.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/TestCarbonCli.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AverageQueryTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestTableNameHasDbName.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableAddColumns.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableCompactionLevelThreshold.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableSortColumnsProperty.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestAvgForBigInt.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFieldsUnsafe.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBaseTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBigFileTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesFilterTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesInsertTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesLoadTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesParameterTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesSortTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/compress/TestBooleanCompressSuite.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/compaction/TestHybridCompaction.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestAlterTableWithTableComment.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateDDLForComplexMapType.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateHiveTableWithCarbonDS.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableAsSelect.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableIfNotExists.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableLike.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithBlockletSize.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithColumnComment.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithColumnMetCacheAndCacheLevelProperty.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithCompactionOptions.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithDatabaseNameCaseChange.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithPageSizeInMb.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSpaceInColumnName.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithTableComment.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableForBinary.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableForMapType.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableJsonWriter.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithAvroDataType.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestRenameTableWithDataMap.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CarbonIndexFileMergeTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortBigFileTest.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortFunctionTest.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortParameterTest.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportSpecifiedSegmentsTest.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBoundaryConditionsTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionLockTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionStopsAfterCompaction.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionWithMeasureSortColumns.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/TableLevelCompactionOptionTest.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadPartitionCoalescer.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithFileName.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataWithDicExcludeAndInclude.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestGlobalSortDataLoad.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataUseAllDictionary.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithDiffTimestampFormat.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxUnsafe.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNoMeasure.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadOptions.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadTblNameIsKeyword.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadWithSortTempCompressed.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestRangeColumnDataLoad.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestTableLevelBlockSize.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestTableLoadMinSize.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/DataMapWriterSuite.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapCommand.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/AllQueriesSpark2TestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/CastColumnTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ExpressionWithNullTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/HighCardinalityDataTypesTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/IntegerDataTypeTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/NoDictionaryColumnTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterAllDataTypesTestCases.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/RangeFilterTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SubqueryWithFilterAndSortTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ValueCompressionDataTypeTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryWithNoDictTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeDirectDictionaryWithOffHeapSortDisabledTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/DateDataTypeNullDataTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnCastTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampNoDictionaryColumnTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/CountStarTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/FilterProcessorTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/GrtLtFilterProcessorTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/IntegerDataTypeTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/NullMeasureValueTestCaseFilter.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestAndEqualFilterEmptyOperandValue.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestBetweenFilter.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestGrtLessFilter.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestImplicitFilterExpression.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestInFilter.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestIsNullFilter.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/TestNotNullFilter.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/flatfolder/FlatFolderTableLoadingTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/insertQuery/InsertIntoNonCarbonTableTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/DeleteCarbonTableTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/iud/HorizontalCompactionTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestUpdateAndDeleteWithLargeData.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCaseWithBadRecord.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/joinquery/AllDataTypesTestCaseJoin.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/joinquery/IntegerDataTypeTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/joinquery/JoinWithoutDictionaryColumn.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/joinquery/OrderByLimitTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportAlterTableTest.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportLoadTableTest.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/longstring/VarcharDataTypesBasicTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/measurenullvalue/NullMeasureValueTestCaseAggregate.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/merge/MergeTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/nullvalueserialization/TestNullValueSerialization.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestShowPartitions.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestUpdateForPartitionTable.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sdk/TestSDKWithTransactionalTable.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/segmentreading/TestSegmentReading.scala
 rename integration/{spark2 => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/segmentreading/TestSegmentReadingForMultiThreading.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/IntegerDataTypeTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionBadRecordLoggerTest.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionComplexDataTypeTestCase.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionGlobalSortTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableCleanTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableCompactionTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableDropTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableOverwriteTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableQueryTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/windowsexpr/WindowsExprTestCase.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/spark/util/DataTypeConverterUtilSuite.scala (100%)
 rename integration/{spark2 => spark}/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/sql/commands/StoredAsCarbondataSuite.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/sql/commands/TestCarbonDropCacheCommand.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/sql/commands/TestCarbonShowCacheCommand.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/carbondata/sql/commands/UsingCarbondataSuite.scala (100%)
 rename integration/{spark2 => spark}/src/test/scala/org/apache/carbondata/store/SparkCarbonStoreTest.scala (100%)
 rename integration/{spark2 => spark}/src/test/scala/org/apache/indexserver/DistributedRDDUtilsTest.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/SparkCommandSuite.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/BadRecordPathLoadOptionTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
 rename integration/{spark2 => spark}/src/test/scala/org/apache/spark/carbondata/TableStatusBackupTest.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOpName.scala
 rename integration/{spark2 => spark}/src/test/scala/org/apache/spark/carbondata/TestStreamingTableQueryFilter.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/TestStreamingTableWithLongString.scala
 rename integration/{spark2 => spark}/src/test/scala/org/apache/spark/carbondata/TestStreamingTableWithRowParser.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/datatype/NumericDimensionBadRecordTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/deletetable/DeleteTableTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/iud/DeleteCarbonTableSubqueryTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/query/SubQueryJoinTestSuite.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/query/SubQueryTestSuite.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/query/TestNotEqualToFilter.scala
 rename integration/{spark2 => spark}/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableRevertTestCase.scala
 rename integration/{spark2 => spark}/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableUpgradeSegmentTest.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AlterTableColumnRenameTestCase.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/carbondata/vectorreader/VectorReaderTestCase.scala
 rename integration/{spark2 => spark}/src/test/scala/org/apache/spark/sql/CarbonExtensionSuite.scala (100%)
 rename integration/{spark2 => spark}/src/test/scala/org/apache/spark/sql/CarbonGetTableDetailComandTestCase.scala (100%)
 rename integration/{spark2 => spark}/src/test/scala/org/apache/spark/sql/GetDataSizeAndIndexSizeTest.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceBinaryTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceTest.scala
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestCreateTableUsingSparkCarbonFileFormat.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/spark/sql/common/util/Tags.scala (100%)
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/spark/sql/execution/command/CarbonTableSchemaCommonSuite.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/sql/execution/command/mutation/CarbonTruncateCommandTest.scala
 rename integration/{spark-common => spark}/src/test/scala/org/apache/spark/sql/profiler/ProfilerSuite.scala (100%)
 create mode 100644 integration/spark/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
 rename integration/{spark-common-test => spark}/src/test/scala/org/apache/spark/util/SparkUtil4Test.scala (100%)
 rename integration/{spark-common => spark}/src/test/scala/org/apache/spark/util/SparkUtilTest.scala (100%)
 delete mode 100644 integration/spark2/pom.xml
 delete mode 100644 integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
 delete mode 100644 integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
 delete mode 100644 integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
 delete mode 100644 integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSpark2SqlParser.scala
 delete mode 100644 integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
 delete mode 100644 integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
 delete mode 100644 integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
 delete mode 100644 integration/spark2/src/main/spark2.3/org/apache/spark/sql/CarbonToSparkAdapter.scala
 delete mode 100644 integration/spark2/src/main/spark2.4/org/apache/spark/sql/CarbonToSparkAdapter.scala
 delete mode 100644 integration/spark2/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
 delete mode 100644 integration/spark2/src/resources/META-INF/services/org.apache.spark.sql.test.TestQueryExecutorRegister
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFunctionSuite.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/TestCarbonCli.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBaseTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBigFileTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesFilterTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesInsertTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesLoadTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesParameterTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesSortTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/compress/TestBooleanCompressSuite.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/segmentreading/TestSegmentReading.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/SparkCommandSuite.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/BadRecordPathLoadOptionTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOpName.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableWithLongString.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/datatype/NumericDimensionBadRecordTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/deletetable/DeleteTableTestCase.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/iud/DeleteCarbonTableSubqueryTestCase.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/query/SubQueryJoinTestSuite.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/query/SubQueryTestSuite.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/query/TestNotEqualToFilter.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableRevertTestCase.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AlterTableColumnRenameTestCase.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/carbondata/vectorreader/VectorReaderTestCase.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/sql/common/util/Spark2QueryTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/sql/execution/command/mutation/CarbonTruncateCommandTest.scala
 delete mode 100644 integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
 create mode 100644 mv/core/pom.xml
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/MVAnalyzerRule.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/MVDataMapProvider.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtension.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtensionSqlParser.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/MVHelper.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/MVParser.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/MVUtil.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/command/CreateMaterializedViewCommand.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/command/DropMaterializedViewCommand.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/command/RefreshMaterializedViewCommand.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/extension/command/ShowMaterializedViewCommand.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/rewrite/DefaultMatchMaker.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/rewrite/MVUdf.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/rewrite/MatchMaker.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/rewrite/Navigator.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/rewrite/QueryRewrite.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/rewrite/SummaryDatasetCatalog.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/rewrite/Utils.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/session/MVSession.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/session/internal/SessionState.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/timeseries/Granularity.java (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesFunction.scala (100%)
 rename {datamap/mv => mv}/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesUtil.scala (100%)
 rename {datamap/mv => mv}/core/src/main/spark2.3/org/apache/carbondata/mv/extension/MVOptimizer.scala (100%)
 rename {datamap/mv => mv}/core/src/main/spark2.4/org/apache/carbondata/mv/extension/MVOptimizer.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/plans/ExtractJoinConditionsSuite.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/plans/IsSPJGHSuite.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/plans/LogicalToModularPlanSuite.scala (100%)
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/plans/Tpcds_1_4_BenchmarkSuite.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCoalesceTestCase.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCountAndCaseTestCase.scala (100%)
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVExceptionTestCase.scala (100%)
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVInvalidTestCase.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVMultiJoinTestCase.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVRewriteTestCase.scala (100%)
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectAllColumnsSuite.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectSelectExactChildrenSuite.scala (100%)
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
 create mode 100644 mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestSQLBatch.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestTPCDS_1_4_Batch.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch2.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_QueryBatch.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_Tables.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesCreateDataMapCommand.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesLoadAndQuery.scala (100%)
 rename {datamap/mv => mv}/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesQueryRollUp.scala (100%)
 create mode 100644 mv/plan/pom.xml
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/dsl/package.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/expressions/modular/subquery.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/AggregatePushDown.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Flags.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Harmonizer.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPatterns.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlan.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlanSignatureGenerator.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularRelation.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Modularizer.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/basicOperators.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/queryGraph.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/package.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/util/BirdcageOptimizer.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Logical2ModularExtractions.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/util/LogicalPlanSignatureGenerator.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Printers.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuild.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuildDSL.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuilder.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Signature.scala (100%)
 rename {datamap/mv => mv}/plan/src/main/scala/org/apache/carbondata/mv/plans/util/TableCluster.scala (100%)
 rename {store => sdk}/CSDK/CMakeLists.txt (100%)
 rename {store => sdk}/CSDK/src/CarbonProperties.cpp (100%)
 rename {store => sdk}/CSDK/src/CarbonProperties.h (100%)
 rename {store => sdk}/CSDK/src/CarbonReader.cpp (100%)
 rename {store => sdk}/CSDK/src/CarbonReader.h (100%)
 rename {store => sdk}/CSDK/src/CarbonRow.cpp (100%)
 rename {store => sdk}/CSDK/src/CarbonRow.h (100%)
 rename {store => sdk}/CSDK/src/CarbonSchemaReader.cpp (100%)
 rename {store => sdk}/CSDK/src/CarbonSchemaReader.h (100%)
 rename {store => sdk}/CSDK/src/CarbonWriter.cpp (100%)
 rename {store => sdk}/CSDK/src/CarbonWriter.h (100%)
 rename {store => sdk}/CSDK/src/Configuration.cpp (100%)
 rename {store => sdk}/CSDK/src/Configuration.h (100%)
 rename {store => sdk}/CSDK/src/Schema.cpp (100%)
 rename {store => sdk}/CSDK/src/Schema.h (100%)
 create mode 100644 sdk/CSDK/test/main.cpp
 create mode 100644 sdk/sdk/pom.xml
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/ArrowCarbonReader.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/CSVCarbonWriter.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriter.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/JsonCarbonWriter.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/RowUtil.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/TestUtil.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowConverter.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowFieldWriter.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowUtils.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowWriter.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/sdk/file/utils/SDKUtil.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/store/CarbonRowReadSupport.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java (100%)
 rename {store => sdk}/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java (100%)
 rename {store => sdk}/sdk/src/main/resources/log4j.properties (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/ArrowCarbonReaderTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonSchemaReaderTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentAvroSdkWriterTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkReaderTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkWriterTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/ImageTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/MinMaxTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/sdk/file/MultithreadSDKBlockletReaderTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java (100%)
 rename {store => sdk}/sdk/src/test/java/org/apache/carbondata/util/BinaryUtil.java (100%)
 rename {store => sdk}/sdk/src/test/resources/image/carbondatalogo.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.txt (100%)
 rename {store => sdk}/sdk/src/test/resources/image/flowers/10712722853_5632165b04.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/flowers/10712722853_5632165b04.txt (100%)
 rename {store => sdk}/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.txt (100%)
 rename {store => sdk}/sdk/src/test/resources/image/voc/2007_000027.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/voc/2007_000027.xml (100%)
 rename {store/sdk/src/test/resources/image/vocForSegmentationClass => sdk/sdk/src/test/resources/image/voc}/2007_000032.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/voc/2007_000032.xml (100%)
 rename {store/sdk/src/test/resources/image/vocForSegmentationClass => sdk/sdk/src/test/resources/image/voc}/2007_000033.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/voc/2007_000033.xml (100%)
 rename {store => sdk}/sdk/src/test/resources/image/voc/2007_000039.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/voc/2007_000039.xml (100%)
 rename {store => sdk}/sdk/src/test/resources/image/voc/2009_001444.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/voc/2009_001444.xml (100%)
 rename {store/sdk/src/test/resources/image/voc => sdk/sdk/src/test/resources/image/vocForSegmentationClass}/2007_000032.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/vocForSegmentationClass/2007_000032.png (100%)
 rename {store/sdk/src/test/resources/image/voc => sdk/sdk/src/test/resources/image/vocForSegmentationClass}/2007_000033.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/vocForSegmentationClass/2007_000033.png (100%)
 rename {store => sdk}/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.jpg (100%)
 rename {store => sdk}/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.png (100%)
 delete mode 100644 secondary_index/pom.xml
 delete mode 100644 secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
 delete mode 100644 secondary_index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
 delete mode 100644 store/CSDK/test/main.cpp
 delete mode 100644 store/sdk/pom.xml


[carbondata] 02/02: [Refactor] Refactor modules

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackylk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git

commit 55a07da027bae609c273a81813b73f6cd9db8363
Author: QiangCai <qi...@qq.com>
AuthorDate: Mon Feb 24 17:31:18 2020 +0800

    [Refactor] Refactor modules
    
    Why is this PR needed?
    There are too many Spark-related modules.
    
    What changes were proposed in this PR?
    1. Update docs
    2. Merge spark-common, spark2, spark-datasource and spark-common-test
    into integration/spark (see the module sketch after this message)
    3. Fix all test cases after moving modules
    
    Does this PR introduce any user interface change?
    No
    
    Is any new test case added?
    No
    
    This closes #3592
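
The merge is visible in the file list that follows: the four Spark modules
collapse into integration/spark, datamap/mv/* moves to mv/*, and store/sdk
moves to sdk/sdk. A minimal sketch of what the consolidated <modules> section
of the root pom.xml could look like after the move; only the relocated
modules are shown, and the surrounding POM structure is assumed here rather
than copied from the repository:

    <!-- root pom.xml (sketch): module list after the refactor -->
    <modules>
      <module>integration/spark</module>  <!-- absorbs spark-common, spark2,
                                               spark-datasource and
                                               spark-common-test -->
      <module>mv/plan</module>            <!-- was datamap/mv/plan -->
      <module>mv/core</module>            <!-- was datamap/mv/core -->
      <module>sdk/sdk</module>            <!-- was store/sdk -->
    </modules>

Builds that selected the old module paths explicitly (for example with
Maven's -pl option, as in mvn -pl integration/spark2) would need to point at
the new paths instead.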
---
 assembly/pom.xml                                   |    2 +-
 conf/dataload.properties.template                  |    2 +-
 docs/alluxio-guide.md                              |   10 +-
 docs/ddl-of-carbondata.md                          |    4 +-
 docs/documentation.md                              |    2 +-
 ...developer-guide.md => index-developer-guide.md} |    6 +-
 docs/index-server.md                               |    4 +-
 .../bloomfilter-index-guide.md}                    |    0
 .../index-management.md}                           |    2 +-
 .../lucene-index-guide.md}                         |    0
 .../mv-datamap-guide.md => index/mv-guide.md}      |    0
 docs/introduction.md                               |    6 +-
 docs/language-manual.md                            |    6 +-
 docs/performance-tuning.md                         |    2 +-
 docs/sdk-guide.md                                  |   12 +-
 docs/streaming-guide.md                            |    4 +-
 examples/flink/pom.xml                             |    2 +-
 integration/flink/pom.xml                          |    6 +-
 .../carbondata/hive/CarbonObjectInspector.java     |    2 +
 integration/presto/pom.xml                         |    2 +-
 integration/spark-common-cluster-test/pom.xml      |   16 +-
 .../sdv/generated/ComplexDataTypeTestCase.scala    |    2 +-
 .../datasource/SparkCarbonDataSourceTestCase.scala |    8 +-
 .../spark/sql/common/util/DataSourceTestUtil.scala |    6 +-
 .../src/test/resources/structofarray.csv           |   10 -
 integration/spark-common/pom.xml                   |  298 ---
 integration/spark-datasource/pom.xml               |  214 --
 .../org/apache/carbondata/sdk/util/BinaryUtil.java |   89 -
 .../SparkCarbonDataSourceBinaryTest.scala          |  729 -------
 .../datasource/SparkCarbonDataSourceTest.scala     | 1983 -----------------
 .../spark/sql/carbondata/datasource/TestUtil.scala |  181 --
 integration/{spark-common-test => spark}/pom.xml   |  328 ++-
 .../apache/carbondata/datamap/DataMapManager.java  |    0
 .../carbondata/datamap/IndexDataMapProvider.java   |    0
 .../spark/exception/ProcessMetaDataException.java  |    0
 .../spark/load/DecimalSerializableComparator.java  |    0
 .../SparkGenericRowReadSupportImpl.java            |    0
 .../spark/readsupport/SparkRowReadSupportImpl.java |    0
 .../org/apache/carbondata/spark/util/Util.java     |    0
 .../org/apache/carbondata/api/CarbonStore.scala    |    0
 .../converter/SparkDataTypeConverterImpl.java      |    0
 .../datamap/CarbonMergeBloomIndexFilesRDD.scala    |    0
 .../datamap/IndexDataMapRebuildRDD.scala           |    0
 .../apache/carbondata/datamap/TextMatchUDF.scala   |    0
 .../carbondata/events/AlterTableEvents.scala       |    0
 .../org/apache/carbondata/events/CacheEvents.scala |    0
 .../carbondata/events/CarbonInitEvents.scala       |    0
 .../carbondata/events/CleanFilesEvents.scala       |    0
 .../events/CreateCarbonRelationEvent.scala         |    0
 .../carbondata/events/CreateDatabaseEvents.scala   |    0
 .../carbondata/events/CreateTableEvents.scala      |    0
 .../apache/carbondata/events/DataMapEvents.scala   |    0
 .../carbondata/events/DeleteSegmentEvents.scala    |    0
 .../carbondata/events/DropDataMapEvents.scala      |    0
 .../apache/carbondata/events/DropTableEvents.scala |    0
 .../org/apache/carbondata/events/Events.scala      |    0
 .../org/apache/carbondata/events/IUDEvents.scala   |    0
 .../carbondata/events/IndexServerEvents.scala      |    0
 .../carbondata/events/LookupRelationEvents.scala   |    0
 .../carbondata/events/RefreshTableEvents.scala     |    0
 .../events/exception/EventExceptions.scala         |    0
 .../scala/org/apache/carbondata/geo/GeoUtils.scala |    0
 .../org/apache/carbondata/geo/InPolygonUDF.scala   |    0
 .../carbondata/indexserver/DataMapJobs.scala       |    0
 .../indexserver/DistributedCountRDD.scala          |    0
 .../indexserver/DistributedPruneRDD.scala          |    0
 .../indexserver/DistributedRDDUtils.scala          |    0
 .../indexserver/DistributedShowCacheRDD.scala      |    0
 .../carbondata/indexserver/IndexServer.scala       |    0
 .../indexserver/InvalidateSegmentCacheRDD.scala    |    0
 .../carbondata/indexserver/SegmentPruneRDD.scala   |    0
 .../carbondata/spark/CarbonColumnValidator.scala   |    0
 .../org/apache/carbondata/spark/CarbonOption.scala |    0
 .../carbondata/spark/CarbonSparkFactory.scala      |    0
 .../apache/carbondata/spark/InitInputMetrics.java  |    0
 .../scala/org/apache/carbondata/spark/KeyVal.scala |    0
 .../apache/carbondata/spark/StreamingOption.scala  |    0
 .../carbondata/spark/load/CsvRDDHelper.scala       |    0
 .../spark/load/DataLoadProcessBuilderOnSpark.scala |    0
 .../spark/load/DataLoadProcessorStepOnSpark.scala  |    0
 .../carbondata/spark/load/GlobalSortHelper.scala   |    0
 .../spark/rdd/CarbonDataRDDFactory.scala           |    0
 .../spark/rdd/CarbonDeltaRowScanRDD.scala          |    0
 .../spark/rdd/CarbonDropPartitionRDD.scala         |    0
 .../spark/rdd/CarbonGlobalDictionaryRDD.scala      |    0
 .../carbondata/spark/rdd/CarbonIUDMergerRDD.scala  |    0
 .../carbondata/spark/rdd/CarbonMergerRDD.scala     |    0
 .../apache/carbondata/spark/rdd/CarbonRDD.scala    |    0
 .../carbondata/spark/rdd/CarbonScanRDD.scala       |    0
 .../spark/rdd/CarbonSparkPartition.scala           |    0
 .../spark/rdd/CarbonTableCompactor.scala           |    0
 .../carbondata/spark/rdd/CompactionFactory.scala   |    0
 .../apache/carbondata/spark/rdd/Compactor.scala    |    0
 .../spark/rdd/InsertTaskCompletionListener.scala   |    0
 .../spark/rdd/NewCarbonDataLoadRDD.scala           |    0
 .../spark/rdd/QueryTaskCompletionListener.scala    |    0
 .../carbondata/spark/rdd/SparkReadSupport.scala    |    1 +
 .../carbondata/spark/rdd/StreamHandoffRDD.scala    |    0
 .../carbondata/spark/rdd/UpdateDataLoad.scala      |    0
 .../spark/thriftserver/CarbonThriftServer.scala    |    0
 .../carbondata/spark/util/CarbonScalaUtil.scala    |    0
 .../carbondata/spark/util/CarbonSparkUtil.scala    |    0
 .../apache/carbondata/spark/util/CommonUtil.scala  |    0
 .../carbondata/spark/util/DataGenerator.scala      |    0
 .../spark/util/DataTypeConverterUtil.scala         |    0
 .../spark/vectorreader/ColumnarVectorWrapper.java  |    0
 .../vectorreader/ColumnarVectorWrapperDirect.java  |    0
 .../vectorreader/VectorizedCarbonRecordReader.java |    0
 .../apache/carbondata/store/SparkCarbonStore.scala |    0
 .../stream/CarbonStreamRecordReader.java           |    0
 .../carbondata/stream/StreamJobManager.scala       |    0
 .../streaming/CarbonSparkStreamingListener.scala   |    0
 .../streaming/CarbonStreamSparkStreaming.scala     |    0
 .../streaming/CarbonStreamingQueryListener.scala   |    0
 .../carbondata/streaming/StreamSinkFactory.scala   |    0
 .../org/apache/spark/CarbonInputMetrics.scala      |    0
 .../apache/spark/DataSkewRangePartitioner.scala    |    0
 .../org/apache/spark/rdd/CarbonMergeFilesRDD.scala |    0
 .../apache/spark/rdd/DataLoadCoalescedRDD.scala    |    0
 .../spark/rdd/DataLoadPartitionCoalescer.scala     |    0
 .../apache/spark/sql/CarbonBoundReference.scala    |    0
 .../apache/spark/sql/CarbonCatalystOperators.scala |    0
 .../org/apache/spark/sql/CarbonCountStar.scala     |    0
 .../apache/spark/sql/CarbonDataFrameWriter.scala   |    0
 .../spark/sql/CarbonDatasourceHadoopRelation.scala |    0
 .../apache/spark/sql/CarbonDictionaryWrapper.java  |    0
 .../scala/org/apache/spark/sql/CarbonEnv.scala     |    9 +-
 .../org/apache/spark/sql/CarbonExpressions.scala   |    0
 .../org/apache/spark/sql/CarbonExtensions.scala    |    0
 .../scala/org/apache/spark/sql/CarbonSession.scala |    0
 .../scala/org/apache/spark/sql/CarbonSource.scala  |    0
 .../spark/sql/CarbonSparkStreamingFactory.scala    |    0
 .../scala/org/apache/spark/sql/CarbonUtils.scala   |    0
 .../org/apache/spark/sql/CarbonVectorProxy.java    |    0
 .../org/apache/spark/sql/ColumnVectorFactory.java  |    0
 .../spark/sql/CustomDeterministicExpression.scala  |    0
 .../scala/org/apache/spark/sql/EnvHelper.scala     |    0
 .../main/scala/org/apache/spark/sql/SQLConf.scala  |    0
 .../apache/spark/sql/SparkUnknownExpression.scala  |    0
 .../execution/datasources/CarbonFileIndex.scala    |    0
 .../datasources/CarbonFileIndexReplaceRule.scala   |    0
 .../datasources/CarbonSparkDataSourceUtil.scala    |    0
 .../datasources/SparkCarbonFileFormat.scala        |    0
 .../readsupport/SparkUnsafeRowReadSuport.scala     |    0
 .../CarbonTaskCompletionListener.scala             |    0
 .../catalyst/AbstractCarbonSparkSQLParser.scala    |    0
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala    |    0
 .../spark/sql/catalyst/CarbonParserUtil.scala      |    0
 .../catalyst/CarbonTableIdentifierImplicit.scala   |    0
 .../spark/sql/catalyst/analysis/EmptyRule.scala    |    0
 .../sql/events/MergeBloomIndexEventListener.scala  |    0
 .../spark/sql/events/MergeIndexEventListener.scala |    0
 .../sql/execution/CastExpressionOptimization.scala |    0
 .../sql/execution/command/cache/CacheUtil.scala    |    0
 .../command/cache/CarbonDropCacheCommand.scala     |    0
 .../command/cache/CarbonShowCacheCommand.scala     |    0
 .../command/carbonTableSchemaCommon.scala          |    0
 .../datamap/CarbonCreateDataMapCommand.scala       |    0
 .../datamap/CarbonDataMapRebuildCommand.scala      |    0
 .../command/datamap/CarbonDataMapShowCommand.scala |    0
 .../command/datamap/CarbonDropDataMapCommand.scala |    0
 .../command/management/CarbonAddLoadCommand.scala  |    0
 .../CarbonAlterTableCompactionCommand.scala        |    0
 .../CarbonAlterTableFinishStreaming.scala          |    0
 .../management/CarbonCleanFilesCommand.scala       |    0
 .../command/management/CarbonCliCommand.scala      |    0
 .../management/CarbonDeleteLoadByIdCommand.scala   |    0
 .../CarbonDeleteLoadByLoadDateCommand.scala        |    0
 .../management/CarbonDeleteStageFilesCommand.scala |    0
 .../management/CarbonInsertFromStageCommand.scala  |    0
 .../management/CarbonInsertIntoCommand.scala       |    0
 .../CarbonInsertIntoHadoopFsRelationCommand.scala  |    0
 .../management/CarbonInsertIntoWithDf.scala        |    0
 .../command/management/CarbonLoadDataCommand.scala |    0
 .../command/management/CarbonLoadParams.scala      |    0
 .../management/CarbonShowLoadsCommand.scala        |    0
 .../command/management/CommonLoadUtils.scala       |    0
 .../management/RefreshCarbonTableCommand.scala     |    0
 .../mutation/CarbonProjectForDeleteCommand.scala   |    0
 .../mutation/CarbonProjectForUpdateCommand.scala   |    0
 .../command/mutation/CarbonTruncateCommand.scala   |    0
 .../command/mutation/DeleteExecution.scala         |    0
 .../command/mutation/HorizontalCompaction.scala    |    0
 .../mutation/HorizontalCompactionException.scala   |    0
 .../execution/command/mutation/IUDCommonUtil.scala |    0
 .../mutation/merge/CarbonMergeDataSetCommand.scala |    0
 .../merge/CarbonMergeDataSetException.scala        |    0
 .../mutation/merge/HistoryTableLoadHelper.scala    |    0
 .../mutation/merge/MergeDataSetBuilder.scala       |    0
 .../command/mutation/merge/MergeProjection.scala   |    0
 .../command/mutation/merge/MutationAction.scala    |    0
 .../command/mutation/merge/TranxManager.scala      |    0
 .../command/mutation/merge/interfaces.scala        |    0
 .../spark/sql/execution/command/package.scala      |    0
 .../CarbonAlterTableAddHivePartitionCommand.scala  |    0
 .../CarbonAlterTableDropHivePartitionCommand.scala |    0
 .../schema/CarbonAlterTableAddColumnCommand.scala  |    0
 ...nAlterTableColRenameDataTypeChangeCommand.scala |    0
 .../schema/CarbonAlterTableDropColumnCommand.scala |    0
 .../schema/CarbonAlterTableRenameCommand.scala     |    0
 .../schema/CarbonAlterTableSetCommand.scala        |    0
 .../schema/CarbonAlterTableUnsetCommand.scala      |    0
 .../command/stream/CarbonCreateStreamCommand.scala |    0
 .../command/stream/CarbonDropStreamCommand.scala   |    0
 .../command/stream/CarbonShowStreamsCommand.scala  |    0
 .../table/CarbonCreateDataSourceTableCommand.scala |    0
 .../table/CarbonCreateTableAsSelectCommand.scala   |    0
 .../command/table/CarbonCreateTableCommand.scala   |    0
 .../table/CarbonCreateTableLikeCommand.scala       |    0
 .../table/CarbonDescribeFormattedCommand.scala     |    0
 .../command/table/CarbonDropTableCommand.scala     |    0
 .../command/table/CarbonExplainCommand.scala       |    0
 .../table/CarbonShowCreateTableCommand.scala       |    0
 .../command/table/CarbonShowTablesCommand.scala    |    0
 .../datasources/SparkCarbonTableFormat.scala       |    0
 .../strategy/CarbonLateDecodeStrategy.scala        |    0
 .../sql/execution/strategy/CarbonPlanHelper.scala  |    0
 .../spark/sql/execution/strategy/DDLHelper.scala   |   19 +-
 .../spark/sql/execution/strategy/DDLStrategy.scala |   11 +-
 .../spark/sql/execution/strategy/DMLHelper.scala   |    0
 .../execution/strategy/MixedFormatHandler.scala    |    0
 .../sql/execution/strategy/PushDownHelper.scala    |    0
 .../strategy/StreamingTableStrategy.scala          |    0
 .../streaming/CarbonAppendableStreamSink.scala     |    0
 .../spark/sql/hive/CarbonAnalysisRules.scala       |    0
 .../org/apache/spark/sql/hive/CarbonAnalyzer.scala |    0
 .../spark/sql/hive/CarbonFileMetastore.scala       |    0
 .../spark/sql/hive/CarbonHiveMetaStore.scala       |    0
 .../spark/sql/hive/CarbonHiveMetadataUtil.scala    |    0
 .../org/apache/spark/sql/hive/CarbonMVRules.scala  |    0
 .../apache/spark/sql/hive/CarbonMetaStore.scala    |    0
 .../org/apache/spark/sql/hive/CarbonRelation.scala |    0
 .../spark/sql/hive/CarbonSessionCatalog.scala      |    0
 .../spark/sql/hive/CarbonSessionCatalogUtil.scala  |    0
 .../apache/spark/sql/hive/CarbonSessionUtil.scala  |    0
 .../spark/sql/hive/CarbonSqlAstBuilder.scala       |    0
 .../org/apache/spark/sql/hive/CarbonSqlConf.scala  |    0
 .../CreateCarbonSourceTableAsSelectCommand.scala   |    0
 .../apache/spark/sql/hive/DistributionUtil.scala   |    0
 .../spark/sql/hive/SqlAstBuilderHelper.scala       |    0
 .../spark/sql/hive/cli/CarbonSQLCLIDriver.scala    |    0
 .../execution/command/CarbonHiveCommands.scala     |    0
 .../execution/command/CarbonResetCommand.scala     |    0
 .../sql/listeners/DropCacheEventListeners.scala    |    0
 .../apache/spark/sql/listeners/MVListeners.scala   |    0
 .../spark/sql/listeners/PrePrimingListener.scala   |    0
 .../sql/listeners/ShowCacheEventListeners.scala    |    0
 .../apache/spark/sql/optimizer/CarbonFilters.scala |    0
 .../apache/spark/sql/optimizer/CarbonIUDRule.scala |    0
 .../sql/optimizer/CarbonUDFTransformRule.scala     |    0
 .../parser/CarbonExtensionSpark2SqlParser.scala    |   13 +-
 .../sql/parser/CarbonExtensionSqlParser.scala      |    0
 .../spark/sql/parser/CarbonSpark2SqlParser.scala   |    0
 .../spark/sql/parser/CarbonSparkSqlParser.scala    |    0
 .../sql/parser/CarbonSparkSqlParserUtil.scala      |    2 +-
 .../org/apache/spark/sql/profiler/Profiler.scala   |    0
 .../spark/sql/profiler/ProfilerListener.scala      |    0
 .../apache/spark/sql/profiler/ProfilerLogger.scala |    2 +-
 .../Jobs/BlockletDataMapDetailsWithSchema.java     |    0
 .../Jobs/CarbonBlockLoaderHelper.java              |    0
 .../Jobs/DistributableBlockletDataMapLoader.java   |    0
 .../Jobs/SparkBlockletDataMapLoaderJob.scala       |    0
 .../secondaryindex/command/DropIndexCommand.scala  |    0
 .../command/RegisterIndexTableCommand.scala        |    0
 .../secondaryindex/command/SICreationCommand.scala |    0
 .../sql/secondaryindex/command/SILoadCommand.scala |    0
 .../command/SIRebuildSegmentCommand.scala          |    0
 .../command/ShowIndexesCommand.scala               |    0
 .../AlterTableColumnRenameEventListener.scala      |    0
 .../AlterTableCompactionPostEventListener.scala    |    0
 .../events/AlterTableDropColumnEventListener.scala |    0
 .../AlterTableMergeIndexSIEventListener.scala      |    0
 .../events/AlterTableRenameEventListener.scala     |    0
 .../events/CleanFilesPostEventListener.scala       |    0
 .../events/CreateCarbonRelationEventListener.scala |    0
 .../events/DeleteFromTableEventListener.scala      |    0
 .../events/DeleteSegmentByDateListener.scala       |    0
 .../events/DeleteSegmentByIdListener.scala         |    0
 .../events/DropCacheSIEventListener.scala          |    0
 .../sql/secondaryindex/events/LoadSIEvents.scala   |    0
 .../events/SIDropEventListener.scala               |    0
 .../events/SILoadEventListener.scala               |    0
 .../SILoadEventListenerForFailedSegments.scala     |    0
 .../events/SIRefreshEventListener.scala            |    0
 .../events/ShowCacheSIEventListener.scala          |    0
 .../events/UpdateTablePreEventListener.scala       |    0
 .../exception/IndexTableExistException.java        |    0
 .../exception/SecondaryIndexException.java         |    0
 .../hive/CarbonInternalMetastore.scala             |    2 +-
 .../joins/BroadCastSIFilterPushJoin.scala          |    0
 .../load/CarbonInternalLoaderUtil.java             |    0
 .../spark/sql/secondaryindex/load/Compactor.scala  |    0
 .../load/RowComparatorWithOutKettle.java           |    0
 .../optimizer/CarbonCostBasedOptimizer.java        |    0
 .../optimizer/CarbonSITransformationRule.scala     |    0
 .../optimizer/CarbonSecondaryIndexOptimizer.scala  |    0
 .../query/CarbonSecondaryIndexExecutor.java        |    0
 .../query/SecondaryIndexQueryResultProcessor.java  |    0
 .../secondaryindex/rdd/CarbonSIRebuildRDD.scala    |    0
 .../rdd/CarbonSecondaryIndexRDD.scala              |    0
 .../secondaryindex/rdd/SecondaryIndexCreator.scala |    0
 .../util/CarbonInternalScalaUtil.scala             |    0
 .../sql/secondaryindex/util/FileInternalUtil.scala |    0
 .../sql/secondaryindex/util/IndexTableUtil.java    |    0
 .../sql/secondaryindex/util/InternalKeyVal.scala   |    0
 .../secondaryindex/util/SecondaryIndexUtil.scala   |    0
 .../spark/sql/test/ResourceRegisterAndCopier.scala |    0
 .../spark/sql/test/SparkTestQueryExecutor.scala}   |   25 +-
 .../apache/spark/sql/test/TestQueryExecutor.scala  |   32 +-
 .../spark/sql/test/util/CarbonFunSuite.scala       |    0
 .../org/apache/spark/sql/test/util/PlanTest.scala  |    0
 .../org/apache/spark/sql/test/util/QueryTest.scala |   90 +-
 .../apache/spark/sql/util/CarbonException.scala    |    0
 .../spark/sql/util/CarbonMetastoreTypes.scala      |    0
 .../org/apache/spark/sql/util/SparkSQLUtil.scala   |    0
 .../apache/spark/sql/util/SparkTypeConverter.scala |    0
 .../org/apache/spark/util/AlterTableUtil.scala     |    0
 .../apache/spark/util/CarbonReflectionUtils.scala  |    0
 .../scala/org/apache/spark/util/CleanFiles.scala   |    0
 .../scala/org/apache/spark/util/Compaction.scala   |    0
 .../apache/spark/util/DeleteSegmentByDate.scala    |    0
 .../org/apache/spark/util/DeleteSegmentById.scala  |    0
 .../scala/org/apache/spark/util/FileUtils.scala    |    0
 .../org/apache/spark/util/MergeIndexUtil.scala     |    0
 .../org/apache/spark/util/ScalaCompilerUtil.scala  |    0
 .../scala/org/apache/spark/util/SparkUtil.scala    |    0
 .../scala/org/apache/spark/util/TableAPIUtil.scala |    0
 .../scala/org/apache/spark/util/TableLoader.scala  |    0
 .../spark/adapter/CarbonToSparkAdapter.scala       |    0
 .../apache/spark/sql/CarbonBoundReference.scala    |    0
 .../apache/spark/sql/CarbonToSparkAdapter.scala    |    7 +-
 .../apache/spark/sql/MixedFormatHandlerUtil.scala  |    0
 .../execution/strategy/CarbonDataSourceScan.scala  |    0
 .../spark/sql/hive/CarbonSessionStateBuilder.scala |    0
 .../spark/adapter/CarbonToSparkAdapter.scala       |    0
 .../apache/spark/sql/CarbonBoundReference.scala    |    0
 .../apache/spark/sql/CarbonToSparkAdapter.scala    |    5 +-
 .../apache/spark/sql/MixedFormatHandlerUtil.scala  |    0
 .../execution/strategy/CarbonDataSourceScan.scala  |    0
 .../spark/sql/hive/CarbonSessionStateBuilder.scala |    0
 ...org.apache.spark.sql.sources.DataSourceRegister |    1 +
 ...pache.spark.sql.test.TestQueryExecutorRegister} |    2 +-
 .../org/apache/carbondata/sdk/util/BinaryUtil.java |    0
 .../stream/CarbonStreamRecordReaderTest.java       |    0
 .../src/test/resources/100_olap.csv                |    0
 .../src/test/resources/10dim_4msr.csv              |    0
 .../src/test/resources/32000char.csv               |    0
 .../src/test/resources/Array.csv                   |    0
 .../src/test/resources/IUD/T_Hive1.csv             |    0
 .../src/test/resources/IUD/bad_record.csv          |    0
 .../src/test/resources/IUD/badrecord.csv           |    0
 .../src/test/resources/IUD/comp1.csv               |    0
 .../src/test/resources/IUD/comp2.csv               |    0
 .../src/test/resources/IUD/comp3.csv               |    0
 .../src/test/resources/IUD/comp4.csv               |    0
 .../src/test/resources/IUD/dest.csv                |    0
 .../src/test/resources/IUD/negativevalue.csv       |    0
 .../src/test/resources/IUD/other.csv               |    0
 .../src/test/resources/IUD/sample.csv              |    0
 .../src/test/resources/IUD/sample_updated.csv      |    0
 .../src/test/resources/IUD/source2.csv             |    0
 .../src/test/resources/IUD}/source3.csv            |    0
 .../src/test/resources/IUD/update01.csv            |    0
 .../src/test/resources/OLDFORMATTABLE.csv          |    0
 .../src/test/resources/OLDFORMATTABLEHIVE.csv      |    0
 .../src/test/resources/Struct.csv                  |    0
 .../src/test/resources/StructofStruct.csv          |    0
 .../src/test/resources/Test_Data1_Logrithmic.csv   |    0
 .../src/test/resources/adap.csv                    |    0
 .../src/test/resources/adap_double1.csv            |    0
 .../src/test/resources/adap_double2.csv            |    0
 .../src/test/resources/adap_double3.csv            |    0
 .../src/test/resources/adap_double4.csv            |    0
 .../src/test/resources/adap_int1.csv               |    0
 .../src/test/resources/adap_int2.csv               |    0
 .../src/test/resources/adap_int3.csv               |    0
 .../src/test/resources/alldatatypeforpartition.csv |    0
 .../complex/20160423/1400_1405/complex.dictionary  |    0
 .../sample/20160423/1400_1405/sample.dictionary    |    0
 .../src/test/resources/array1.csv                  |    0
 .../src/test/resources/arrayColumnEmpty.csv        |    0
 .../src/test/resources/avgTest.csv                 |    0
 .../src/test/resources/badrecords/bigtab.csv       |    0
 .../src/test/resources/badrecords/bigtabbad.csv    |    0
 .../src/test/resources/badrecords/complexdata.csv  |    0
 .../src/test/resources/badrecords/datasample.csv   |    0
 .../src/test/resources/badrecords/dummy.csv        |    0
 .../src/test/resources/badrecords/dummy2.csv       |    0
 .../resources/badrecords/emptyTimeStampValue.csv   |    0
 .../src/test/resources/badrecords/emptyValues.csv  |    0
 .../resources/badrecords/insufficientColumns.csv   |    0
 .../test/resources/badrecords/seriazableValue.csv  |    0
 .../src/test/resources/bigIntData.csv              |    0
 .../src/test/resources/bigIntDataWithHeader.csv    |    0
 .../src/test/resources/bigIntDataWithoutHeader.csv |    0
 .../test/resources/big_decimal_without_header.csv  |    0
 .../src/test/resources/big_int_Decimal.csv         |    0
 .../src/test/resources/binaryDataBase64.csv        |    0
 .../src/test/resources/binaryDataHex.csv           |    0
 .../src/test/resources/binaryStringNullData.csv    |    0
 .../src/test/resources/binarystringdata.csv        |    0
 .../src/test/resources/binarystringdata2.csv       |    0
 .../test/resources/binarystringdatawithHead.csv    |    0
 .../src/test/resources/bool/supportBoolean.csv     |    0
 .../resources/bool/supportBooleanBadRecords.csv    |    0
 .../bool/supportBooleanDifferentFormat.csv         |    0
 .../resources/bool/supportBooleanOnlyBoolean.csv   |    0
 .../bool/supportBooleanTwoBooleanColumns.csv       |    0
 .../bool/supportBooleanWithFileHeader.csv          |    0
 .../src/test/resources/channelsId.csv              |    0
 .../src/test/resources/character_carbon.csv        |    0
 .../src/test/resources/character_hive.csv          |    0
 .../test/resources/columndictionary/country.csv    |    0
 .../src/test/resources/columndictionary/name.csv   |    0
 .../src/test/resources/comment.csv                 |    0
 .../src/test/resources/compaction/compaction1.csv  |    0
 .../resources/compaction/compaction1_forhive.csv   |    0
 .../src/test/resources/compaction/compaction2.csv  |    0
 .../src/test/resources/compaction/compaction3.csv  |    0
 .../test/resources/compaction/compactionIUD1.csv   |    0
 .../test/resources/compaction/compactionIUD2.csv   |    0
 .../test/resources/compaction/compactionIUD3.csv   |    0
 .../test/resources/compaction/compactionIUD4.csv   |    0
 .../test/resources/compaction/compactioncard2.csv  |    0
 .../compaction/compactioncard2_forhive.csv         |    0
 .../compaction/nodictionary_compaction.csv         |    0
 .../src/test/resources/complexTypeDecimal.csv      |    0
 .../test/resources/complexTypeDecimalNested.csv    |    0
 .../resources/complexTypeDecimalNestedHive.csv     |    0
 .../src/test/resources/complexbinary.csv           |    0
 .../src/test/resources/complexdata.csv             |    0
 .../src/test/resources/complexdata1.csv            |    0
 .../src/test/resources/complexdata2.csv            |    0
 .../src/test/resources/complexdata3.csv            |    0
 .../src/test/resources/complexdatareordered.csv    |    0
 .../src/test/resources/complexdatastructextra.csv  |    0
 .../resources/complextypediffentcolheaderorder.csv |    0
 .../src/test/resources/complextypesample.csv       |    0
 .../resources/complextypespecialchardelimiter.csv  |    0
 .../src/test/resources/data.csv                    |    0
 .../src/test/resources/data1.csv                   |    0
 .../src/test/resources/data2.csv                   |    0
 .../src/test/resources/data2_DiffTimeFormat.csv    |    0
 .../src/test/resources/dataIncrement.csv           |    0
 .../src/test/resources/dataWithEmptyRows.csv       |    0
 .../src/test/resources/dataWithNegativeValues.csv  |    0
 .../src/test/resources/dataWithNullFirstLine.csv   |    0
 .../src/test/resources/dataWithSingleQuote.csv     |    0
 .../src/test/resources/data_alltypes.csv           |    0
 .../src/test/resources/data_beyond68yrs.csv        |    0
 .../src/test/resources/data_big.csv                |    0
 .../test/resources/data_partition_badrecords.csv   |    0
 .../src/test/resources/data_sort.csv               |    0
 .../src/test/resources/data_timestamp.csv          |    0
 .../src/test/resources/data_withCAPSHeader.csv     |    0
 .../src/test/resources/data_withMixedHeader.csv    |    0
 .../src/test/resources/data_with_all_types.csv     |    0
 .../src/test/resources/data_with_special_char.csv  |    0
 .../src/test/resources/datadelimiter.csv           |    0
 .../src/test/resources/datanullmeasurecol.csv      |    0
 .../src/test/resources/dataretention1.csv          |    0
 .../src/test/resources/dataretention11.csv         |    0
 .../src/test/resources/dataretention2.csv          |    0
 .../src/test/resources/dataretention3.csv          |    0
 .../src/test/resources/datasample.csv              |    0
 .../src/test/resources/datasamplecomplex.csv       |    0
 .../src/test/resources/datasamplefordate.csv       |    0
 .../src/test/resources/datasamplenull.csv          |    0
 .../src/test/resources/datasingleCol.csv           |    0
 .../src/test/resources/datasingleComplexCol.csv    |    0
 .../resources/datawithNegeativewithoutHeader.csv   |    0
 .../src/test/resources/datawithNegtiveNumber.csv   |    0
 .../src/test/resources/datawithbackslash.csv       |    0
 .../src/test/resources/datawithblanklines.csv      |    0
 .../test/resources/datawithcomplexspecialchar.csv  |    0
 .../src/test/resources/datawithescapecharacter.csv |    0
 .../src/test/resources/datawithmaxbigint.csv       |    0
 .../src/test/resources/datawithmaxinteger.csv      |    0
 .../src/test/resources/datawithmaxminbigint.csv    |    0
 .../src/test/resources/datawithmaxmininteger.csv   |    0
 .../src/test/resources/datawithminbigint.csv       |    0
 .../src/test/resources/datawithmininteger.csv      |    0
 .../src/test/resources/datawithnullmeasure.csv     |    0
 .../src/test/resources/datawithnullmsrs.csv        |    0
 .../src/test/resources/datawithoutheader.csv       |    0
 .../test/resources/datawithspecialcharacter.csv    |    0
 .../src/test/resources/datedatafile.csv            |    0
 .../src/test/resources/dblocation/test.csv         |    0
 .../test/resources/decimalBoundaryDataCarbon.csv   |    0
 .../src/test/resources/decimalBoundaryDataHive.csv |    0
 .../src/test/resources/decimalData.csv             |    0
 .../src/test/resources/decimalDataWithHeader.csv   |    0
 .../test/resources/decimalDataWithoutHeader.csv    |    0
 .../src/test/resources/decimal_int_range.csv       |    0
 .../src/test/resources/deviceInformationId.csv     |    0
 .../src/test/resources/deviceInformationId2.csv    |    0
 .../src/test/resources/dimSample.csv               |    0
 .../src/test/resources/dimTableSample.csv          |    0
 .../src/test/resources/double.csv                  |    0
 .../double/data_notitle_AdaptiveFloating_byte.csv  |    0
 .../double/data_notitle_AdaptiveFloating_int.csv   |    0
 .../double/data_notitle_AdaptiveFloating_short.csv |    0
 .../data_notitle_AdaptiveFloating_short_int.csv    |    0
 .../test/resources/double/data_notitle_byte.csv    |    0
 .../src/test/resources/double/data_notitle_int.csv |    0
 .../test/resources/double/data_notitle_long.csv    |    0
 .../test/resources/double/data_notitle_short.csv   |    0
 .../resources/double/data_notitle_short_int.csv    |    0
 .../src/test/resources/emp.csv                     |    0
 .../src/test/resources/emptyDimensionData.csv      |    0
 .../src/test/resources/emptyDimensionDataHive.csv  |    0
 .../src/test/resources/emptylines.csv              |    0
 .../resources/emptyrow/csvwithonlyspacechar.csv    |    0
 .../src/test/resources/emptyrow/emptyRows.csv      |    0
 .../src/test/resources/encoding_types.csv          |    0
 .../src/test/resources/filter/betweenFilter.csv    |    0
 .../src/test/resources/filter/datagrtlrt.csv       |    0
 .../src/test/resources/filter/datawithnull.csv     |    0
 .../src/test/resources/filter/datawithoutnull.csv  |    0
 .../src/test/resources/filter/emp2.csv             |    0
 .../src/test/resources/filter/emp2allnull.csv      |    0
 .../src/test/resources/filter/emp2nonull.csv       |    0
 .../src/test/resources/filter/notEqualToFilter.csv |    0
 .../src/test/resources/filter/notNullFilter.csv    |    0
 .../src/test/resources/floatSample.csv             |    0
 .../src/test/resources/geodata.csv                 |    0
 .../src/test/resources/globalsort/sample1.csv      |    0
 .../src/test/resources/globalsort/sample2.csv      |    0
 .../src/test/resources/globalsort/sample3.csv      |    0
 .../resources/hiverangenodictionarycompare.csv     |    0
 .../src/test/resources/invalidMeasures.csv         |    0
 .../src/test/resources/j2.csv                      |    0
 .../src/test/resources/join/data1.csv              |    0
 .../src/test/resources/join/data2.csv              |    0
 .../src/test/resources/join/emp.csv                |    0
 .../src/test/resources/join/employee.csv           |    0
 .../src/test/resources/join/mgr.csv                |    0
 .../src/test/resources/join/mobile.csv             |    0
 .../jsonFiles/data/PrimitiveTypeWithNull.json      |    0
 .../resources/jsonFiles/data/StructOfAllTypes.json |    0
 .../resources/jsonFiles/data/allPrimitiveType.json |    0
 .../jsonFiles/data/allPrimitiveTypeBadRecord.json  |    0
 .../jsonFiles/data/arrayOfStructOfStruct.json      |    0
 .../data/arrayOfarrayOfarrayOfStruct.json          |    0
 .../JsonReaderTest/MultipleRowSingleLineJson.json  |    0
 .../JsonReaderTest/SingleRowSingleLineJson.json    |    0
 ...pleRowMultipleLineJsonWithRecordIdentifier.json |    0
 ...gleRowMultipleLineJsonWithRecordIdentifier.json |    0
 ...ingleRowSingleLineJsonWithRecordIdentifier.json |    0
 .../allPrimitiveTypeMultipleRows.json              |    0
 .../allPrimitiveTypeSingleArray.json               |    0
 .../jsonFiles/schema/StructOfAllTypes.avsc         |    0
 .../jsonFiles/schema/arrayOfStructOfStruct.avsc    |    0
 .../schema/arrayOfarrayOfarrayOfStruct.avsc        |    0
 .../src/test/resources/lessthandatacolumndata.csv  |    0
 .../resources/loadMultiFiles/.invisibilityfile     |    0
 .../src/test/resources/loadMultiFiles/_SUCCESS     |    0
 .../src/test/resources/loadMultiFiles/data.csv     |    0
 .../test/resources/loadMultiFiles/emptyfile.csv    |    0
 .../loadMultiFiles/nestedfolder1/data.csv          |    0
 .../loadMultiFiles/nestedfolder1/data1.csv         |    0
 .../nestedfolder1/nestedfolder2/data.csv           |    0
 .../src/test/resources/loadMultiFiles/non-csv      |    0
 .../src/test/resources/localdictionary.csv         |    0
 .../test/resources/locationInfoActiveCountry.csv   |    0
 .../src/test/resources/mac.csv                     |    0
 .../src/test/resources/measureinsertintotest.csv   |    0
 .../src/test/resources/mobileimei.csv              |    0
 .../src/test/resources/mv_sampledata.csv           |    0
 .../src/test/resources/newsample.csv               |    0
 .../src/test/resources/noneCsvFormat.cs            |    0
 .../src/test/resources/nontransactional.csv        |    0
 .../src/test/resources/nontransactional1.csv       |    0
 .../src/test/resources/nullSample.csv              |    0
 .../src/test/resources/nullandnonparsableValue.csv |    0
 .../src/test/resources/nullmeasurevalue.csv        |    0
 .../src/test/resources/nullvalueserialization.csv  |    0
 .../resources/numeric_column_invalid_values.csv    |    0
 .../src/test/resources/oscon_10.csv                |    0
 .../src/test/resources/outofrange.csv              |    0
 .../test/resources/overwriteTable1_noRecord.csv    |    0
 .../test/resources/overwriteTable1_someRecord.csv  |    0
 .../test/resources/overwriteTable2_noRecord.csv    |    0
 .../test/resources/overwriteTable2_someRecord.csv  |    0
 .../src/test/resources/partData.csv                |    0
 .../src/test/resources/partition_data.csv          |    0
 .../src/test/resources/partition_data_example.csv  |    0
 .../resources/predefdic/allpredefdictionary.csv    |    0
 .../src/test/resources/predefdic/data3.csv         |    0
 .../src/test/resources/predefdic/dicfilepath.csv   |    0
 .../src/test/resources/products.csv                |    0
 .../src/test/resources/range_column/dataskew.csv   |    0
 .../src/test/resources/rangedata.csv               |    0
 .../src/test/resources/rangedatasample.csv         |    0
 .../test/resources/rangenodictionarycompare.csv    |    0
 .../src/test/resources/restructure/data1.csv       |    0
 .../src/test/resources/restructure/data2.csv       |    0
 .../src/test/resources/restructure/data3.csv       |    0
 .../src/test/resources/restructure/data4.csv       |    0
 .../src/test/resources/restructure/data5.csv       |    0
 .../src/test/resources/restructure/data6.csv       |    0
 .../src/test/resources/restructure/data7.csv       |    0
 .../src/test/resources/restructure/data_2000.csv   |    0
 .../src/test/resources/sales_data.csv              |    0
 .../src/test/resources/sample                      |    0
 .../src/test/resources/sample.csv                  |    0
 .../src/test/resources/sample.csv.bz2              |  Bin
 .../src/test/resources/sample.csv.gz               |  Bin
 .../src/test/resources/sampleComplex.csv           |    0
 .../src/test/resources/sample_withDelimiter017.csv |    0
 .../src/test/resources/secindex/IUD/sample_1.csv   |    0
 .../src/test/resources/secindex/IUD/sample_2.csv   |    0
 .../src/test/resources/secindex/data_10000.csv     |    0
 .../src/test/resources/secindex/datafile_100.csv   |    0
 .../src/test/resources/secindex/dest.csv           |    0
 .../src/test/resources/secindex/dest1.csv          |    0
 .../src/test/resources/secindex/dest2.csv          |    0
 .../src/test/resources/secindex/dest3.csv          |    0
 .../src/test/resources/secindex/firstunique.csv    |    0
 .../src/test/resources/secindex/index.csv          |    0
 .../resources/secindex/secondaryIndexLikeTest.csv  |    0
 .../src/test/resources/secindex/secondunique.csv   |    0
 .../src/test/resources/secindex}/source3.csv       |    0
 .../src/test/resources/seq_20Records.csv           |    0
 .../src/test/resources/shortintboundary.csv        |    0
 .../src/test/resources/shortolap.csv               |    0
 .../test/resources/sort_columns/alldatatype1.csv   |    0
 .../test/resources/sort_columns/alldatatype2.csv   |    0
 .../src/test/resources/source.csv                  |    0
 .../src/test/resources/source_without_header.csv   |    0
 .../src/test/resources/streamSample.csv            |    0
 .../resources/streamSample_with_long_string.csv    |    0
 .../src/test/resources/struct_all.csv              |    0
 .../src/test/resources/structofarray.csv           |   42 +-
 .../src/test/resources/structusingstruct.csv       |    0
 .../src/test/resources/temp/data1.csv              |    0
 .../src/test/resources/test.json                   |    0
 .../test/resources/testBigInt_boundary_value.csv   |    0
 .../src/test/resources/testShortAndIntDataType.csv |    0
 .../src/test/resources/test_json.json              |    0
 .../src/test/resources/timeStampFormatData1.csv    |    0
 .../src/test/resources/timeStampFormatData2.csv    |    0
 .../src/test/resources/timeseriestest.csv          |    0
 .../src/test/resources/timestamp.csv               |    0
 .../src/test/resources/timestampdata.csv           |    0
 .../src/test/resources/timestampdatafile.csv       |    0
 .../src/test/resources/tpch/customers.csv          |    0
 .../src/test/resources/tpch/lineitem.csv           |    0
 .../src/test/resources/tpch/nation.csv             |    0
 .../src/test/resources/tpch/orders.csv             |    0
 .../src/test/resources/tpch/region.csv             |    0
 .../src/test/resources/tpch/supplier.csv           |    0
 .../src/test/resources/unicodechar.csv             |    0
 .../src/test/resources/uniq.csv                    |    0
 .../src/test/resources/uniqwithoutheader.csv       |    0
 .../src/test/resources/vardhandaterestruct.csv     |    0
 .../src/test/resources/verticalDelimitedData.csv   |    0
 .../BloomCoarseGrainDataMapFunctionSuite.scala     |   11 +-
 .../bloom/BloomCoarseGrainDataMapSuite.scala       |    0
 .../bloom/BloomCoarseGrainDataMapTestUtil.scala    |    0
 .../lucene/LuceneCoarseGrainDataMapSuite.scala     |    0
 .../lucene/LuceneFineGrainDataMapSuite.scala       |    0
 .../scala/org/apache/carbondata/geo/GeoTest.scala  |    0
 .../aggquery/IntegerDataTypeTestCase.scala         |    0
 .../spark/testsuite/bigdecimal/TestBigInt.scala    |    5 +-
 .../TestDimensionWithDecimalDataType.scala         |    5 +-
 .../testsuite/binary/TestBinaryDataType.scala      |    1 +
 .../complexType/TestAdaptiveComplexType.scala      |    9 +-
 .../TestAdaptiveEncodingForNullValues.scala        |    4 +-
 ...eEncodingSafeColumnPageForComplexDataType.scala |    0
 ...ncodingUnsafeColumnPageForComplexDataType.scala |    0
 ...ingUnsafeHeapColumnPageForComplexDataType.scala |    0
 .../complexType/TestAllComplexDataType.scala       |    0
 .../complexType/TestCompactionComplexType.scala    |    6 +-
 .../complexType/TestComplexDataType.scala          |   20 +-
 .../complexType/TestComplexTypeQuery.scala         |    4 +-
 .../complexType/TestComplexTypeWithBigArray.scala  |    0
 .../complexType/TestCreateTableWithDouble.scala    |    0
 .../dataload/MultiFilesDataLoagdingTestCase.scala  |    0
 .../testsuite/dataload/TestLoadDataGeneral.scala   |    0
 .../dataload/TestLoadDataWithAutoLoadMerge.scala   |    0
 .../dataload/TestLoadDataWithBlankLine.scala       |    1 -
 .../dataload/TestLoadDataWithCompression.scala     |    2 +-
 .../TestLoadDataWithEmptyArrayColumns.scala        |    6 +-
 .../dataload/TestLoadDataWithJunkChars.scala       |    2 +-
 .../dataload/TestLoadDataWithMaxMinBigInt.scala    |    0
 .../dataload/TestLoadDataWithMaxMinInteger.scala   |    0
 .../dataload/TestLoadDataWithNullMeasures.scala    |    0
 .../TestLoadDataWithSortColumnBounds.scala         |    0
 .../dataload/TestLoadDataWithUnsafeMemory.scala    |    0
 .../dataload/TestLoadDataWithYarnLocalDirs.scala   |    0
 .../dataload/TestNoInvertedIndexLoadAndQuery.scala |    3 +
 .../emptyrow/TestCSVHavingOnlySpaceChar.scala      |    5 +-
 .../spark/testsuite/emptyrow/TestEmptyRows.scala   |    5 +-
 .../testsuite/emptyrow/TestSkipEmptyLines.scala    |    0
 .../primitiveTypes/ArrayDataTypeTestCase.scala     |    0
 .../primitiveTypes/DoubleDataTypeTestCase.scala    |    0
 .../primitiveTypes/FloatDataTypeTestCase.scala     |    0
 .../primitiveTypes/MapDataTypeTestCase.scala       |    0
 .../TestAdaptiveEncodingForPrimitiveTypes.scala    |    2 +-
 .../carbondata/spark/testsuite/TestCarbonCli.scala |    4 +-
 .../testsuite/addsegment/AddSegmentTestCase.scala  |   33 +-
 .../aggquery/AllDataTypesTestCaseAggregate.scala   |    3 +
 .../testsuite/aggquery/AverageQueryTestCase.scala  |    5 +-
 .../allqueries/AllDataTypesTestCase.scala          |    2 -
 .../testsuite/allqueries/DoubleDataTypeTest.scala  |    0
 .../InsertIntoCarbonTableSpark2TestCase.scala      |    4 +-
 .../allqueries/InsertIntoCarbonTableTestCase.scala |    0
 .../allqueries/MeasureOnlyTableTestCases.scala     |    6 +-
 ...ryWithColumnMetCacheAndCacheLevelProperty.scala |    0
 .../allqueries/TestQueryWithoutDataLoad.scala      |    0
 .../allqueries/TestTableNameHasDbName.scala        |    0
 .../alterTable/TestAlterTableAddColumns.scala      |    0
 .../TestAlterTableCompactionLevelThreshold.scala   |    0
 .../TestAlterTableSortColumnsProperty.scala        |    6 +
 ...leWithColumnMetCacheAndCacheLevelProperty.scala |    0
 .../badrecordloger/BadRecordActionTest.scala       |    1 +
 .../badrecordloger/BadRecordEmptyDataTest.scala    |   13 +-
 .../badrecordloger/BadRecordLoggerTest.scala       |    6 +-
 .../testsuite/bigdecimal/TestAvgForBigInt.scala    |    0
 .../testsuite/bigdecimal/TestBigDecimal.scala      |   13 +-
 .../bigdecimal/TestNullAndEmptyFields.scala        |    2 -
 .../bigdecimal/TestNullAndEmptyFieldsUnsafe.scala  |    7 +-
 .../blockprune/BlockPruneQueryTestCase.scala       |    0
 .../CarbonCustomBlockDistributionTest.scala        |    4 +-
 .../booleantype/BooleanDataTypesBaseTest.scala     |    4 +-
 .../booleantype/BooleanDataTypesBigFileTest.scala  |    4 +-
 .../booleantype/BooleanDataTypesFilterTest.scala   |   13 +-
 .../booleantype/BooleanDataTypesInsertTest.scala   |   18 +-
 .../booleantype/BooleanDataTypesLoadTest.scala     |   47 +-
 .../BooleanDataTypesParameterTest.scala            |    4 +-
 .../booleantype/BooleanDataTypesSortTest.scala     |    4 +-
 .../compress/TestBooleanCompressSuite.scala        |    8 +-
 .../compaction/TestHybridCompaction.scala          |    7 +-
 .../TestAlterTableWithTableComment.scala           |    0
 ...bonFileInputFormatWithExternalCarbonTable.scala |    0
 .../TestCreateDDLForComplexMapType.scala           |    2 +-
 .../createTable/TestCreateExternalTable.scala      |    0
 .../TestCreateHiveTableWithCarbonDS.scala          |    0
 .../createTable/TestCreateTableAsSelect.scala      |    6 +-
 .../createTable/TestCreateTableIfNotExists.scala   |    0
 .../createTable/TestCreateTableLike.scala          |    0
 .../TestCreateTableWithBlockletSize.scala          |    0
 .../TestCreateTableWithColumnComment.scala         |    0
 ...leWithColumnMetCacheAndCacheLevelProperty.scala |    0
 .../TestCreateTableWithCompactionOptions.scala     |    0
 ...TestCreateTableWithDatabaseNameCaseChange.scala |    0
 .../TestCreateTableWithPageSizeInMb.scala          |    0
 .../createTable/TestCreateTableWithSortScope.scala |    0
 .../TestCreateTableWithSpaceInColumnName.scala     |    0
 .../TestCreateTableWithTableComment.scala          |    0
 .../TestNonTransactionalCarbonTable.scala          |    0
 .../TestNonTransactionalCarbonTableForBinary.scala |    2 +-
 ...TestNonTransactionalCarbonTableForMapType.scala |    0
 ...TestNonTransactionalCarbonTableJsonWriter.scala |    0
 ...nTransactionalCarbonTableWithAvroDataType.scala |    6 -
 ...onTransactionalCarbonTableWithComplexType.scala |    0
 .../createTable/TestRenameTableWithDataMap.scala   |    0
 .../CarbonIndexFileMergeTestCase.scala             |    0
 .../CompactionSupportGlobalSortBigFileTest.scala   |    0
 .../CompactionSupportGlobalSortFunctionTest.scala  |    0
 .../CompactionSupportGlobalSortParameterTest.scala |    8 +-
 .../CompactionSupportSpecifiedSegmentsTest.scala   |    0
 .../DataCompactionBlockletBoundryTest.scala        |    8 +-
 .../DataCompactionBoundaryConditionsTest.scala     |    9 +-
 .../DataCompactionCardinalityBoundryTest.scala     |    7 +-
 .../datacompaction/DataCompactionLockTest.scala    |    5 +-
 .../MajorCompactionIgnoreInMinorTest.scala         |    5 +-
 .../MajorCompactionStopsAfterCompaction.scala      |    5 +-
 .../MajorCompactionWithMeasureSortColumns.scala    |    0
 .../TableLevelCompactionOptionTest.scala           |    0
 .../dataload/TestDataLoadPartitionCoalescer.scala  |    0
 .../TestDataLoadWithColumnsMoreThanSchema.scala    |    0
 .../dataload/TestDataLoadWithFileName.scala        |    0
 .../TestDataWithDicExcludeAndInclude.scala         |    5 +-
 .../dataload/TestGlobalSortDataLoad.scala          |   14 +-
 .../testsuite/dataload/TestLoadDataFrame.scala     |    0
 .../dataload/TestLoadDataUseAllDictionary.scala    |    0
 .../TestLoadDataWithDiffTimestampFormat.scala      |    5 -
 .../TestLoadDataWithFileHeaderException.scala      |    0
 .../TestLoadDataWithHiveSyntaxDefaultFormat.scala  |   10 +-
 .../TestLoadDataWithHiveSyntaxUnsafe.scala         |    9 +-
 ...adDataWithMalformedCarbonCommandException.scala |    0
 .../dataload/TestLoadDataWithNoMeasure.scala       |    0
 .../TestLoadDataWithNotProperInputFile.scala       |    0
 .../spark/testsuite/dataload/TestLoadOptions.scala |    0
 .../dataload/TestLoadTblNameIsKeyword.scala        |    0
 .../dataload/TestLoadWithSortTempCompressed.scala  |    0
 .../dataload/TestRangeColumnDataLoad.scala         |    6 +-
 .../dataload/TestTableLevelBlockSize.scala         |    3 +-
 .../testsuite/dataload/TestTableLoadMinSize.scala  |    3 +-
 .../testsuite/datamap/CGDataMapTestCase.scala      |    1 +
 .../testsuite/datamap/DataMapWriterSuite.scala     |    0
 .../testsuite/datamap/FGDataMapTestCase.scala      |    1 +
 .../testsuite/datamap/TestDataMapCommand.scala     |    0
 .../testsuite/datamap/TestDataMapStatus.scala      |    0
 .../dataretention/DataRetentionTestCase.scala      |   29 +-
 .../dblocation/DBLocationCarbonTableTestCase.scala |    0
 .../deleteTable/TestDeleteTableNewDDL.scala        |    1 +
 .../describeTable/TestDescribeTable.scala          |    0
 .../detailquery/AllQueriesSpark2TestCase.scala     |    0
 .../testsuite/detailquery/CastColumnTestCase.scala |    8 +-
 .../ColumnPropertyValidationTestCase.scala         |    0
 .../detailquery/ExpressionWithNullTestCase.scala   |    0
 .../HighCardinalityDataTypesTestCase.scala         |    5 +-
 .../detailquery/IntegerDataTypeTestCase.scala      |    0
 .../detailquery/NoDictionaryColumnTestCase.scala   |    0
 .../RangeFilterAllDataTypesTestCases.scala         |   11 +-
 .../detailquery/RangeFilterTestCase.scala          |    2 -
 .../SubqueryWithFilterAndSortTestCase.scala        |    0
 .../ValueCompressionDataTypeTestCase.scala         |    0
 .../DateDataTypeDirectDictionaryTest.scala         |    9 +-
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |    4 +-
 ...rectDictionaryWithOffHeapSortDisabledTest.scala |    9 +-
 .../DateDataTypeNullDataTest.scala                 |    4 +-
 ...TimestampDataTypeDirectDictionaryTestCase.scala |   14 +-
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |   38 +-
 .../TimestampDataTypeNullDataTest.scala            |    0
 .../TimestampNoDictionaryColumnCastTestCase.scala  |    0
 .../TimestampNoDictionaryColumnTestCase.scala      |   28 +-
 .../filterexpr/AllDataTypesTestCaseFilter.scala    |    0
 .../testsuite/filterexpr/CountStarTestCase.scala   |    5 +-
 .../filterexpr/FilterProcessorTestCase.scala       |    3 +-
 .../filterexpr/GrtLtFilterProcessorTestCase.scala  |    2 -
 .../filterexpr/IntegerDataTypeTestCase.scala       |    0
 .../NullMeasureValueTestCaseFilter.scala           |    3 +-
 .../TestAndEqualFilterEmptyOperandValue.scala      |    2 -
 .../testsuite/filterexpr/TestBetweenFilter.scala   |    0
 .../testsuite/filterexpr/TestGrtLessFilter.scala   |    2 -
 .../filterexpr/TestImplicitFilterExpression.scala  |    0
 .../spark/testsuite/filterexpr/TestInFilter.scala  |    1 +
 .../testsuite/filterexpr/TestIsNullFilter.scala    |    0
 .../testsuite/filterexpr/TestNotNullFilter.scala   |    3 +-
 .../FlatFolderTableLoadingTestCase.scala           |    4 +-
 .../InsertIntoNonCarbonTableTestCase.scala         |    0
 .../testsuite/iud/DeleteCarbonTableTestCase.scala  |   10 +-
 .../iud/HorizontalCompactionTestCase.scala         |    0
 .../iud/TestInsertAndOtherCommandConcurrent.scala  |    0
 .../iud/TestUpdateAndDeleteWithLargeData.scala     |    0
 .../testsuite/iud/UpdateCarbonTableTestCase.scala  |   11 +-
 .../UpdateCarbonTableTestCaseWithBadRecord.scala   |    3 +-
 .../joinquery/AllDataTypesTestCaseJoin.scala       |    0
 .../joinquery/IntegerDataTypeTestCase.scala        |    0
 .../joinquery/JoinWithoutDictionaryColumn.scala    |    0
 .../testsuite/joinquery/OrderByLimitTestCase.scala |    0
 .../LocalDictionarySupportAlterTableTest.scala     |    0
 .../LocalDictionarySupportCreateTableTest.scala    |    0
 .../LocalDictionarySupportLoadTableTest.scala      |    5 +-
 .../longstring/VarcharDataTypesBasicTestCase.scala |    0
 .../NullMeasureValueTestCaseAggregate.scala        |    3 +-
 .../spark/testsuite/merge/MergeTestCase.scala      |    0
 .../TestNullValueSerialization.scala               |    2 -
 .../testsuite/partition/TestShowPartitions.scala   |    4 +
 .../partition/TestUpdateForPartitionTable.scala    |    0
 .../sdk/TestSDKWithTransactionalTable.scala        |    2 +-
 .../segmentreading/TestSegmentReading.scala        |    3 +-
 .../TestSegmentReadingForMultiThreading.scala      |    0
 .../testsuite/sortcolumns/TestSortColumns.scala    |   12 +-
 .../sortcolumns/TestSortColumnsWithUnsafe.scala    |    0
 .../sortexpr/AllDataTypesTestCaseSort.scala        |    0
 .../sortexpr/IntegerDataTypeTestCase.scala         |    0
 .../StandardPartitionBadRecordLoggerTest.scala     |    0
 .../StandardPartitionComplexDataTypeTestCase.scala |    0
 .../StandardPartitionGlobalSortTestCase.scala      |    7 +-
 .../StandardPartitionTableCleanTestCase.scala      |    8 +-
 .../StandardPartitionTableCompactionTestCase.scala |   11 +-
 .../StandardPartitionTableDropTestCase.scala       |    5 +
 .../StandardPartitionTableLoadingTestCase.scala    |    4 +-
 .../StandardPartitionTableOverwriteTestCase.scala  |    5 +
 .../StandardPartitionTableQueryTestCase.scala      |   19 +-
 .../windowsexpr/WindowsExprTestCase.scala          |    6 +-
 .../spark/util/DataTypeConverterUtilSuite.scala    |    0
 .../util/ExternalColumnDictionaryTestCase.scala    |    0
 .../sql/commands/StoredAsCarbondataSuite.scala     |    0
 .../sql/commands/TestCarbonDropCacheCommand.scala  |    0
 .../sql/commands/TestCarbonShowCacheCommand.scala  |    0
 .../sql/commands/UsingCarbondataSuite.scala        |    0
 .../carbondata/store/SparkCarbonStoreTest.scala    |    0
 .../indexserver/DistributedRDDUtilsTest.scala      |    0
 .../scala/org/apache/spark/SparkCommandSuite.scala |    4 +-
 .../carbondata/BadRecordPathLoadOptionTest.scala   |   16 +-
 .../spark/carbondata/CarbonDataSourceSuite.scala   |    7 +-
 .../carbondata/DataLoadFailAllTypeSortTest.scala   |   17 +-
 .../spark/carbondata/TableStatusBackupTest.scala   |    0
 .../carbondata/TestStreamingTableOpName.scala      |   18 +-
 .../carbondata/TestStreamingTableQueryFilter.scala |    0
 .../TestStreamingTableWithLongString.scala         |    4 +-
 .../TestStreamingTableWithRowParser.scala          |    0
 .../bucketing/TableBucketingTestCase.scala         |    9 +-
 .../carbondata/commands/SetCommandTestCase.scala   |   12 +-
 .../datatype/NumericDimensionBadRecordTest.scala   |   10 +-
 .../deletetable/DeleteTableTestCase.scala          |    4 +-
 .../iud/DeleteCarbonTableSubqueryTestCase.scala    |    6 +-
 .../carbondata/query/SubQueryJoinTestSuite.scala   |    4 +-
 .../spark/carbondata/query/SubQueryTestSuite.scala |    4 +-
 .../carbondata/query/TestNotEqualToFilter.scala    |    4 +-
 .../register/TestRegisterCarbonTable.scala         |    0
 .../restructure/AlterTableRevertTestCase.scala     |    4 +-
 .../restructure/AlterTableUpgradeSegmentTest.scala |    0
 .../restructure/AlterTableValidationTestCase.scala |    9 +-
 .../vectorreader/AddColumnTestCases.scala          |   16 +-
 .../AlterTableColumnRenameTestCase.scala           |    4 +-
 .../vectorreader/ChangeDataTypeTestCases.scala     |    9 +-
 .../vectorreader/DropColumnTestCases.scala         |    8 +-
 .../vectorreader/VectorReaderTestCase.scala        |    9 +-
 .../apache/spark/sql/CarbonExtensionSuite.scala    |    0
 .../sql/CarbonGetTableDetailComandTestCase.scala   |    0
 .../spark/sql/GetDataSizeAndIndexSizeTest.scala    |    0
 .../SparkCarbonDataSourceBinaryTest.scala          |  743 +++++++
 .../datasource/SparkCarbonDataSourceTest.scala     | 2237 ++++++++++++++++++++
 ...TestCreateTableUsingSparkCarbonFileFormat.scala |  177 +-
 .../org/apache/spark/sql/common/util/Tags.scala    |    0
 .../command/CarbonTableSchemaCommonSuite.scala     |    0
 .../mutation/CarbonTruncateCommandTest.scala       |    8 +-
 .../apache/spark/sql/profiler/ProfilerSuite.scala  |    0
 .../org/apache/spark/util/CarbonCommandSuite.scala |   11 +-
 .../org/apache/spark/util/SparkUtil4Test.scala     |    0
 .../org/apache/spark/util/SparkUtilTest.scala      |    0
 integration/spark2/pom.xml                         |  339 ---
 ...apache.spark.sql.test.TestQueryExecutorRegister |   17 -
 .../spark/sql/common/util/Spark2QueryTest.scala    |   27 -
 .../mv/rewrite/TestAllOperationsOnMV.scala         |    7 -
 pom.xml                                            |   71 +-
 .../carbondata/lcm/locks/LocalFileLockTest.java    |    1 -
 python/README.md                                   |    6 +-
 python/pycarbon/tests/__init__.py                  |    2 +-
 .../pycarbon/tests/sdk/test_read_write_carbon.py   |    2 +-
 tools/cli/pom.xml                                  |    2 +-
 928 files changed, 4039 insertions(+), 4726 deletions(-)

diff --git a/assembly/pom.xml b/assembly/pom.xml
index 5fbc26a..fa14171 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -58,7 +58,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
+      <artifactId>carbondata-spark</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
diff --git a/conf/dataload.properties.template b/conf/dataload.properties.template
index 1a165a8..6145d33 100644
--- a/conf/dataload.properties.template
+++ b/conf/dataload.properties.template
@@ -18,7 +18,7 @@
 
 #carbon store path
 # you should change to the code path of your local machine
-carbon.storelocation=/home/david/Documents/carbondata/examples/spark2/target/store
+carbon.storelocation=/home/david/Documents/carbondata/examples/spark/target/store
 
 #csv delimiter character
 delimiter=,
diff --git a/docs/alluxio-guide.md b/docs/alluxio-guide.md
index bad1fc0..1fb8187 100644
--- a/docs/alluxio-guide.md
+++ b/docs/alluxio-guide.md
@@ -34,7 +34,7 @@ This tutorial provides a brief introduction to using Alluxio.
  - Access the Alluxio web: [http://localhost:19999/home](http://localhost:19999/home)   
 
 ### Running Example
- - Please refer to [AlluxioExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala)
+ - Please refer to [AlluxioExample](https://github.com/apache/carbondata/blob/master/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala)
 
 ## CarbonData supports Alluxio via spark-shell
 
@@ -50,7 +50,7 @@ This tutorial provides a brief introduction to using Alluxio.
 ### Running spark-shell
  - Run the following command from the Spark directory
  ```$command
-./bin/spark-shell --jars ${CARBONDATA_PATH}/assembly/target/scala-2.11/apache-carbondata-1.6.0-SNAPSHOT-bin-spark2.3.4-hadoop2.7.2.jar,${ALLUXIO_PATH}/client/alluxio-1.8.1-client.jar
+./bin/spark-shell --jars ${CARBONDATA_PATH}/assembly/target/scala-2.11/apache-carbondata-2.0.0-SNAPSHOT-bin-spark2.3.4-hadoop2.7.2.jar,${ALLUXIO_PATH}/client/alluxio-1.8.1-client.jar
 ```
  - Test using Alluxio with CarbonSession
  ```$scala
@@ -59,7 +59,7 @@ import org.apache.spark.sql.SparkSession
 	
 val carbon = SparkSession.builder().master("local").appName("test").getOrCreateCarbonSession("alluxio://localhost:19998/carbondata");
 carbon.sql("CREATE TABLE carbon_alluxio(id String,name String, city String,age Int) STORED as carbondata");
-carbon.sql(s"LOAD DATA LOCAL INPATH '${CARBONDATA_PATH}/integration/spark-common-test/src/test/resources/sample.csv' into table carbon_alluxio");
+carbon.sql(s"LOAD DATA LOCAL INPATH '${CARBONDATA_PATH}/integration/spark/src/test/resources/sample.csv' into table carbon_alluxio");
 carbon.sql("select * from carbon_alluxio").show
 ```
  - Result
@@ -96,9 +96,9 @@ carbon.sql("select * from carbon_alluxio").show
 ```$command
 ./bin/spark-submit \
 --master local \
---jars ${ALLUXIO_PATH}/client/alluxio-1.8.1-client.jar,${CARBONDATA_PATH}/examples/spark2/target/carbondata-examples-1.6.0-SNAPSHOT.jar \
+--jars ${ALLUXIO_PATH}/client/alluxio-1.8.1-client.jar,${CARBONDATA_PATH}/examples/spark/target/carbondata-examples-2.0.0-SNAPSHOT.jar \
 --class org.apache.carbondata.examples.AlluxioExample \
-${CARBONDATA_PATH}/assembly/target/scala-2.11/apache-carbondata-1.6.0-SNAPSHOT-bin-spark2.3.4-hadoop2.7.2.jar \
+${CARBONDATA_PATH}/assembly/target/scala-2.11/apache-carbondata-2.0.0-SNAPSHOT-bin-spark2.3.4-hadoop2.7.2.jar \
 false
 ```
 **NOTE**: Please set runShell to false to avoid a dependency on the alluxio shell module.
diff --git a/docs/ddl-of-carbondata.md b/docs/ddl-of-carbondata.md
index 6f74266..2f4cba1 100644
--- a/docs/ddl-of-carbondata.md
+++ b/docs/ddl-of-carbondata.md
@@ -76,7 +76,7 @@ CarbonData DDL statements are documented here,which includes:
   [LOCATION 'path']
   ```
 
-  **NOTE:** CarbonData also supports "STORED AS carbondata" and "USING carbondata". Find example code at [CarbonSessionExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala) in the CarbonData repo.
+  **NOTE:** CarbonData also supports "STORED AS carbondata" and "USING carbondata". Find example code at [CarbonSessionExample](https://github.com/apache/carbondata/blob/master/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala) in the CarbonData repo.
 ### Usage Guidelines
 
 **Supported properties:**
@@ -241,7 +241,7 @@ CarbonData DDL statements are documented here,which includes:
                   'SORT_SCOPE'='NO_SORT')
    ```
 
-   **NOTE:** CarbonData also supports "using carbondata". Find example code at [SparkSessionExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala) in the CarbonData repo.
+   **NOTE:** CarbonData also supports "using carbondata". Find example code at [SparkSessionExample](https://github.com/apache/carbondata/blob/master/examples/spark/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala) in the CarbonData repo.
 
    - ##### Table Block Size Configuration
 
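
As a quick illustration of the two syntaxes mentioned in these notes, here is a minimal sketch; the table and column names are hypothetical, and CarbonData jars and extensions are assumed to be on the classpath:

```scala
import org.apache.spark.sql.SparkSession

// Minimal sketch: a local session; CarbonData support is assumed available.
val spark = SparkSession.builder().master("local").appName("carbon-ddl").getOrCreate()

// Both documented syntaxes create a CarbonData table (hypothetical names).
spark.sql("CREATE TABLE IF NOT EXISTS t_stored (id INT, name STRING) STORED AS carbondata")
spark.sql("CREATE TABLE IF NOT EXISTS t_using (id INT, name STRING) USING carbondata")
```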
diff --git a/docs/documentation.md b/docs/documentation.md
index a40eed1..4141ad0 100644
--- a/docs/documentation.md
+++ b/docs/documentation.md
@@ -27,7 +27,7 @@ Apache CarbonData is a new big data file format for faster interactive query usi
 
 **File Format Concepts:** Start with the basics of understanding the [CarbonData file format](./file-structure-of-carbondata.md#carbondata-file-format) and its [storage structure](./file-structure-of-carbondata.md). This will help to understand other parts of the documentation, including deployment, programming and usage guides. 
 
-**Quick Start:** [Run an example program](./quick-start-guide.md#installing-and-configuring-carbondata-to-run-locally-with-spark-shell) on your local machine or [study some examples](https://github.com/apache/carbondata/tree/master/examples/spark2/src/main/scala/org/apache/carbondata/examples).
+**Quick Start:** [Run an example program](./quick-start-guide.md#installing-and-configuring-carbondata-to-run-locally-with-spark-shell) on your local machine or [study some examples](https://github.com/apache/carbondata/tree/master/examples/spark/src/main/scala/org/apache/carbondata/examples).
 
 **CarbonData SQL Language Reference:** CarbonData extends the Spark SQL language and adds several [DDL](./ddl-of-carbondata.md) and [DML](./dml-of-carbondata.md) statements to support operations on it. Refer to the [Reference Manual](./language-manual.md) to understand the supported features and functions.
 
diff --git a/docs/datamap-developer-guide.md b/docs/index-developer-guide.md
similarity index 89%
rename from docs/datamap-developer-guide.md
rename to docs/index-developer-guide.md
index d1748c1..d48c890 100644
--- a/docs/datamap-developer-guide.md
+++ b/docs/index-developer-guide.md
@@ -15,7 +15,7 @@
     limitations under the License.
 -->
 
-# DataMap Developer Guide
+# Index Developer Guide
 
 ### Introduction
 DataMap is a data structure that can be used to accelerate certain queries on a table. Different DataMaps can be implemented by developers. 
@@ -23,12 +23,12 @@ Currently, there are two types of DataMap supported:
 1. IndexDataMap: a DataMap that leverages an index to accelerate filter queries. The Lucene DataMap and BloomFilter DataMap belong to this type.
 2. MVDataMap: a DataMap that leverages Materialized Views to accelerate OLAP-style queries, such as SPJG queries (select, predicate, join, group by). The preaggregate, timeseries and mv DataMaps belong to this type.
 
-### DataMap Provider
+### Index Provider
 When a user issues `CREATE DATAMAP dm ON TABLE main USING 'provider'`, the corresponding DataMapProvider implementation is created and initialized. 
 Currently, the provider string can be:
 1. the class name of an IndexDataMapFactory implementation: developers can implement a new type of IndexDataMap by extending IndexDataMapFactory
 
 When a user issues `DROP DATAMAP dm ON TABLE main`, the corresponding DataMapProvider interface is called.
 
-Click for more details about [DataMap Management](./datamap/datamap-management.md#datamap-management) and supported [DSL](./datamap/datamap-management.md#overview).
+Click for more details about [DataMap Management](./index/index-management.md#index-management) and supported [DSL](./index/index-management.md#overview).
 
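
For context on the DDL flow just described, a minimal sketch using the shipped bloomfilter provider; it assumes `spark` is a SparkSession configured for CarbonData, and the column name is hypothetical:

```scala
// 'bloomfilter' is one of the shipped index providers; column name is hypothetical.
spark.sql("CREATE TABLE main (id INT, name STRING) STORED AS carbondata")
spark.sql(
  "CREATE DATAMAP dm ON TABLE main USING 'bloomfilter' " +
  "DMPROPERTIES ('INDEX_COLUMNS'='name')")
// Dropping the datamap calls back into the corresponding DataMapProvider.
spark.sql("DROP DATAMAP dm ON TABLE main")
```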
diff --git a/docs/index-server.md b/docs/index-server.md
index f80f67d..62e239d 100644
--- a/docs/index-server.md
+++ b/docs/index-server.md
@@ -198,11 +198,11 @@ that will authenticate the user to access the index server and no other service.
   
 ## Starting the Server
 ``` 
-./bin/spark-submit --master [yarn/local] --[optional parameters] --class org.apache.carbondata.indexserver.IndexServer [path to carbondata-spark2-<version>.jar]
+./bin/spark-submit --master [yarn/local] --[optional parameters] --class org.apache.carbondata.indexserver.IndexServer [path to carbondata-spark-<version>.jar]
 ```
 Or 
 ``` 
-./sbin/start-indexserver.sh --master yarn --num-executors 2 /<absolute path>/carbondata-spark2-1.6.0.0100.jar
+./sbin/start-indexserver.sh --master yarn --num-executors 2 /<absolute path>/carbondata-spark-<version>.jar
 ```
 
 ## FAQ
diff --git a/docs/datamap/bloomfilter-datamap-guide.md b/docs/index/bloomfilter-index-guide.md
similarity index 100%
rename from docs/datamap/bloomfilter-datamap-guide.md
rename to docs/index/bloomfilter-index-guide.md
diff --git a/docs/datamap/datamap-management.md b/docs/index/index-management.md
similarity index 99%
rename from docs/datamap/datamap-management.md
rename to docs/index/index-management.md
index 768ad06..01f3604 100644
--- a/docs/datamap/datamap-management.md
+++ b/docs/index/index-management.md
@@ -15,7 +15,7 @@
     limitations under the License.
 -->
 
-# CarbonData DataMap Management
+# CarbonData Index Management
 
 - [Overview](#overview)
 - [DataMap Management](#datamap-management)
diff --git a/docs/datamap/lucene-datamap-guide.md b/docs/index/lucene-index-guide.md
similarity index 100%
rename from docs/datamap/lucene-datamap-guide.md
rename to docs/index/lucene-index-guide.md
diff --git a/docs/datamap/mv-datamap-guide.md b/docs/index/mv-guide.md
similarity index 100%
rename from docs/datamap/mv-datamap-guide.md
rename to docs/index/mv-guide.md
diff --git a/docs/introduction.md b/docs/introduction.md
index 7c11718..a409f6c 100644
--- a/docs/introduction.md
+++ b/docs/introduction.md
@@ -69,15 +69,15 @@ CarbonData has rich set of features to support various use cases in Big Data ana
 
   CarbonData can read any carbondata file, automatically infer its schema, and provide a relational table view to perform SQL queries using Spark or any other application.
 
-### DataMaps
+### Index
 
 - ##### Bloom filter
 
-  CarbonData supports bloom filter as a datamap in order to quickly and efficiently prune the data for scanning and acheive faster query performance.
+  CarbonData supports a bloom filter index to quickly and efficiently prune the data for scanning and achieve faster query performance.
 
 - ##### Lucene
 
-  Lucene is popular for indexing text data which are long.CarbonData provides a lucene datamap so that text columns can be indexed using lucene and use the index result for efficient pruning of data to be retrieved during query.
+  Lucene is popular for indexing long text data. CarbonData supports a lucene index so that text columns can be indexed using lucene, and the index results are used to efficiently prune the data retrieved during a query.
 
 - ##### MV (Materialized Views)
 
diff --git a/docs/language-manual.md b/docs/language-manual.md
index d85c358..d8f30b0 100644
--- a/docs/language-manual.md
+++ b/docs/language-manual.md
@@ -24,9 +24,9 @@ CarbonData has its own parser, in addition to Spark's SQL Parser, to parse and p
 - [Data Types](./supported-data-types-in-carbondata.md)
 - Data Definition Statements
   - [DDL:](./ddl-of-carbondata.md)[Create](./ddl-of-carbondata.md#create-table),[Drop](./ddl-of-carbondata.md#drop-table),[Partition](./ddl-of-carbondata.md#partition),[Bucketing](./ddl-of-carbondata.md#bucketing),[Alter](./ddl-of-carbondata.md#alter-table),[CTAS](./ddl-of-carbondata.md#create-table-as-select),[External Table](./ddl-of-carbondata.md#create-external-table)
-  - [DataMaps](./datamap/datamap-management.md)
-    - [Bloom](./datamap/bloomfilter-datamap-guide.md)
-    - [Lucene](./datamap/lucene-datamap-guide.md)
+  - [Index](./index/index-management.md)
+    - [Bloom](./index/bloomfilter-index-guide.md)
+    - [Lucene](./index/lucene-index-guide.md)
   - Materialized Views (MV)
   - [Streaming](./streaming-guide.md)
 - Data Manipulation Statements
diff --git a/docs/performance-tuning.md b/docs/performance-tuning.md
index 7059605..f485388 100644
--- a/docs/performance-tuning.md
+++ b/docs/performance-tuning.md
@@ -127,7 +127,7 @@
   ```
 
   **NOTE:**
-  + BloomFilter can be created to enhance performance for queries with precise equal/in conditions. You can find more information about it in BloomFilter datamap [document](./datamap/bloomfilter-datamap-guide.md).
+  + A BloomFilter index can be created to enhance performance for queries with precise equal/in conditions. You can find more information in the BloomFilter index [document](./index/bloomfilter-index-guide.md).
 
 
 ## Configuration for Optimizing Data Loading performance for Massive Data
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 357f297..cdd5be2 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -24,8 +24,8 @@ CarbonData provides SDK to facilitate
 
 # SDK Writer
 
-In the carbon jars package, there exist a carbondata-store-sdk-x.x.x-SNAPSHOT.jar, including SDK writer and reader. 
-If user want to use SDK, except carbondata-store-sdk-x.x.x-SNAPSHOT.jar, 
+The carbon jars package contains a carbondata-sdk-x.x.x-SNAPSHOT.jar, which includes the SDK writer and reader. 
+Using the SDK requires, in addition to carbondata-sdk-x.x.x-SNAPSHOT.jar, 
 carbondata-core-x.x.x-SNAPSHOT.jar, carbondata-common-x.x.x-SNAPSHOT.jar, 
 carbondata-format-x.x.x-SNAPSHOT.jar, carbondata-hadoop-x.x.x-SNAPSHOT.jar and carbondata-processing-x.x.x-SNAPSHOT.jar.
 Alternatively, users can use carbondata-sdk.jar directly.
@@ -246,7 +246,7 @@ Instead of creating table and query it, you can also query that file directly wi
 ```
 SELECT * FROM carbonfile.`$Path`
 ```
-Find example code at [DirectSQLExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala) in the CarbonData repo.
+Find example code at [DirectSQLExample](https://github.com/apache/carbondata/blob/master/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala) in the CarbonData repo.
 ## API List
 
 ### Class org.apache.carbondata.sdk.file.CarbonWriterBuilder
@@ -612,10 +612,10 @@ while (reader.hasNext()) {
 reader.close();
 ```
 
-Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java) in the CarbonData repo.
+Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/blob/master/examples/spark/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java) in the CarbonData repo.
 
 The SDK reader also supports reading carbondata files and filling them into Apache Arrow vectors.
-Find example code at [ArrowCarbonReaderTest](https://github.com/apache/carbondata/blob/master/store/sdk/src/test/java/org/apache/carbondata/sdk/file/ArrowCarbonReaderTest.java) in the CarbonData repo.
+Find example code at [ArrowCarbonReaderTest](https://github.com/apache/carbondata/blob/master/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ArrowCarbonReaderTest.java) in the CarbonData repo.
 
 
 ## API List
@@ -1007,7 +1007,7 @@ public Field(String name, String type);
 public Field(ColumnSchema columnSchema);
 ```
 
-Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java) in the CarbonData repo.
+Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob/master/examples/spark/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java) in the CarbonData repo.
 
 
 # Common API List for CarbonReader and CarbonWriter
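
In addition to the linked examples, a minimal SDK-reader sketch; the output path and projected columns are hypothetical:

```scala
import org.apache.carbondata.sdk.file.CarbonReader

// Minimal sketch: read a carbondata folder with the SDK reader, no Spark needed.
// "/tmp/carbon_output" and the projected columns are hypothetical.
val reader = CarbonReader.builder("/tmp/carbon_output", "_temp")
  .projection(Array("name", "age"))
  .build()
while (reader.hasNext) {
  val row = reader.readNextRow.asInstanceOf[Array[AnyRef]]
  println(row.mkString(", "))
}
reader.close()
```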
diff --git a/docs/streaming-guide.md b/docs/streaming-guide.md
index d007e03..97618bb 100644
--- a/docs/streaming-guide.md
+++ b/docs/streaming-guide.md
@@ -39,7 +39,7 @@
 ## Quick example
 Download and unzip spark-2.4.4-bin-hadoop2.7.tgz, and export $SPARK_HOME
 
-Package carbon jar, and copy assembly/target/scala-2.11/carbondata_2.11-1.6.0-SNAPSHOT-shade-hadoop2.7.2.jar to $SPARK_HOME/jars
+Package carbon jar, and copy assembly/target/scala-2.11/carbondata_2.11-2.0.0-SNAPSHOT-shade-hadoop2.7.2.jar to $SPARK_HOME/jars
 ```shell
 mvn clean package -DskipTests -Pspark-2.4
 ```
@@ -274,7 +274,7 @@ ALTER TABLE streaming_table COMPACT 'close_streaming'
 1. reject setting the streaming property from true to false.
 2. reject UPDATE/DELETE commands on the streaming table.
 3. reject creating an MV on the streaming table.
-4. reject add the streaming property on the table with MV DataMap.
+4. reject adding the streaming property on a table with an MV.
 5. if the table has dictionary columns, it does not support concurrent data loading.
 6. block deleting a "streaming" segment while streaming ingestion is running.
 7. block dropping the streaming table while streaming ingestion is running.
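
A minimal sketch of the streaming table lifecycle these constraints apply to, assuming `spark` is a SparkSession configured for CarbonData (the table name is hypothetical):

```scala
// 'streaming'='true' marks the table for streaming ingestion (hypothetical name).
spark.sql(
  "CREATE TABLE stream_src (id INT, name STRING) STORED AS carbondata " +
  "TBLPROPERTIES ('streaming'='true')")
// ... run structured streaming ingestion into stream_src, then stop it ...
// Convert the streaming segments to the regular columnar format:
spark.sql("ALTER TABLE stream_src COMPACT 'close_streaming'")
```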
diff --git a/examples/flink/pom.xml b/examples/flink/pom.xml
index a9f04ca..f87f68b 100644
--- a/examples/flink/pom.xml
+++ b/examples/flink/pom.xml
@@ -52,7 +52,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
+      <artifactId>carbondata-spark</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
diff --git a/integration/flink/pom.xml b/integration/flink/pom.xml
index 0acbd53..d832385 100644
--- a/integration/flink/pom.xml
+++ b/integration/flink/pom.xml
@@ -112,7 +112,7 @@
         </dependency>
         <dependency>
             <groupId>org.apache.carbondata</groupId>
-            <artifactId>carbondata-store-sdk</artifactId>
+            <artifactId>carbondata-sdk</artifactId>
             <version>${project.version}</version>
             <exclusions>
                 <exclusion>
@@ -201,7 +201,7 @@
             <dependencies>
                 <dependency>
                     <groupId>org.apache.carbondata</groupId>
-                    <artifactId>carbondata-spark2</artifactId>
+                    <artifactId>carbondata-spark</artifactId>
                     <version>${project.version}</version>
                     <scope>test</scope>
                     <exclusions>
@@ -218,7 +218,7 @@
             <dependencies>
                 <dependency>
                     <groupId>org.apache.carbondata</groupId>
-                    <artifactId>carbondata-spark2</artifactId>
+                    <artifactId>carbondata-spark</artifactId>
                     <version>${project.version}</version>
                     <scope>test</scope>
                     <exclusions>
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
index f3cbf44..a607c83 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
@@ -94,6 +94,8 @@ class CarbonObjectInspector extends SettableStructObjectInspector {
       return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
     } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
       return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
+    } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
+      return PrimitiveObjectInspectorFactory.writableByteObjectInspector;
     } else if (typeInfo instanceof VarcharTypeInfo) {
       return new WritableHiveVarcharObjectInspector((VarcharTypeInfo) typeInfo);
     } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
diff --git a/integration/presto/pom.xml b/integration/presto/pom.xml
index a98feeb..110e4de 100644
--- a/integration/presto/pom.xml
+++ b/integration/presto/pom.xml
@@ -504,7 +504,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-store-sdk</artifactId>
+      <artifactId>carbondata-sdk</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
diff --git a/integration/spark-common-cluster-test/pom.xml b/integration/spark-common-cluster-test/pom.xml
index 758e3f5..87214d1 100644
--- a/integration/spark-common-cluster-test/pom.xml
+++ b/integration/spark-common-cluster-test/pom.xml
@@ -37,13 +37,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark-datasource</artifactId>
-      <version>${project.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark-common</artifactId>
+      <artifactId>carbondata-spark</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
@@ -82,13 +76,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
-      <version>${project.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-store-sdk</artifactId>
+      <artifactId>carbondata-sdk</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
index 3a211e2..05e5560 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
@@ -42,7 +42,7 @@ import org.apache.carbondata.sdk.file.CarbonWriter
 
 class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
 
-  val filePath = TestQueryExecutor.integrationPath + "/spark-common-test/src/test/resources"
+  val filePath = TestQueryExecutor.integrationPath + "/spark/src/test/resources"
   val writerPath =
     s"${ resourcesPath }" + "/SparkCarbonFileFormat/WriterOutputComplex/"
 
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/datasource/SparkCarbonDataSourceTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/datasource/SparkCarbonDataSourceTestCase.scala
index aa271c4..1e50100 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/datasource/SparkCarbonDataSourceTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/datasource/SparkCarbonDataSourceTestCase.scala
@@ -40,7 +40,7 @@ import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}
 class SparkCarbonDataSourceTestCase extends FunSuite with BeforeAndAfterAll {
   import spark._
 
-  val warehouse1 = s"${TestQueryExecutor.projectPath}/integration/spark-datasource/target/warehouse"
+  val warehouse1 = s"${TestQueryExecutor.projectPath}/integration/spark/target/warehouse"
 
   test("test write using dataframe") {
     import sqlContext.implicits._
@@ -617,7 +617,7 @@ class SparkCarbonDataSourceTestCase extends FunSuite with BeforeAndAfterAll {
       "double, HQ_DEPOSIT double) row format delimited fields terminated by ',' collection items " +
       "terminated by '$'")
     val sourceFile = FileFactory
-      .getPath(s"$resource" + "../../../../../spark-datasource/src/test/resources/Array.csv")
+      .getPath(s"$resource" + "../../../../../spark/src/test/resources/Array.csv")
       .toString
     sql(s"load data local inpath '$sourceFile' into table array_com_hive")
     sql(
@@ -644,7 +644,7 @@ class SparkCarbonDataSourceTestCase extends FunSuite with BeforeAndAfterAll {
       "terminated by '$' map keys terminated by '&'")
     val sourceFile = FileFactory
       .getPath(
-        s"$resource" + "../../../../../spark-datasource/src/test/resources/structofarray.csv")
+        s"$resource" + "../../../../../spark/src/test/resources/structofarray.csv")
       .toString
     sql(s"load data local inpath '$sourceFile' into table STRUCT_OF_ARRAY_com_hive")
     sql(
@@ -1274,7 +1274,7 @@ class SparkCarbonDataSourceTestCase extends FunSuite with BeforeAndAfterAll {
       "terminated by ',' LINES terminated by '\n' stored as textfile")
     val sourceFile = FileFactory
       .getPath(s"$resource" +
-               "../../../../../spark-datasource/src/test/resources/vardhandaterestruct.csv")
+               "../../../../../spark/src/test/resources/vardhandaterestruct.csv")
       .toString
     sql(s"load data local inpath '$sourceFile' into table fileformat_drop_hive")
     sql(
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/DataSourceTestUtil.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/DataSourceTestUtil.scala
index 8a5b154..94fb72d 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/DataSourceTestUtil.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/DataSourceTestUtil.scala
@@ -35,10 +35,10 @@ object DataSourceTestUtil {
 
   val rootPath = new File(this.getClass.getResource("/").getPath
                           + "../../../..").getCanonicalPath
-  val warehouse1 = FileFactory.getPath(s"$rootPath/integration/spark-datasource/target/warehouse")
+  val warehouse1 = FileFactory.getPath(s"$rootPath/integration/spark/target/warehouse")
     .toString
-  val resource = s"$rootPath/integration/spark-datasource/src/test/resources"
-  val metaStoreDB1 = s"$rootPath/integration/spark-datasource/target"
+  val resource = s"$rootPath/integration/spark/src/test/resources"
+  val metaStoreDB1 = s"$rootPath/integration/spark/target"
   val spark = SparkSession
     .builder()
     .enableHiveSupport()
diff --git a/integration/spark-common-test/src/test/resources/structofarray.csv b/integration/spark-common-test/src/test/resources/structofarray.csv
deleted file mode 100644
index ef21b44..0000000
--- a/integration/spark-common-test/src/test/resources/structofarray.csv
+++ /dev/null
@@ -1,10 +0,0 @@
-Cust00000000000000000000,2015,1,20,M,SSC,Y,123456789$2015-01-01  00:00:00$100&3000$100.123&3000.234$United Kingdom&England$2015-01-01  00:00:00&2014-01-01  00:00:00,42,104,160,325046028.8,859616748.6
-Cust00000000000000000001,2015,1,30,F,Degree,N,123456790$2015-01-02  00:00:00$101&3000$101.123&3001.234$United States&MO$2015-01-02  00:00:00&2014-01-02  00:00:00,141,181,54,378476092.1,818599132.6
-Cust00000000000000000002,2015,1,40,M,graduation,D,123456791$2015-01-03  00:00:00$102&3000$102.123&3002.234$United States&OR$2015-01-03  00:00:00&2014-01-03  00:00:00,138,43,175,408335001.4,906020942.6
-Cust00000000000000000003,2015,1,50,F,PG,Y,123456792$2015-01-04  00:00:00$103&3000$103.123&3003.234$Australia&Victoria$2015-01-04  00:00:00&2014-01-04  00:00:00,96,63,184,493146274.5,556184083.3
-Cust00000000000000000004,2015,1,60,M,MS,N,123456793$2015-01-05  00:00:00$104&3000$104.123&3004.234$United States&AL$2015-01-05  00:00:00&2014-01-05  00:00:00,115,172,165,457941392.3,641744932.5
-Cust00000000000000000005,2015,1,70,F,Doctor,D,123456794$2015-01-06  00:00:00$105&3000$105.123&3005.234$United States&NJ$2015-01-06  00:00:00&2014-01-06  00:00:00,178,192,178,112452170.2,502438883.3
-Cust00000000000000000006,2015,1,80,M,Layer,Y,123456795$2015-01-07  00:00:00$106&3000$106.123&3006.234$United States&IL$2015-01-07  00:00:00&2014-01-07  00:00:00,172,194,49,943273831.2,37711205.33
-Cust00000000000000000007,2015,1,90,F,Cop,N,123456796$2015-01-08  00:00:00$107&3000$107.123&3007.234$United States&TN$2015-01-08  00:00:00&2014-01-08  00:00:00,163,23,180,991766321.3,452456856.7
-Cust00000000000000000008,2015,1,95,M,Bank,D,123456797$2015-01-09  00:00:00$108&3000$108.123&3008.234$Israel&Tel Aviv$2015-01-09  00:00:00&2014-01-09  00:00:00,113,18,176,747561503.5,388896200.6
-Cust00000000000000000009,2015,1,45,F,Group1,Y,123456798$2015-01-10  00:00:00$109&3000$109.123&3009.234$France&Ile-de-France$2015-01-10  00:00:00&2014-01-10  00:00:00,50,99,10,667010292.4,910085933.7
\ No newline at end of file
diff --git a/integration/spark-common/pom.xml b/integration/spark-common/pom.xml
deleted file mode 100644
index 5fddef0..0000000
--- a/integration/spark-common/pom.xml
+++ /dev/null
@@ -1,298 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-    Licensed to the Apache Software Foundation (ASF) under one or more
-    contributor license agreements.  See the NOTICE file distributed with
-    this work for additional information regarding copyright ownership.
-    The ASF licenses this file to You under the Apache License, Version 2.0
-    (the "License"); you may not use this file except in compliance with
-    the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS,
-    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and
-    limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.apache.carbondata</groupId>
-    <artifactId>carbondata-parent</artifactId>
-    <version>2.0.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <artifactId>carbondata-spark-common</artifactId>
-  <name>Apache CarbonData :: Spark Common</name>
-
-  <properties>
-    <dev.path>${basedir}/../../dev</dev.path>
-    <jacoco.append>true</jacoco.append>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-hive</artifactId>
-      <version>${project.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.commons</groupId>
-          <artifactId>*</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-exec</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-service</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-geo</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-streaming</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark-datasource</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-compiler</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-reflect</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-library</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <scope>provided</scope>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <resources>
-      <resource>
-        <directory>src/resources</directory>
-      </resource>
-      <resource>
-        <directory>.</directory>
-        <includes>
-          <include>CARBON_SPARK_INTERFACELogResource.properties</include>
-        </includes>
-      </resource>
-    </resources>
-    <plugins>
-      <plugin>
-        <groupId>org.scala-tools</groupId>
-        <artifactId>maven-scala-plugin</artifactId>
-        <version>2.15.2</version>
-        <executions>
-          <execution>
-            <id>compile</id>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-            <phase>compile</phase>
-          </execution>
-          <execution>
-            <id>testCompile</id>
-            <goals>
-              <goal>testCompile</goal>
-            </goals>
-            <phase>test</phase>
-          </execution>
-          <execution>
-            <phase>process-resources</phase>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <source>1.8</source>
-          <target>1.8</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <version>3.0.0</version>
-        <executions>
-          <execution>
-            <id>compile</id>
-            <phase>compile</phase>
-            <goals>
-              <goal>copy</goal>
-            </goals>
-            <configuration>
-              <artifactItems>
-                <artifactItem>
-                  <groupId>org.apache.carbondata</groupId>
-                  <artifactId>carbondata-format</artifactId>
-                  <version>${project.version}</version>
-                </artifactItem>
-                <artifactItem>
-                  <groupId>com.google.code.gson</groupId>
-                  <artifactId>gson</artifactId>
-                  <version>2.4</version>
-                </artifactItem>
-                <artifactItem>
-                  <groupId>org.xerial.snappy</groupId>
-                  <artifactId>snappy-java</artifactId>
-                  <version>${snappy.version}</version>
-                </artifactItem>
-              </artifactItems>
-              <outputDirectory>${project.build.directory}/jars</outputDirectory>
-              <!-- other configurations here -->
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
-        <!-- Note config is repeated in scalatest config -->
-        <configuration>
-          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m </argLine>
-          <systemProperties>
-            <java.awt.headless>true</java.awt.headless>
-          </systemProperties>
-          <failIfNoTests>false</failIfNoTests>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-
-  <profiles>
-    <profile>
-      <id>build-all</id>
-      <properties>
-        <spark.version>2.3.4</spark.version>
-        <scala.binary.version>2.11</scala.binary.version>
-        <scala.version>2.11.8</scala.version>
-      </properties>
-    </profile>
-    <profile>
-      <id>sdvtest</id>
-      <properties>
-        <maven.test.skip>true</maven.test.skip>
-      </properties>
-    </profile>
-    <profile>
-      <id>spark-2.3</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <properties>
-        <spark.version>2.3.4</spark.version>
-        <scala.binary.version>2.11</scala.binary.version>
-        <scala.version>2.11.8</scala.version>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
-            <configuration>
-              <excludes>
-                <exclude>src/main/spark2.4</exclude>
-              </excludes>
-            </configuration>
-          </plugin>
-          <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>build-helper-maven-plugin</artifactId>
-            <version>3.0.0</version>
-            <executions>
-              <execution>
-                <id>add-source</id>
-                <phase>generate-sources</phase>
-                <goals>
-                  <goal>add-source</goal>
-                </goals>
-                <configuration>
-                  <sources>
-                    <source>src/main/spark2.3</source>
-                  </sources>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-    <profile>
-      <id>spark-2.4</id>
-      <properties>
-        <spark.version>2.4.4</spark.version>
-        <scala.binary.version>2.11</scala.binary.version>
-        <scala.version>2.11.8</scala.version>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
-            <configuration>
-              <excludes>
-                <exclude>src/main/spark2.3</exclude>
-              </excludes>
-            </configuration>
-          </plugin>
-          <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>build-helper-maven-plugin</artifactId>
-            <version>3.0.0</version>
-            <executions>
-              <execution>
-                <id>add-source</id>
-                <phase>generate-sources</phase>
-                <goals>
-                  <goal>add-source</goal>
-                </goals>
-                <configuration>
-                  <sources>
-                    <source>src/main/spark2.4</source>
-                  </sources>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
-</project>
diff --git a/integration/spark-datasource/pom.xml b/integration/spark-datasource/pom.xml
deleted file mode 100644
index 1f1cac3..0000000
--- a/integration/spark-datasource/pom.xml
+++ /dev/null
@@ -1,214 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-    Licensed to the Apache Software Foundation (ASF) under one or more
-    contributor license agreements.  See the NOTICE file distributed with
-    this work for additional information regarding copyright ownership.
-    The ASF licenses this file to You under the Apache License, Version 2.0
-    (the "License"); you may not use this file except in compliance with
-    the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS,
-    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and
-    limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.apache.carbondata</groupId>
-    <artifactId>carbondata-parent</artifactId>
-    <version>2.0.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <artifactId>carbondata-spark-datasource</artifactId>
-  <name>Apache CarbonData :: Spark Datasource</name>
-
-  <properties>
-    <dev.path>${basedir}/../../dev</dev.path>
-    <jacoco.append>true</jacoco.append>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-hadoop</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-store-sdk</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-repl_${scala.binary.version}</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-aws</artifactId>
-      <version>${hadoop.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-core</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-annotations</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-databind</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <resources>
-      <resource>
-        <directory>src/resources</directory>
-      </resource>
-      <resource>
-        <directory>.</directory>
-        <includes>
-          <include>CARBON_SPARK_INTERFACELogResource.properties</include>
-        </includes>
-      </resource>
-    </resources>
-    <plugins>
-      <plugin>
-        <groupId>org.scala-tools</groupId>
-        <artifactId>maven-scala-plugin</artifactId>
-        <version>2.15.2</version>
-        <executions>
-          <execution>
-            <id>compile</id>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-            <phase>compile</phase>
-          </execution>
-          <execution>
-            <id>testCompile</id>
-            <goals>
-              <goal>testCompile</goal>
-            </goals>
-            <phase>test</phase>
-          </execution>
-          <execution>
-            <phase>process-resources</phase>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <source>1.8</source>
-          <target>1.8</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
-        <!-- Note config is repeated in scalatest config -->
-        <configuration>
-          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
-          <systemProperties>
-            <java.awt.headless>true</java.awt.headless>
-            <spark.carbon.hive.schema.store>${carbon.hive.based.metastore}</spark.carbon.hive.schema.store>
-          </systemProperties>
-          <failIfNoTests>false</failIfNoTests>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest-maven-plugin</artifactId>
-        <version>1.0</version>
-        <!-- Note config is repeated in surefire config -->
-        <configuration>
-          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <junitxml>.</junitxml>
-          <filereports>CarbonTestSuite.txt</filereports>
-          <argLine> ${argLine} -ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
-          </argLine>
-          <stderr />
-          <environmentVariables>
-          </environmentVariables>
-          <systemProperties>
-            <java.awt.headless>true</java.awt.headless>
-            <spark.carbon.hive.schema.store>${carbon.hive.based.metastore}</spark.carbon.hive.schema.store>
-          </systemProperties>
-        </configuration>
-        <executions>
-          <execution>
-            <id>test</id>
-            <goals>
-              <goal>test</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <profiles>
-    <profile>
-      <id>build-all</id>
-      <properties>
-        <spark.version>2.3.4</spark.version>
-        <scala.binary.version>2.11</scala.binary.version>
-        <scala.version>2.11.8</scala.version>
-      </properties>
-    </profile>
-    <profile>
-      <id>sdvtest</id>
-      <properties>
-        <maven.test.skip>true</maven.test.skip>
-      </properties>
-    </profile>
-    <profile>
-      <id>spark-2.3</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <properties>
-        <spark.version>2.3.4</spark.version>
-        <scala.binary.version>2.11</scala.binary.version>
-        <scala.version>2.11.8</scala.version>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
-</project>
diff --git a/integration/spark-datasource/src/test/java/org/apache/carbondata/sdk/util/BinaryUtil.java b/integration/spark-datasource/src/test/java/org/apache/carbondata/sdk/util/BinaryUtil.java
deleted file mode 100644
index 2de5df3..0000000
--- a/integration/spark-datasource/src/test/java/org/apache/carbondata/sdk/util/BinaryUtil.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.sdk.util;
-
-import org.apache.carbondata.core.metadata.datatype.DataTypes;
-import org.apache.carbondata.sdk.file.CarbonWriter;
-import org.apache.carbondata.sdk.file.Field;
-import org.apache.carbondata.sdk.file.Schema;
-
-import java.io.*;
-
-import static org.apache.carbondata.sdk.file.utils.SDKUtil.listFiles;
-
-public class BinaryUtil {
-  public static void binaryToCarbon(String sourceImageFolder, String outputPath,
-                                    String sufAnnotation, final String sufImage) throws Exception {
-    Field[] fields = new Field[5];
-    fields[0] = new Field("binaryId", DataTypes.INT);
-    fields[1] = new Field("binaryName", DataTypes.STRING);
-    fields[2] = new Field("binary", DataTypes.BINARY);
-    fields[3] = new Field("labelName", DataTypes.STRING);
-    fields[4] = new Field("labelContent", DataTypes.STRING);
-    CarbonWriter writer = CarbonWriter
-        .builder()
-        .outputPath(outputPath)
-        .withCsvInput(new Schema(fields))
-        .withBlockSize(256)
-        .writtenBy("binaryExample")
-        .withPageSizeInMb(1)
-        .build();
-    binaryToCarbon(sourceImageFolder, writer, sufAnnotation, sufImage);
-  }
-
-  public static boolean binaryToCarbon(String sourceImageFolder, CarbonWriter writer,
-      String sufAnnotation, final String sufImage) throws Exception {
-    int num = 1;
-
-    byte[] originBinary = null;
-
-    // read and write image data
-    for (int j = 0; j < num; j++) {
-
-      Object[] files = listFiles(sourceImageFolder, sufImage).toArray();
-
-      if (null != files) {
-        for (int i = 0; i < files.length; i++) {
-          // read the image file fully into a byte array
-          BufferedInputStream bis = new BufferedInputStream(
-              new FileInputStream(new File((String) files[i])));
-          originBinary = new byte[bis.available()];
-          bis.read(originBinary);
-
-          String labelFileName = ((String) files[i]).split(sufImage)[0] + sufAnnotation;
-          BufferedInputStream txtBis = new BufferedInputStream(new FileInputStream(labelFileName));
-          // read the annotation file in one call and decode it as UTF-8 text;
-          // this also avoids looping forever when the label file is empty
-          byte[] labelBinary = new byte[txtBis.available()];
-          txtBis.read(labelBinary);
-          String labelValue = new String(labelBinary, "UTF-8");
-          // write data
-          writer.write(new Object[]{i, (String) files[i], originBinary,
-              labelFileName, labelValue});
-          bis.close();
-          txtBis.close();
-        }
-      }
-      writer.close();
-    }
-    return true;
-  }
-
-}
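The deleted BinaryUtil drives the SDK CarbonWriter directly. For reference, a
minimal Scala sketch of the same write path, assuming only the builder API
already visible in the code above; the output path, field layout, and payload
are illustrative:

    import org.apache.carbondata.core.metadata.datatype.DataTypes
    import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}

    object BinaryWriteSketch {
      def main(args: Array[String]): Unit = {
        val fields = Array(
          new Field("binaryId", DataTypes.INT),
          new Field("binary", DataTypes.BINARY))
        val writer = CarbonWriter.builder()
          .outputPath("/tmp/carbon-binary-out") // illustrative path
          .withCsvInput(new Schema(fields))
          .writtenBy("BinaryWriteSketch")
          .build()
        // each row is an Object[]; a BINARY column takes a raw byte array
        writer.write(Array[AnyRef](Int.box(0), "hello".getBytes("UTF-8")))
        writer.close()
      }
    }

write() appends one row per call; close() flushes the carbon data and index
files under the output path.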
diff --git a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceBinaryTest.scala b/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceBinaryTest.scala
deleted file mode 100644
index c5aae8d..0000000
--- a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceBinaryTest.scala
+++ /dev/null
@@ -1,729 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.spark.sql.carbondata.datasource
-
-import java.io.File
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.sdk.util.BinaryUtil
-import org.apache.commons.codec.binary.{Base64, Hex}
-import org.apache.commons.io.FileUtils
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.carbondata.datasource.TestUtil._
-import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
-import org.apache.spark.util.SparkUtil
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
-
-class SparkCarbonDataSourceBinaryTest extends FunSuite with BeforeAndAfterAll {
-
-    var writerPath = new File(this.getClass.getResource("/").getPath
-            + "../../target/SparkCarbonFileFormat/WriterOutput/")
-            .getCanonicalPath
-    var resourcesPath = new File(this.getClass.getResource("/").getPath
-            + "../../../spark-common-test/src/test/resources/")
-            .getCanonicalPath
-    var outputPath = writerPath + 2
-    // getCanonicalPath gives a path with '\', but the code expects '/'
-    writerPath = writerPath.replace("\\", "/")
-
-    var sdkPath = new File(this.getClass.getResource("/").getPath + "../../../../store/sdk/")
-            .getCanonicalPath
-
-    def buildTestBinaryData(): Any = {
-        FileUtils.deleteDirectory(new File(writerPath))
-        FileUtils.deleteDirectory(new File(outputPath))
-
-        val sourceImageFolder = sdkPath + "/src/test/resources/image/flowers"
-        val sufAnnotation = ".txt"
-        BinaryUtil.binaryToCarbon(sourceImageFolder, writerPath, sufAnnotation, ".jpg")
-    }
-
-    def cleanTestData() = {
-        FileUtils.deleteDirectory(new File(writerPath))
-        FileUtils.deleteDirectory(new File(outputPath))
-    }
-
-    import spark._
-
-    override def beforeAll(): Unit = {
-        CarbonProperties.getInstance()
-                .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
-                    CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
-        buildTestBinaryData()
-
-        FileUtils.deleteDirectory(new File(outputPath))
-        sql("DROP TABLE IF EXISTS sdkOutputTable")
-    }
-
-    override def afterAll(): Unit = {
-        cleanTestData()
-        sql("DROP TABLE IF EXISTS sdkOutputTable")
-    }
-
-    test("Test direct sql read carbon") {
-        assert(new File(writerPath).exists())
-        checkAnswer(
-            sql(s"SELECT COUNT(*) FROM carbon.`$writerPath`"),
-            Seq(Row(3)))
-    }
-
-    test("Test read image carbon with spark carbon file format, generate by sdk, CTAS") {
-        sql("DROP TABLE IF EXISTS binaryCarbon")
-        sql("DROP TABLE IF EXISTS binaryCarbon3")
-        FileUtils.deleteDirectory(new File(outputPath))
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-            sql(s"CREATE TABLE binaryCarbon USING CARBON OPTIONS(PATH '$writerPath')")
-            sql(s"CREATE TABLE binaryCarbon3 USING CARBON OPTIONS(PATH '$outputPath')" + " AS SELECT * FROM binaryCarbon")
-        } else {
-            sql(s"CREATE TABLE binaryCarbon USING CARBON LOCATION '$writerPath'")
-            sql(s"CREATE TABLE binaryCarbon3 USING CARBON LOCATION '$outputPath'" + " AS SELECT * FROM binaryCarbon")
-        }
-        checkAnswer(sql("SELECT COUNT(*) FROM binaryCarbon"),
-            Seq(Row(3)))
-        checkAnswer(sql("SELECT COUNT(*) FROM binaryCarbon3"),
-            Seq(Row(3)))
-        sql("DROP TABLE IF EXISTS binaryCarbon")
-        sql("DROP TABLE IF EXISTS binaryCarbon3")
-        FileUtils.deleteDirectory(new File(outputPath))
-    }
-
-    test("Don't support sort_columns") {
-        import spark._
-        sql("DROP TABLE IF EXISTS binaryTable")
-        var exception = intercept[Exception] {
-            sql(
-                s"""
-                   | CREATE TABLE binaryTable (
-                   |    id DOUBLE,
-                   |    label BOOLEAN,
-                   |    name STRING,
-                   |    image BINARY,
-                   |    autoLabel BOOLEAN)
-                   | using carbon
-                   | options('SORT_COLUMNS'='image')
-            """.stripMargin)
-            // TODO: this should throw an exception at table creation time
-            sql("SELECT COUNT(*) FROM binaryTable").show()
-        }
-        assert(exception.getCause.getMessage.contains("sort columns not supported for array, struct, map, double, float, decimal, varchar, binary"))
-
-        sql("DROP TABLE IF EXISTS binaryTable")
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-            exception = intercept[Exception] {
-                sql(
-                    s"""
-                       | CREATE TABLE binaryTable
-                       | using carbon
-                       | options(PATH '$writerPath',
-                       |  'SORT_COLUMNS'='image')
-                """.stripMargin)
-                sql("SELECT COUNT(*) FROM binaryTable").show()
-            }
-        } else {
-            exception = intercept[Exception] {
-                sql(
-                    s"""
-                       | CREATE TABLE binaryTable
-                       | using carbon
-                       | options('SORT_COLUMNS'='image')
-                       | LOCATION '$writerPath'
-                """.stripMargin)
-                sql("SELECT COUNT(*) FROM binaryTable").show()
-            }
-        }
-        assert(exception.getMessage.contains("Cannot use sort columns during infer schema"))
-
-
-        sql("DROP TABLE IF EXISTS binaryTable")
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-            exception = intercept[Exception] {
-                sql(
-                    s"""
-                       | CREATE TABLE binaryTable (
-                       |    id DOUBLE,
-                       |    label BOOLEAN,
-                       |    name STRING,
-                       |    image BINARY,
-                       |    autoLabel BOOLEAN)
-                       | using carbon
-                       | options(PATH '$writerPath',
-                       | 'SORT_COLUMNS'='image')
-                 """.stripMargin)
-                sql("SELECT COUNT(*) FROM binaryTable").show()
-            }
-        } else {
-            exception = intercept[Exception] {
-                sql(
-                    s"""
-                       | CREATE TABLE binaryTable (
-                       |    id DOUBLE,
-                       |    label BOOLEAN,
-                       |    name STRING,
-                       |    image BINARY,
-                       |    autoLabel BOOLEAN)
-                       | using carbon
-                       | options('SORT_COLUMNS'='image')
-                       | LOCATION '$writerPath'
-                 """.stripMargin)
-                sql("SELECT COUNT(*) FROM binaryTable").show()
-            }
-        }
-        assert(exception.getCause.getMessage.contains("sort columns not supported for array, struct, map, double, float, decimal, varchar, binary"))
-    }
-
-    test("Don't support long_string_columns for binary") {
-        import spark._
-        sql("DROP TABLE IF EXISTS binaryTable")
-        val exception = intercept[Exception] {
-            sql(
-                s"""
-                   | CREATE TABLE binaryTable (
-                   |    id DOUBLE,
-                   |    label BOOLEAN,
-                   |    name STRING,
-                   |    image BINARY,
-                   |    autoLabel BOOLEAN)
-                   | using carbon
-                   | options('long_string_columns'='image')
-       """.stripMargin)
-            sql("SELECT COUNT(*) FROM binaryTable").show()
-        }
-        assert(exception.getCause.getMessage.contains("long string column : image is not supported for data type: BINARY"))
-    }
-
-    test("Don't support insert into partition table") {
-        if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-            sql("DROP TABLE IF EXISTS binaryCarbon")
-            sql("DROP TABLE IF EXISTS binaryCarbon2")
-            sql("DROP TABLE IF EXISTS binaryCarbon3")
-            sql("DROP TABLE IF EXISTS binaryCarbon4")
-            sql(s"CREATE TABLE binaryCarbon USING CARBON LOCATION '$writerPath'")
-            sql(
-                s"""
-                   | CREATE TABLE binaryCarbon2(
-                   |    binaryId INT,
-                   |    binaryName STRING,
-                   |    binary BINARY,
-                   |    labelName STRING,
-                   |    labelContent STRING
-                   |) USING CARBON""".stripMargin)
-            sql(
-                s"""
-                   | CREATE TABLE binaryCarbon3(
-                   |    binaryId INT,
-                   |    binaryName STRING,
-                   |    binary BINARY,
-                   |    labelName STRING,
-                   |    labelContent STRING
-                   |) USING CARBON partitioned by (binary) """.stripMargin)
-            sql("select binaryId,binaryName,binary,labelName,labelContent from binaryCarbon where binaryId=0").show()
-
-            sql("insert into binaryCarbon2 select binaryId,binaryName,binary,labelName,labelContent from binaryCarbon where binaryId=0 ")
-            val carbonResult2 = sql("SELECT * FROM binaryCarbon2")
-
-            sql("create table binaryCarbon4 using carbon select binaryId,binaryName,binary,labelName,labelContent from binaryCarbon where binaryId=0 ")
-            val carbonResult4 = sql("SELECT * FROM binaryCarbon4")
-            val carbonResult = sql("SELECT * FROM binaryCarbon")
-
-            assert(3 == carbonResult.collect().length)
-            assert(1 == carbonResult4.collect().length)
-            assert(1 == carbonResult2.collect().length)
-            checkAnswer(carbonResult4, carbonResult2)
-
-            try {
-                sql("insert into binaryCarbon3 select binaryId,binaryName,binary,labelName,labelContent from binaryCarbon where binaryId=0 ")
-                assert(false)
-            } catch {
-                case e: Exception =>
-                    e.printStackTrace()
-                    assert(true)
-            }
-            sql("DROP TABLE IF EXISTS binaryCarbon")
-            sql("DROP TABLE IF EXISTS binaryCarbon2")
-            sql("DROP TABLE IF EXISTS binaryCarbon3")
-            sql("DROP TABLE IF EXISTS binaryCarbon4")
-        }
-    }
-
-    test("Test unsafe as false") {
-        CarbonProperties.getInstance()
-                .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE, "false")
-        FileUtils.deleteDirectory(new File(outputPath))
-        sql("DROP TABLE IF EXISTS binaryCarbon")
-        sql("DROP TABLE IF EXISTS binaryCarbon3")
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-            sql(s"CREATE TABLE binaryCarbon USING CARBON OPTIONS(PATH '$writerPath')")
-            sql(s"CREATE TABLE binaryCarbon3 USING CARBON OPTIONS(PATH '$outputPath')" + " AS SELECT * FROM binaryCarbon")
-        } else {
-            sql(s"CREATE TABLE binaryCarbon USING CARBON LOCATION '$writerPath'")
-            sql(s"CREATE TABLE binaryCarbon3 USING CARBON LOCATION '$outputPath'" + " AS SELECT * FROM binaryCarbon")
-        }
-        checkAnswer(sql("SELECT COUNT(*) FROM binaryCarbon"),
-            Seq(Row(3)))
-        checkAnswer(sql("SELECT COUNT(*) FROM binaryCarbon3"),
-            Seq(Row(3)))
-        sql("DROP TABLE IF EXISTS binaryCarbon")
-        sql("DROP TABLE IF EXISTS binaryCarbon3")
-
-        FileUtils.deleteDirectory(new File(outputPath))
-        CarbonProperties.getInstance()
-                .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
-                    CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_DEFAULT)
-    }
-
-    test("insert into for hive and carbon, CTAS") {
-        sql("DROP TABLE IF EXISTS hiveTable")
-        sql("DROP TABLE IF EXISTS carbon_table")
-        sql("DROP TABLE IF EXISTS hiveTable2")
-        sql("DROP TABLE IF EXISTS carbon_table2")
-        sql(
-            s"""
-               | CREATE TABLE IF NOT EXISTS hivetable (
-               |    id int,
-               |    label boolean,
-               |    name string,
-               |    image binary,
-               |    autoLabel boolean)
-               | row format delimited fields terminated by ','
-             """.stripMargin)
-        sql("insert into hivetable values(1,true,'Bob','binary',false)")
-        sql("insert into hivetable values(2,false,'Xu','test',true)")
-
-        sql(
-            s"""
-               | CREATE TABLE IF NOT EXISTS carbon_table (
-               |    id int,
-               |    label boolean,
-               |    name string,
-               |    image binary,
-               |    autoLabel boolean)
-               | using carbon
-             """.stripMargin)
-        sql("insert into carbon_table values(1,true,'Bob','binary',false)")
-        sql("insert into carbon_table values(2,false,'Xu','test',true)")
-
-        val hexHiveResult = sql("SELECT hex(image) FROM hivetable")
-        val hexCarbonResult = sql("SELECT hex(image) FROM carbon_table")
-        checkAnswer(hexHiveResult, hexCarbonResult)
-        hexCarbonResult.collect().foreach { each =>
-            val result = new String(Hex.decodeHex((each.getAs[Array[Char]](0)).toString.toCharArray))
-            assert("binary".equals(result)
-                    || "test".equals(result))
-        }
-
-        val base64HiveResult = sql("SELECT base64(image) FROM hivetable")
-        val base64CarbonResult = sql("SELECT base64(image) FROM carbon_table")
-        checkAnswer(base64HiveResult, base64CarbonResult)
-        base64CarbonResult.collect().foreach { each =>
-            val result = new String(Base64.decodeBase64((each.getAs[Array[Char]](0)).toString))
-            assert("binary".equals(result)
-                    || "test".equals(result))
-        }
-
-        val carbonResult = sql("SELECT * FROM carbon_table")
-        val hiveResult = sql("SELECT * FROM hivetable")
-
-        assert(2 == carbonResult.collect().length)
-        assert(2 == hiveResult.collect().length)
-        checkAnswer(hiveResult, carbonResult)
-        carbonResult.collect().foreach { each =>
-            if (1 == each.get(0)) {
-                assert("binary".equals(new String(each.getAs[Array[Byte]](3))))
-            } else if (2 == each.get(0)) {
-                assert("test".equals(new String(each.getAs[Array[Byte]](3))))
-            } else {
-                assert(false)
-            }
-        }
-
-        sql("CREATE TABLE hivetable2 AS SELECT * FROM carbon_table")
-        sql("CREATE TABLE carbon_table2  USING CARBON AS SELECT * FROM hivetable")
-        val carbonResult2 = sql("SELECT * FROM carbon_table2")
-        val hiveResult2 = sql("SELECT * FROM hivetable2")
-        checkAnswer(hiveResult2, carbonResult2)
-        checkAnswer(carbonResult, carbonResult2)
-        checkAnswer(hiveResult, hiveResult2)
-        assert(2 == carbonResult2.collect().length)
-        assert(2 == hiveResult2.collect().length)
-
-        sql("INSERT INTO hivetable2 SELECT * FROM carbon_table")
-        sql("INSERT INTO carbon_table2 SELECT * FROM hivetable")
-        val carbonResult3 = sql("SELECT * FROM carbon_table2")
-        val hiveResult3 = sql("SELECT * FROM hivetable2")
-        checkAnswer(carbonResult3, hiveResult3)
-        assert(4 == carbonResult3.collect().length)
-        assert(4 == hiveResult3.collect().length)
-    }
-
-    test("insert into for parquet and carbon, CTAS") {
-        sql("DROP TABLE IF EXISTS parquetTable")
-        sql("DROP TABLE IF EXISTS carbon_table")
-        sql("DROP TABLE IF EXISTS parquetTable2")
-        sql("DROP TABLE IF EXISTS carbon_table2")
-        sql(
-            s"""
-               | CREATE TABLE IF NOT EXISTS parquettable (
-               |    id int,
-               |    label boolean,
-               |    name string,
-               |    image binary,
-               |    autoLabel boolean)
-               | using parquet
-             """.stripMargin)
-        sql("insert into parquettable values(1,true,'Bob','binary',false)")
-        sql("insert into parquettable values(2,false,'Xu','test',true)")
-
-        sql(
-            s"""
-               | CREATE TABLE IF NOT EXISTS carbon_table (
-               |    id int,
-               |    label boolean,
-               |    name string,
-               |    image binary,
-               |    autoLabel boolean)
-               | using carbon
-             """.stripMargin)
-        sql("insert into carbon_table values(1,true,'Bob','binary',false)")
-        sql("insert into carbon_table values(2,false,'Xu','test',true)")
-        val carbonResult = sql("SELECT * FROM carbon_table")
-        val parquetResult = sql("SELECT * FROM parquettable")
-
-        assert(2 == carbonResult.collect().length)
-        assert(2 == parquetResult.collect().length)
-        checkAnswer(parquetResult, carbonResult)
-        carbonResult.collect().foreach { each =>
-            if (1 == each.get(0)) {
-                assert("binary".equals(new String(each.getAs[Array[Byte]](3))))
-            } else if (2 == each.get(0)) {
-                assert("test".equals(new String(each.getAs[Array[Byte]](3))))
-            } else {
-                assert(false)
-            }
-        }
-
-        sql("CREATE TABLE parquettable2 AS SELECT * FROM carbon_table")
-        sql("CREATE TABLE carbon_table2  USING CARBON AS SELECT * FROM parquettable")
-        val carbonResult2 = sql("SELECT * FROM carbon_table2")
-        val parquetResult2 = sql("SELECT * FROM parquettable2")
-        checkAnswer(parquetResult2, carbonResult2)
-        checkAnswer(carbonResult, carbonResult2)
-        checkAnswer(parquetResult, parquetResult2)
-        assert(2 == carbonResult2.collect().length)
-        assert(2 == parquetResult2.collect().length)
-
-        sql("INSERT INTO parquettable2 SELECT * FROM carbon_table")
-        sql("INSERT INTO carbon_table2 SELECT * FROM parquettable")
-        val carbonResult3 = sql("SELECT * FROM carbon_table2")
-        val parquetResult3 = sql("SELECT * FROM parquettable2")
-        checkAnswer(carbonResult3, parquetResult3)
-        assert(4 == carbonResult3.collect().length)
-        assert(4 == parquetResult3.collect().length)
-    }
-
-    test("insert into carbon as select from hive after hive load data") {
-        sql("DROP TABLE IF EXISTS hiveTable")
-        sql("DROP TABLE IF EXISTS carbon_table")
-        sql("DROP TABLE IF EXISTS hiveTable2")
-        sql("DROP TABLE IF EXISTS carbon_table2")
-
-        sql(
-            s"""
-               | CREATE TABLE IF NOT EXISTS hivetable (
-               |    id int,
-               |    label boolean,
-               |    name string,
-               |    image binary,
-               |    autoLabel boolean)
-               | row format delimited fields terminated by '|'
-             """.stripMargin)
-        sql(
-            s"""
-               | LOAD DATA LOCAL INPATH '$resourcesPath/binarystringdata.csv'
-               | INTO TABLE hivetable
-             """.stripMargin)
-
-        sql(
-            s"""
-               | CREATE TABLE IF NOT EXISTS carbon_table (
-               |    id int,
-               |    label boolean,
-               |    name string,
-               |    image binary,
-               |    autoLabel boolean)
-               | using carbon
-             """.stripMargin)
-        sql("insert into carbon_table select * from hivetable")
-
-        sqlContext.udf.register("decodeHex", (str: String) =>
-            Hex.decodeHex(str.toList.map(_.toInt.toBinaryString).mkString.toCharArray))
-        sqlContext.udf.register("unHexValue", (str: String) =>
-            org.apache.spark.sql.catalyst.expressions.Hex.unhex(str.toList.map(_.toInt.toBinaryString).mkString.getBytes))
-        sqlContext.udf.register("decodeBase64", (str: String) => Base64.decodeBase64(str.getBytes()))
-
-        val udfHexResult = sql("SELECT decodeHex(image) FROM carbon_table")
-        val unHexResult = sql("SELECT unHexValue(image) FROM carbon_table")
-        checkAnswer(udfHexResult, unHexResult)
-
-        val udfBase64Result = sql("SELECT decodeBase64(image) FROM carbon_table")
-        val unbase64Result = sql("SELECT unbase64(image) FROM carbon_table")
-        checkAnswer(udfBase64Result, unbase64Result)
-
-        val carbonResult = sql("SELECT * FROM carbon_table")
-        val hiveResult = sql("SELECT * FROM hivetable")
-
-        assert(3 == carbonResult.collect().length)
-        assert(3 == hiveResult.collect().length)
-        checkAnswer(hiveResult, carbonResult)
-        carbonResult.collect().foreach { each =>
-            if (2 == each.get(0)) {
-                assert("\u0001history\u0002".equals(new String(each.getAs[Array[Byte]](3))))
-            } else if (1 == each.get(0)) {
-                assert("\u0001education\u0002".equals(new String(each.getAs[Array[Byte]](3))))
-            } else if (3 == each.get(0)) {
-                assert("".equals(new String(each.getAs[Array[Byte]](3)))
-                       || "\u0001biology\u0002".equals(new String(each.getAs[Array[Byte]](3))))
-            } else {
-                assert(false)
-            }
-        }
-
-        sql("CREATE TABLE hivetable2 AS SELECT * FROM carbon_table")
-        sql("CREATE TABLE carbon_table2  USING CARBON AS SELECT * FROM hivetable")
-        val carbonResult2 = sql("SELECT * FROM carbon_table2")
-        val hiveResult2 = sql("SELECT * FROM hivetable2")
-        checkAnswer(hiveResult2, carbonResult2)
-        checkAnswer(carbonResult, carbonResult2)
-        checkAnswer(hiveResult, hiveResult2)
-        assert(3 == carbonResult2.collect().length)
-        assert(3 == hiveResult2.collect().length)
-
-        sql("INSERT INTO hivetable2 SELECT * FROM carbon_table")
-        sql("INSERT INTO carbon_table2 SELECT * FROM hivetable")
-        val carbonResult3 = sql("SELECT * FROM carbon_table2")
-        val hiveResult3 = sql("SELECT * FROM hivetable2")
-        checkAnswer(carbonResult3, hiveResult3)
-        assert(6 == carbonResult3.collect().length)
-        assert(6 == hiveResult3.collect().length)
-
-    }
-
-    test("filter for hive and carbon") {
-        sql("DROP TABLE IF EXISTS hiveTable")
-        sql("DROP TABLE IF EXISTS carbon_table")
-
-        sql(
-            s"""
-               | CREATE TABLE IF NOT EXISTS hivetable (
-               |    id int,
-               |    label boolean,
-               |    name string,
-               |    image binary,
-               |    autoLabel boolean)
-               | row format delimited fields terminated by ','
-             """.stripMargin)
-        sql("insert into hivetable values(1,true,'Bob','binary',false)")
-        sql("insert into hivetable values(2,false,'Xu','test',true)")
-
-        sql(
-            s"""
-               | CREATE TABLE IF NOT EXISTS carbon_table (
-               |    id int,
-               |    label boolean,
-               |    name string,
-               |    image binary,
-               |    autoLabel boolean)
-               | using carbon
-             """.stripMargin)
-        sql("insert into carbon_table values(1,true,'Bob','binary',false)")
-        sql("insert into carbon_table values(2,false,'Xu','test',true)")
-
-        // filter with equal
-        val hiveResult = sql("SELECT * FROM hivetable where image=cast('binary' as binary)")
-        val carbonResult = sql("SELECT * FROM carbon_table where image=cast('binary' as binary)")
-
-        checkAnswer(hiveResult, carbonResult)
-        assert(1 == carbonResult.collect().length)
-        carbonResult.collect().foreach { each =>
-            assert(1 == each.get(0))
-            assert("binary".equals(new String(each.getAs[Array[Byte]](3))))
-        }
-
-        // filter with an unquoted (non-string) value
-        val exception = intercept[Exception] {
-            sql("SELECT * FROM carbon_table where image=binary").collect()
-        }
-        assert(exception.getMessage.contains("cannot resolve '`binary`' given input columns"))
-
-        // filter with not equal
-        val hiveResult3 = sql("SELECT * FROM hivetable where image!=cast('binary' as binary)")
-        val carbonResult3 = sql("SELECT * FROM carbon_table where image!=cast('binary' as binary)")
-        checkAnswer(hiveResult3, carbonResult3)
-        assert(1 == carbonResult3.collect().length)
-        carbonResult3.collect().foreach { each =>
-            assert(2 == each.get(0))
-            assert("test".equals(new String(each.getAs[Array[Byte]](3))))
-        }
-
-        // filter with in
-        val hiveResult4 = sql("SELECT * FROM hivetable where image in (cast('binary' as binary))")
-        val carbonResult4 = sql("SELECT * FROM carbon_table where image in (cast('binary' as binary))")
-        checkAnswer(hiveResult4, carbonResult4)
-        assert(1 == carbonResult4.collect().length)
-        carbonResult4.collect().foreach { each =>
-            assert(1 == each.get(0))
-            assert("binary".equals(new String(each.getAs[Array[Byte]](3))))
-        }
-
-        // filter with not in
-        val hiveResult5 = sql("SELECT * FROM hivetable where image not in (cast('binary' as binary))")
-        val carbonResult5 = sql("SELECT * FROM carbon_table where image not in (cast('binary' as binary))")
-        checkAnswer(hiveResult5, carbonResult5)
-        assert(1 == carbonResult5.collect().length)
-        carbonResult5.collect().foreach { each =>
-            assert(2 == each.get(0))
-            assert("test".equals(new String(each.getAs[Array[Byte]](3))))
-        }
-    }
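-
-    // Note on the filter test above: every working predicate casts its string
-    // literal with cast('...' as binary); an unquoted value such as
-    // image=binary is resolved as a column reference and fails analysis.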
-
-    test("Spark DataSource don't support update, delete") {
-        sql("DROP TABLE IF EXISTS carbon_table")
-        sql("DROP TABLE IF EXISTS carbon_table2")
-
-        sql(
-            s"""
-               | CREATE TABLE IF NOT EXISTS carbon_table (
-               |    id int,
-               |    label boolean,
-               |    name string,
-               |    binaryField binary,
-               |    autoLabel boolean)
-               | using carbon
-             """.stripMargin)
-        sql("insert into carbon_table values(1,true,'Bob','binary',false)")
-        sql("insert into carbon_table values(2,false,'Xu','test',true)")
-
-        val carbonResult = sql("SELECT * FROM carbon_table")
-
-        carbonResult.collect().foreach { each =>
-            if (1 == each.get(0)) {
-                assert("binary".equals(new String(each.getAs[Array[Byte]](3))))
-            } else if (2 == each.get(0)) {
-                assert("test".equals(new String(each.getAs[Array[Byte]](3))))
-            } else {
-                assert(false)
-            }
-        }
-
-        var exception = intercept[Exception] {
-            sql("UPDATE carbon_table SET binaryField = 'binary2' WHERE id = 1").show()
-        }
-        assert(exception.getMessage.contains("mismatched input 'UPDATE' expecting"))
-
-        exception = intercept[Exception] {
-            sql("DELETE FROM carbon_table WHERE id = 1").show()
-        }
-        assert(exception.getMessage.contains("Operation not allowed: DELETE FROM"))
-    }
-
-    test("test array of binary data type with sparkfileformat ") {
-        sql("drop table if exists carbon_table")
-        sql("drop table if exists parquet_table")
-        sql("create table if not exists carbon_table(id int, label boolean, name string," +
-            "binaryField array<binary>, autoLabel boolean) using carbon")
-        sql("insert into carbon_table values(1,true,'abc',array('binary'),false)")
-        sql("create table if not exists parquet_table(id int, label boolean, name string," +
-            "binaryField array<binary>, autoLabel boolean) using parquet")
-        sql("insert into parquet_table values(1,true,'abc',array('binary'),false)")
-        checkAnswer(sql("SELECT binaryField[0] FROM carbon_table"),
-            sql("SELECT binaryField[0] FROM parquet_table"))
-        sql("drop table if exists carbon_table")
-        sql("drop table if exists parquet_table")
-    }
-
-    test("test struct of binary data type with sparkfileformat ") {
-        sql("drop table if exists carbon_table")
-        sql("drop table if exists parquet_table")
-        sql("create table if not exists carbon_table(id int, label boolean, name string," +
-            "binaryField struct<b:binary>, autoLabel boolean) using carbon")
-        sql("insert into carbon_table values(1,true,'abc',named_struct('b','binary'),false)")
-        sql("create table if not exists parquet_table(id int, label boolean, name string," +
-            "binaryField struct<b:binary>, autoLabel boolean) using parquet")
-        sql("insert into parquet_table values(1,true,'abc',named_struct('b','binary'),false)")
-        checkAnswer(sql("SELECT binaryField.b FROM carbon_table"),
-            sql("SELECT binaryField.b FROM parquet_table"))
-        sql("drop table if exists carbon_table")
-        sql("drop table if exists parquet_table")
-    }
-
-    test("test map of binary data type with sparkfileformat") {
-        sql("drop table if exists carbon_table")
-        sql("drop table if exists parquet_table")
-        sql("create table if not exists parquet_table(id int, label boolean, name string," +
-            "binaryField map<int, binary>, autoLabel boolean) using parquet")
-        sql("insert into parquet_table values(1,true,'abc',map(1,'binary'),false)")
-        sql("create table if not exists carbon_table(id int, label boolean, name string," +
-            "binaryField map<int, binary>, autoLabel boolean) using carbon")
-        sql("insert into carbon_table values(1,true,'abc',map(1,'binary'),false)")
-        checkAnswer(sql("SELECT binaryField[1] FROM carbon_table"),
-            sql("SELECT binaryField[1] FROM parquet_table"))
-        sql("drop table if exists carbon_table")
-        sql("drop table if exists parquet_table")
-    }
-
-    test("test map of array and struct binary data type with sparkfileformat") {
-        sql("drop table if exists carbon_table")
-        sql("drop table if exists parquet_table")
-        sql("create table if not exists parquet_table(id int, label boolean, name string," +
-            "binaryField1 map<int, array<binary>>, binaryField2 map<int, struct<b:binary>> ) " +
-            "using parquet")
-        sql("insert into parquet_table values(1,true,'abc',map(1,array('binary')),map(1," +
-            "named_struct('b','binary')))")
-        sql("create table if not exists carbon_table(id int, label boolean, name string," +
-            "binaryField1 map<int, array<binary>>, binaryField2 map<int, struct<b:binary>> ) " +
-            "using carbon")
-        sql("insert into carbon_table values(1,true,'abc',map(1,array('binary')),map(1," +
-            "named_struct('b','binary')))")
-        checkAnswer(sql("SELECT binaryField1[1][1] FROM carbon_table"),
-            sql("SELECT binaryField1[1][1] FROM parquet_table"))
-        checkAnswer(sql("SELECT binaryField2[1].b FROM carbon_table"),
-            sql("SELECT binaryField2[1].b FROM parquet_table"))
-        sql("drop table if exists carbon_table")
-    }
-
-    test("test of array of struct and struct of array of binary data type with sparkfileformat") {
-        sql("drop table if exists carbon_table")
-        sql("drop table if exists parquet_table")
-        sql("create table if not exists parquet_table(id int, label boolean, name string," +
-            "binaryField1 array<struct<b1:binary>>, binaryField2 struct<b2:array<binary>> ) " +
-            "using parquet")
-        sql("insert into parquet_table values(1,true,'abc',array(named_struct('b1','binary'))," +
-            "named_struct('b2',array('binary')))")
-        sql("create table if not exists carbon_table(id int, label boolean, name string," +
-            "binaryField1 array<struct<b1:binary>>, binaryField2 struct<b2:array<binary>> ) " +
-            "using carbon")
-        sql("insert into carbon_table values(1,true,'abc',array(named_struct('b1','binary'))," +
-            "named_struct('b2',array('binary')))")
-        checkAnswer(sql("SELECT binaryField1[1].b1 FROM carbon_table"),
-            sql("SELECT  binaryField1[1].b1 FROM parquet_table"))
-        checkAnswer(sql("SELECT binaryField2.b2[0] FROM carbon_table"),
-            sql("SELECT binaryField2.b2[0] FROM parquet_table"))
-        sql("drop table if exists carbon_table")
-        sql("drop table if exists parquet_table")
-    }
-
-}
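The suite above is the behavioural contract for binary columns in the Spark
datasource. A condensed Scala sketch of the core patterns it exercised,
assuming a running SparkSession; the table name, column names, and the path
variable are illustrative:

    import org.apache.spark.sql.SparkSession

    def carbonReadSketch(spark: SparkSession, writerPath: String): Unit = {
      // query carbon files directly by path, without registering a table
      spark.sql(s"SELECT COUNT(*) FROM carbon.`$writerPath`").show()

      // a carbon datasource table with a binary column
      spark.sql("CREATE TABLE binary_sketch (id INT, image BINARY) USING carbon")
      spark.sql("INSERT INTO binary_sketch VALUES (1, 'binary')")

      // binary predicates require an explicit cast of the string literal
      spark.sql(
        "SELECT * FROM binary_sketch WHERE image = cast('binary' as binary)").show()
    }

As the tests show, Spark 2.1 registered tables over existing files with
OPTIONS(PATH '...'), while 2.2 and above use LOCATION.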
diff --git a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceTest.scala b/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceTest.scala
deleted file mode 100644
index bd42c13..0000000
--- a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/SparkCarbonDataSourceTest.scala
+++ /dev/null
@@ -1,1983 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.spark.sql.carbondata.datasource
-
-import java.io.File
-import java.util
-
-import scala.collection.JavaConverters._
-import scala.collection.mutable
-
-import org.apache.commons.io.FileUtils
-import org.apache.hadoop.fs.permission.{FsAction, FsPermission}
-import org.apache.spark.sql.{AnalysisException, Row}
-import org.apache.spark.sql.carbondata.datasource.TestUtil._
-import org.apache.spark.sql.types.{BinaryType, IntegerType, StringType, StructType, StructField => SparkStructField}
-import org.apache.spark.util.SparkUtil
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.datamap.DataMapStoreManager
-import org.apache.carbondata.core.datastore.impl.FileFactory
-import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
-import org.apache.carbondata.core.metadata.datatype.{DataTypes, StructField}
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.hadoop.testutil.StoreCreator
-import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}
-
-class SparkCarbonDataSourceTest extends FunSuite with BeforeAndAfterAll {
-
-
-  var writerOutputPath = new File(this.getClass.getResource("/").getPath
-          + "../../target/SparkCarbonFileFormat/SDKWriterOutput/").getCanonicalPath
-  // getCanonicalPath gives a path with '\', but the code expects '/'
-  writerOutputPath = writerOutputPath.replace("\\", "/")
-
-  def buildTestData(rows: Int,
-                    sortColumns: List[String]): Any = {
-    val schema = new StringBuilder()
-            .append("[ \n")
-            .append("   {\"stringField\":\"string\"},\n")
-            .append("   {\"byteField\":\"byte\"},\n")
-            .append("   {\"shortField\":\"short\"},\n")
-            .append("   {\"intField\":\"int\"},\n")
-            .append("   {\"longField\":\"long\"},\n")
-            .append("   {\"doubleField\":\"double\"},\n")
-            .append("   {\"floatField\":\"float\"},\n")
-            .append("   {\"decimalField\":\"decimal(17,2)\"},\n")
-            .append("   {\"boolField\":\"boolean\"},\n")
-            .append("   {\"dateField\":\"DATE\"},\n")
-            .append("   {\"timeField\":\"TIMESTAMP\"},\n")
-            .append("   {\"varcharField\":\"varchar\"},\n")
-            .append("   {\"varcharField2\":\"varchar\"}\n")
-            .append("]")
-            .toString()
-
-    try {
-      val builder = CarbonWriter.builder()
-      val writer =
-        builder.outputPath(writerOutputPath)
-                .sortBy(sortColumns.toArray)
-                .uniqueIdentifier(System.currentTimeMillis)
-                .withBlockSize(2)
-                .withCsvInput(Schema.parseJson(schema))
-                .writtenBy("TestNonTransactionalCarbonTable")
-                .build()
-      var i = 0
-      while (i < rows) {
-        writer.write(Array[String]("robot" + i,
-          String.valueOf(i / 100),
-          String.valueOf(i / 100),
-          String.valueOf(i),
-          String.valueOf(i),
-          String.valueOf(i),
-          String.valueOf(i),
-          String.valueOf(i),
-          "true",
-          "2019-03-02",
-          "2019-02-12 03:03:34",
-          "var1",
-          "var2"))
-        i += 1
-      }
-      writer.close()
-    } catch {
-      case ex: Throwable => throw new RuntimeException(ex)
-    }
-  }
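-
-  // buildTestData feeds the SDK writer CSV-style string rows; the JSON schema
-  // above is parsed with Schema.parseJson. Varchar columns written this way
-  // only load back through the datasource once long_string_columns is set,
-  // which is what the varchar test below asserts.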
-
-  test("Carbon DataSource read SDK data with varchar") {
-    import spark._
-    FileUtils.deleteDirectory(new File(writerOutputPath))
-    val num = 10000
-    buildTestData(num, List("stringField", "intField"))
-    if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-      sql("DROP TABLE IF EXISTS carbontable_varchar")
-      sql("DROP TABLE IF EXISTS carbontable_varchar2")
-      sql(s"CREATE TABLE carbontable_varchar USING CARBON LOCATION '$writerOutputPath'")
-      val e = intercept[Exception] {
-        sql("SELECT COUNT(*) FROM carbontable_varchar").show()
-      }
-      assert(e.getMessage.contains("Datatype of the Column VARCHAR present in index file, is varchar and not same as datatype of the column with same name present in table, because carbon convert varchar of carbon to string of spark, please set long_string_columns for varchar column"))
-
-      sql(s"CREATE TABLE carbontable_varchar2 USING CARBON OPTIONS('long_String_columns'='varcharField,varcharField2') LOCATION '$writerOutputPath'")
-      checkAnswer(sql("SELECT COUNT(*) FROM carbontable_varchar2"), Seq(Row(num)))
-    }
-  }
-
-  test("test write using dataframe") {
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x))
-      .toDF("c1", "c2", "number")
-    spark.sql("drop table if exists testformat")
-    // Saves dataframe to carbon file
-    df.write
-      .format("carbon").saveAsTable("testformat")
-    assert(spark.sql("select * from testformat").count() == 10)
-    assert(spark.sql("select * from testformat where c1='a0'").count() == 1)
-    assert(spark.sql("select * from testformat").count() == 10)
-    spark.sql("drop table if exists testformat")
-  }
-
-  test("test write using ddl") {
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x))
-      .toDF("c1", "c2", "number")
-    spark.sql("drop table if exists testparquet")
-    spark.sql("drop table if exists testformat")
-    // Saves dataframe as a parquet table
-    df.write
-      .format("parquet").saveAsTable("testparquet")
-    spark.sql("create table carbon_table(c1 string, c2 string, number int) using carbon")
-    spark.sql("insert into carbon_table select * from testparquet")
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table where c1='a1'"), spark.sql("select * from testparquet where c1='a1'"))
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      val mapSize = DataMapStoreManager.getInstance().getAllDataMaps.size()
-      DataMapStoreManager.getInstance()
-        .clearDataMaps(AbsoluteTableIdentifier.from(warehouse1 + "/carbon_table"))
-      assert(mapSize > DataMapStoreManager.getInstance().getAllDataMaps.size())
-    }
-    spark.sql("drop table if exists testparquet")
-    spark.sql("drop table if exists testformat")
-  }
-
-  test("test add columns for table of using carbon with sql") {
-    // TODO: should support ADD COLUMNS for carbon datasource tables;
-    // the limitation comes from Spark
-    import spark.implicits._
-    import spark._
-    try {
-      val df = spark.sparkContext.parallelize(1 to 10)
-        .map(x => ("a" + x % 10, "b", x))
-        .toDF("c1", "c2", "number")
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS carbon_table")
-      // Saves dataFrame as a parquet table
-      df.write
-        .format("parquet").saveAsTable("test_parquet")
-      sql("CREATE TABLE carbon_table(c1 STRING, c2 STRING, number INT) USING carbon")
-      sql("INSERT INTO carbon_table SELECT * FROM test_parquet")
-      TestUtil.checkAnswer(sql("SELECT * FROM carbon_table WHERE c1='a1'"),
-        sql("SELECT * FROM test_parquet WHERE c1='a1'"))
-      if (!SparkUtil.isSparkVersionEqualTo("2.1")) {
-        val mapSize = DataMapStoreManager.getInstance().getAllDataMaps.size()
-        DataMapStoreManager.getInstance()
-          .clearDataMaps(AbsoluteTableIdentifier.from(warehouse1 + "/carbon_table"))
-        assert(mapSize > DataMapStoreManager.getInstance().getAllDataMaps.size())
-      }
-      assert(df.schema.map(_.name) === Seq("c1", "c2", "number"))
-      sql("ALTER TABLE carbon_table ADD COLUMNS (a1 INT, b1 STRING) ")
-      assert(false)
-    } catch {
-      case e: Exception =>
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-          assert(e.getMessage.contains("Operation not allowed: ALTER TABLE ADD COLUMNS"))
-        } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-          assert(e.getMessage.contains("ALTER ADD COLUMNS does not support datasource table with type carbon."))
-        }
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS carbon_table")
-    }
-  }
-
-  test("test add columns for table of using carbon with DF") {
-    import spark.implicits._
-    import spark._
-    try {
-      val df = spark.sparkContext.parallelize(1 to 10)
-        .map(x => ("a" + x % 10, "b", x))
-        .toDF("c1", "c2", "number")
-      sql("DROP TABLE IF EXISTS carbon_table")
-      // Saves dataFrame to carbon file
-      df.write
-        .format("carbon").saveAsTable("carbon_table")
-      val customSchema = StructType(Array(
-        SparkStructField("c1", StringType),
-        SparkStructField("c2", StringType),
-        SparkStructField("number", IntegerType)))
-
-      val carbonDF = spark.read
-        .format("carbon")
-        .option("tableName", "carbon_table")
-        .schema(customSchema)
-        .load()
-
-      assert(carbonDF.schema.map(_.name) === Seq("c1", "c2", "number"))
-      val carbonDF2 = carbonDF.drop("c1")
-      assert(carbonDF2.schema.map(_.name) === Seq("c2", "number"))
-    } catch {
-      case e: Exception =>
-        e.printStackTrace()
-        assert(false)
-    } finally {
-      sql("DROP TABLE IF EXISTS carbon_table")
-    }
-  }
-
-  test("test drop columns for table of using carbon") {
-    // TODO: should support DROP COLUMNS for carbon datasource tables;
-    // the limitation comes from Spark
-    import spark.implicits._
-    import spark._
-    try {
-      val df = spark.sparkContext.parallelize(1 to 10)
-        .map(x => ("a" + x % 10, "b", x))
-        .toDF("c1", "c2", "number")
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS carbon_table")
-      // Saves the dataframe as a parquet table
-      df.write
-        .format("parquet").saveAsTable("test_parquet")
-      sql("CREATE TABLE carbon_table(c1 STRING, c2 STRING, number INT) USING carbon")
-      sql("INSERT INTO carbon_table SELECT * FROM test_parquet")
-      TestUtil.checkAnswer(sql("SELECT * FROM carbon_table WHERE c1='a1'"),
-        sql("SELECT * FROM test_parquet WHERE c1='a1'"))
-      if (!sparkContext.version.startsWith("2.1")) {
-        val mapSize = DataMapStoreManager.getInstance().getAllDataMaps.size()
-        DataMapStoreManager.getInstance()
-          .clearDataMaps(AbsoluteTableIdentifier.from(warehouse1 + "/carbon_table"))
-        assert(mapSize > DataMapStoreManager.getInstance().getAllDataMaps.size())
-      }
-      assert(df.schema.map(_.name) === Seq("c1", "c2", "number"))
-      sql("ALTER TABLE carbon_table drop COLUMNS (a1 INT, b1 STRING) ")
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("mismatched input 'COLUMNS' expecting"))
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS carbon_table")
-    }
-  }
-
-  test("test rename table name for table of using carbon") {
-    import spark.implicits._
-    import spark._
-    try {
-      val df = spark.sparkContext.parallelize(1 to 10)
-        .map(x => ("a" + x % 10, "b", x))
-        .toDF("c1", "c2", "number")
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS carbon_table")
-      sql("DROP TABLE IF EXISTS carbon_table2")
-      // Saves the dataframe as a parquet table
-      df.write
-        .format("parquet").saveAsTable("test_parquet")
-      sql("CREATE TABLE carbon_table(c1 STRING, c2 STRING, number INT) USING carbon")
-      sql("INSERT INTO carbon_table SELECT * FROM test_parquet")
-      TestUtil.checkAnswer(sql("SELECT * FROM carbon_table WHERE c1='a1'"),
-        sql("SELECT * FROM test_parquet WHERE c1='a1'"))
-      if (!sparkContext.version.startsWith("2.1")) {
-        val mapSize = DataMapStoreManager.getInstance().getAllDataMaps.size()
-        DataMapStoreManager.getInstance()
-          .clearDataMaps(AbsoluteTableIdentifier.from(warehouse1 + "/carbon_table"))
-        assert(mapSize > DataMapStoreManager.getInstance().getAllDataMaps.size())
-      }
-      assert(df.schema.map(_.name) === Seq("c1", "c2", "number"))
-      sql("ALTER TABLE carbon_table RENAME TO carbon_table2 ")
-      checkAnswer(sql("SELECT COUNT(*) FROM carbon_table2"), Seq(Row(10)));
-    } catch {
-      case e: Exception =>
-        e.printStackTrace()
-        assert(false)
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS carbon_table")
-    }
-  }
-
-  test("test change data type for table of using carbon") {
-    // TODO: limitation inherited from Spark
-    import spark.implicits._
-    import spark._
-    try {
-      val df = spark.sparkContext.parallelize(1 to 10)
-        .map(x => ("a" + x % 10, "b", x))
-        .toDF("c1", "c2", "number")
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS carbon_table")
-      sql("DROP TABLE IF EXISTS carbon_table2")
-      // Saves the dataframe as a parquet table
-      df.write
-        .format("parquet").saveAsTable("test_parquet")
-      sql("CREATE TABLE carbon_table(c1 STRING, c2 STRING, number decimal(8,2)) USING carbon")
-      sql("INSERT INTO carbon_table SELECT * FROM test_parquet")
-      TestUtil.checkAnswer(sql("SELECT * FROM carbon_table WHERE c1='a1'"),
-        sql("SELECT * FROM test_parquet WHERE c1='a1'"))
-      if (!SparkUtil.isSparkVersionEqualTo("2.1")) {
-        val mapSize = DataMapStoreManager.getInstance().getAllDataMaps.size()
-        DataMapStoreManager.getInstance()
-          .clearDataMaps(AbsoluteTableIdentifier.from(warehouse1 + "/carbon_table"))
-        assert(mapSize > DataMapStoreManager.getInstance().getAllDataMaps.size())
-      }
-      assert(df.schema.map(_.name) === Seq("c1", "c2", "number"))
-      sql("ALTER TABLE carbon_table change number number decimal(9,4)")
-      assert(false)
-    } catch {
-      case e: Exception =>
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-          assert(e.getMessage.contains("Operation not allowed: ALTER TABLE change"))
-        } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-          assert(e.getMessage.contains("ALTER TABLE CHANGE COLUMN is not supported for changing column"))
-        }
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS carbon_table")
-    }
-  }
-
-  test("test add columns for table of using parquet") {
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x))
-      .toDF("c1", "c2", "number")
-    import spark._
-    try {
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS test_parquet2")
-      df.write
-        .format("parquet").saveAsTable("test_parquet")
-      sql("ALTER TABLE test_parquet ADD COLUMNS(a1 INT, b1 STRING) ")
-      sql("INSERT INTO test_parquet VALUES('Bob','xu',12,1,'parquet')")
-      TestUtil.checkAnswer(sql("SELECT COUNT(*) FROM test_parquet"), Seq(Row(11)))
-
-      sql("DROP TABLE IF EXISTS test_parquet2")
-      sql("CREATE TABLE test_parquet2(c1 STRING, c2 STRING, number INT) USING parquet")
-      sql("INSERT INTO test_parquet2 VALUES('Bob','xu',12)")
-      sql("ALTER TABLE test_parquet2 ADD COLUMNS (a1 INT, b1 STRING) ")
-      sql("INSERT INTO test_parquet2 VALUES('Bob','xu',12,1,'parquet')")
-      TestUtil.checkAnswer(sql("SELECT COUNT(*) FROM test_parquet2"), Seq(Row(2)))
-    } catch {
-      case e: Exception =>
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-          assert(e.getMessage.contains("ALTER TABLE test_parquet ADD COLUMNS"))
-        } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-          e.printStackTrace()
-          assert(false)
-        }
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS test_parquet2")
-    }
-  }
-
-  test("test drop columns for table of using parquet") {
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x))
-      .toDF("c1", "c2", "number")
-    import spark._
-
-    sql("DROP TABLE IF EXISTS test_parquet")
-    sql("DROP TABLE IF EXISTS test_parquet2")
-    df.write
-      .format("parquet").saveAsTable("test_parquet")
-
-    val df2 = df.drop("c1")
-
-    assert(df.schema.map(_.name) === Seq("c1", "c2", "number"))
-    assert(df2.schema.map(_.name) === Seq("c2", "number"))
-
-    try {
-      sql("ALTER TABLE test_parquet DROP COLUMNS(c1)")
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("mismatched input 'COLUMNS' expecting"))
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet")
-    }
-
-    sql("DROP TABLE IF EXISTS test_parquet2")
-    sql("CREATE TABLE test_parquet2(c1 STRING, c2 STRING, number INT) USING parquet")
-    sql("INSERT INTO test_parquet2 VALUES('Bob','xu',12)")
-    try {
-      sql("ALTER TABLE test_parquet2 DROP COLUMNS (a1 INT, b1 STRING) ")
-      assert(false)
-    } catch {
-      case e: Exception =>
-        assert(e.getMessage.contains("mismatched input 'COLUMNS' expecting"))
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet2")
-    }
-  }
-
-  test("test rename table name for table of using parquet") {
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x))
-      .toDF("c1", "c2", "number")
-    import spark._
-
-    sql("DROP TABLE IF EXISTS test_parquet")
-    sql("DROP TABLE IF EXISTS test_parquet2")
-    sql("DROP TABLE IF EXISTS test_parquet3")
-    sql("DROP TABLE IF EXISTS test_parquet22")
-    df.write
-      .format("parquet").saveAsTable("test_parquet")
-
-    try {
-      sql("ALTER TABLE test_parquet rename to test_parquet3")
-      checkAnswer(sql("SELECT COUNT(*) FROM test_parquet3"), Seq(Row(10)));
-    } catch {
-      case e: Exception =>
-        e.printStackTrace()
-        assert(false)
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet")
-      sql("DROP TABLE IF EXISTS test_parquet3")
-    }
-
-    sql("DROP TABLE IF EXISTS test_parquet2")
-    sql("CREATE TABLE test_parquet2(c1 STRING, c2 STRING, number INT) USING parquet")
-    sql("INSERT INTO test_parquet2 VALUES('Bob','xu',12)")
-    try {
-      sql("ALTER TABLE test_parquet2 rename to test_parquet22")
-      checkAnswer(sql("SELECT COUNT(*) FROM test_parquet22"), Seq(Row(1)));
-    } catch {
-      case e: Exception =>
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-          assert(e.getMessage.contains("Operation not allowed: ALTER TABLE CHANGE"))
-        } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-          e.printStackTrace()
-          assert(false)
-        }
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet2")
-      sql("DROP TABLE IF EXISTS test_parquet22")
-    }
-  }
-
-  test("test change data type for table of using parquet") {
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x))
-      .toDF("c1", "c2", "number")
-    import spark._
-
-    sql("DROP TABLE IF EXISTS test_parquet")
-    sql("DROP TABLE IF EXISTS test_parquet2")
-    df.write
-      .format("parquet").saveAsTable("test_parquet")
-    try {
-      sql("ALTER TABLE test_parquet CHANGE number number long")
-      assert(false)
-    } catch {
-      case e: Exception =>
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-          assert(e.getMessage.contains("Operation not allowed: ALTER TABLE CHANGE"))
-        } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-          assert(e.getMessage.contains("ALTER TABLE CHANGE COLUMN is not supported for changing column"))
-        }
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet")
-    }
-    sql("DROP TABLE IF EXISTS test_parquet2")
-    sql("CREATE TABLE test_parquet2(c1 STRING, c2 STRING, number INT) USING parquet")
-    sql("INSERT INTO test_parquet2 VALUES('Bob','xu',12)")
-    try {
-      sql("ALTER TABLE test_parquet2 CHANGE number number long")
-      assert(false)
-    } catch {
-      case e: Exception =>
-        if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-          assert(e.getMessage.contains("Operation not allowed: ALTER TABLE CHANGE"))
-        } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-          assert(e.getMessage.contains("ALTER TABLE CHANGE COLUMN is not supported for changing column"))
-        }
-    } finally {
-      sql("DROP TABLE IF EXISTS test_parquet2")
-    }
-  }
-
-  test("test read with df write") {
-    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x))
-      .toDF("c1", "c2", "number")
-
-    // Saves dataframe to carbon file
-    df.write.format("carbon").save(warehouse1 + "/test_folder/")
-
-    val frame = spark.read.format("carbon").load(warehouse1 + "/test_folder")
-    frame.show()
-    assert(frame.count() == 10)
-    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-  }
-
-  test("test write using subfolder") {
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-      import spark.implicits._
-      val df = spark.sparkContext.parallelize(1 to 10)
-        .map(x => ("a" + x % 10, "b", x))
-        .toDF("c1", "c2", "number")
-
-      // Saves dataframe to carbon file
-      df.write.format("carbon").save(warehouse1 + "/test_folder/"+System.nanoTime())
-      df.write.format("carbon").save(warehouse1 + "/test_folder/"+System.nanoTime())
-      df.write.format("carbon").save(warehouse1 + "/test_folder/"+System.nanoTime())
-
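-      // Reading the parent folder should pick up the rows from all three subfolder writes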
-      val frame = spark.read.format("carbon").load(warehouse1 + "/test_folder")
-      assert(frame.where("c1='a1'").count() == 3)
-
-      val mapSize = DataMapStoreManager.getInstance().getAllDataMaps.size()
-      DataMapStoreManager.getInstance()
-        .clearDataMaps(AbsoluteTableIdentifier.from(warehouse1 + "/test_folder"))
-      assert(mapSize > DataMapStoreManager.getInstance().getAllDataMaps.size())
-      FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-    }
-  }
-
-  test("test write using partition ddl") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists testparquet")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x))
-      .toDF("c1", "c2", "number")
-
-    // Saves the dataframe as a partitioned parquet table
-    df.write
-      .format("parquet").partitionBy("c2").saveAsTable("testparquet")
-    spark.sql("create table carbon_table(c1 string, c2 string, number int) using carbon  PARTITIONED by (c2)")
-    spark.sql("insert into carbon_table select * from testparquet")
-    // TODO fix in 2.1
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      assert(spark.sql("select * from carbon_table").count() == 10)
-      TestUtil
-        .checkAnswer(spark.sql("select * from carbon_table"),
-          spark.sql("select * from testparquet"))
-    }
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists testparquet")
-  }
-
-  test("test write with struct type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, ("b", "c"), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 struct<a1:string, a2:string>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with array type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Array("b", "c"), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 array<string>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with nested array and struct type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Array(("1", "2"), ("3", "4")), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("describe parquet_table").show(false)
-    spark.sql("create table carbon_table(c1 string, c2 array<struct<a1:string, a2:string>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with nested struct and array type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, (Array("1", "2"), ("3", "4")), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 struct<a1:array<string>, a2:struct<a1:string, a2:string>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with array type with value as nested map type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Array(Map("b" -> "c")), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 array<map<string,string>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with array type with value as nested array<array<map>> type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Array(Array(Map("b" -> "c"))), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 array<array<map<string,string>>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with struct type with value as nested map type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, ("a", Map("b" -> "c")), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 struct<a1:string, a2:map<string,string>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with struct type with value as nested struct<array<map>> type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, ("a", Array(Map("b" -> "c"))), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 struct<a1:string, a2:array<map<string,string>>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with map type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Map("b" -> "c"), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 map<string, string>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with map type with Int data type as key") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Map(99 -> "c"), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 map<int, string>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with map type with value as nested map type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Map("a" -> Map("b" -> "c")), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 map<string, map<string, string>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with map type with value as nested struct type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Map("a" -> ("b", "c")), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 map<string, struct<a1:string, a2:string>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with map type with value as nested array type") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Map("a" -> Array("b", "c")), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 map<string, array<string>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write using ddl and options") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists testparquet")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x))
-      .toDF("c1", "c2", "number")
-
-    // Saves the dataframe as a parquet table
-    df.write
-      .format("parquet").saveAsTable("testparquet")
-    spark.sql("create table carbon_table(c1 string, c2 string, number int) using carbon options('table_blocksize'='256','inverted_index'='c1')")
-    spark.sql("describe formatted carbon_table").show()
-    TestUtil.checkExistence(spark.sql("describe formatted carbon_table"), true, "table_blocksize")
-    TestUtil.checkExistence(spark.sql("describe formatted carbon_table"), true, "inverted_index")
-    spark.sql("insert into carbon_table select * from testparquet")
-    spark.sql("select * from carbon_table").show()
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists testparquet")
-  }
-
-  test("test read with nested struct and array type without creating table") {
-    FileFactory
-      .deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_carbon_folder"))
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, (Array("1", "2"), ("3", "4")), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    val frame = spark.sql("select * from parquet_table")
-    frame.write.format("carbon").save(warehouse1 + "/test_carbon_folder")
-    val dfread = spark.read.format("carbon").load(warehouse1 + "/test_carbon_folder")
-    dfread.show(false)
-    FileFactory
-      .deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_carbon_folder"))
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test read and write with date datatype") {
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-    spark.sql("create table date_table(empno int, empname string, projdate Date) using carbon")
-    spark.sql("insert into  date_table select 11, 'ravi', '2017-11-11'")
-    spark.sql("create table date_parquet_table(empno int, empname string, projdate Date) using parquet")
-    spark.sql("insert into  date_parquet_table select 11, 'ravi', '2017-11-11'")
-    checkAnswer(spark.sql("select * from date_table"), spark.sql("select * from date_parquet_table"))
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-  }
-
-  test("test date filter datatype") {
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-    spark.sql("create table date_table(empno int, empname string, projdate Date) using carbon")
-    spark.sql("insert into  date_table select 11, 'ravi', '2017-11-11'")
-    spark.sql("select * from date_table where projdate=cast('2017-11-11' as date)").show()
-    spark.sql("create table date_parquet_table(empno int, empname string, projdate Date) using parquet")
-    spark.sql("insert into  date_parquet_table select 11, 'ravi', '2017-11-11'")
-    checkAnswer(spark.sql("select * from date_table where projdate=cast('2017-11-11' as date)"), spark.sql("select * from date_parquet_table where projdate=cast('2017-11-11' as date)"))
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-  }
-
-  test("test read and write with date datatype with wrong format") {
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-    spark.sql("create table date_table(empno int, empname string, projdate Date) using carbon")
-    spark.sql("insert into  date_table select 11, 'ravi', '11-11-2017'")
-    spark.sql("create table date_parquet_table(empno int, empname string, projdate Date) using parquet")
-    spark.sql("insert into  date_parquet_table select 11, 'ravi', '11-11-2017'")
-    checkAnswer(spark.sql("select * from date_table"), spark.sql("select * from date_parquet_table"))
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-  }
-
-  test("test read and write with timestamp datatype") {
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-    spark.sql("create table date_table(empno int, empname string, projdate timestamp) using carbon")
-    spark.sql("insert into  date_table select 11, 'ravi', '2017-11-11 00:00:01'")
-    spark.sql("create table date_parquet_table(empno int, empname string, projdate timestamp) using parquet")
-    spark.sql("insert into  date_parquet_table select 11, 'ravi', '2017-11-11 00:00:01'")
-    checkAnswer(spark.sql("select * from date_table"), spark.sql("select * from date_parquet_table"))
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-  }
-
-  test("test read and write with timestamp datatype with wrong format") {
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-    spark.sql("create table date_table(empno int, empname string, projdate timestamp) using carbon")
-    spark.sql("insert into  date_table select 11, 'ravi', '11-11-2017 00:00:01'")
-    spark.sql("create table date_parquet_table(empno int, empname string, projdate timestamp) using parquet")
-    spark.sql("insert into  date_parquet_table select 11, 'ravi', '11-11-2017 00:00:01'")
-    checkAnswer(spark.sql("select * from date_table"), spark.sql("select * from date_parquet_table"))
-    spark.sql("drop table if exists date_table")
-    spark.sql("drop table if exists date_parquet_table")
-  }
-
-  test("test write with array type with filter") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, Array("b", "c"), x))
-      .toDF("c1", "c2", "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 array<string>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table where c1='a1' and c2[0]='b'"), spark.sql("select * from parquet_table where c1='a1' and c2[0]='b'"))
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test write with struct type with filter") {
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, (Array("1", "2"), ("3", "4")),Array(("1", 1), ("2", 2)), x))
-      .toDF("c1", "c2", "c3",  "number")
-
-    df.write
-      .format("parquet").saveAsTable("parquet_table")
-    spark.sql("create table carbon_table(c1 string, c2 struct<a1:array<string>, a2:struct<a1:string, a2:string>>, c3 array<struct<a1:string, a2:int>>, number int) using carbon")
-    spark.sql("insert into carbon_table select * from parquet_table")
-    assert(spark.sql("select * from carbon_table").count() == 10)
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table"), spark.sql("select * from parquet_table"))
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table where c2.a1[0]='1' and c1='a1'"), spark.sql("select * from parquet_table where c2._1[0]='1' and c1='a1'"))
-    TestUtil.checkAnswer(spark.sql("select * from carbon_table where c2.a1[0]='1' and c3[0].a2=1"), spark.sql("select * from parquet_table where c2._1[0]='1' and c3[0]._2=1"))
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test read with df write string issue") {
-    spark.sql("drop table if exists test123")
-    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x.toShort , x, x.toLong, x.toDouble, BigDecimal.apply(x),  Array(x+1, x), ("b", BigDecimal.apply(x))))
-      .toDF("c1", "c2", "shortc", "intc", "longc", "doublec", "bigdecimalc", "arrayc", "structc")
-
-    // Saves dataframe to carbon file
-    df.write.format("carbon").save(warehouse1 + "/test_folder/")
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      spark
-        .sql(s"create table test123 (c1 string, c2 string, shortc smallint,intc int, longc bigint,  doublec double, bigdecimalc decimal(38,18), arrayc array<int>, structc struct<_1:string, _2:decimal(38,18)>) using carbon location '$warehouse1/test_folder/'")
-
-      checkAnswer(spark.sql("select * from test123"),
-        spark.read.format("carbon").load(warehouse1 + "/test_folder/"))
-    }
-    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-    spark.sql("drop table if exists test123")
-  }
-
-  test("test read with df write with empty data") {
-    spark.sql("drop table if exists test123")
-    spark.sql("drop table if exists test123_par")
-    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-    // No dataframe is written; both tables point at the same empty location
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      spark
-        .sql(s"create table test123 (c1 string, c2 string, arrayc array<int>, structc struct<_1:string, _2:decimal(38,18)>, shortc smallint,intc int, longc bigint,  doublec double, bigdecimalc decimal(38,18)) using carbon location '$warehouse1/test_folder/'")
-
-      spark
-        .sql(s"create table test123_par (c1 string, c2 string, arrayc array<int>, structc struct<_1:string, _2:decimal(38,18)>, shortc smallint,intc int, longc bigint,  doublec double, bigdecimalc decimal(38,18)) using carbon location '$warehouse1/test_folder/'")
-      TestUtil
-        .checkAnswer(spark.sql("select count(*) from test123"),
-          spark.sql("select count(*) from test123_par"))
-    }
-    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-    spark.sql("drop table if exists test123")
-    spark.sql("drop table if exists test123_par")
-  }
-
-  test("test write with nosort columns") {
-    spark.sql("drop table if exists test123")
-    spark.sql("drop table if exists test123_par")
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", x.toShort , x, x.toLong, x.toDouble, BigDecimal.apply(x),  Array(x+1, x), ("b", BigDecimal.apply(x))))
-      .toDF("c1", "c2", "shortc", "intc", "longc", "doublec", "bigdecimalc", "arrayc", "structc")
-
-    // Saves the dataframe as a parquet table
-    df.write.format("parquet").saveAsTable("test123_par")
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      spark
-        .sql(s"create table test123 (c1 string, c2 string, shortc smallint,intc int, longc bigint,  doublec double, bigdecimalc decimal(38,18), arrayc array<int>, structc struct<_1:string, _2:decimal(38,18)>) using carbon options('sort_columns'='') location '$warehouse1/test_folder/'")
-
-      spark.sql(s"insert into test123 select * from test123_par")
-      checkAnswer(spark.sql("select * from test123"), spark.sql(s"select * from test123_par"))
-    }
-    spark.sql("drop table if exists test123")
-    spark.sql("drop table if exists test123_par")
-  }
-
-  test("test complex columns mismatch") {
-    spark.sql("drop table if exists array_com_hive")
-    spark.sql(s"drop table if exists array_com")
-    spark.sql("create table array_com_hive (CUST_ID string, YEAR int, MONTH int, AGE int, GENDER string, EDUCATED string, IS_MARRIED string, ARRAY_INT array<int>,ARRAY_STRING array<string>,ARRAY_DATE array<timestamp>,CARD_COUNT int,DEBIT_COUNT int, CREDIT_COUNT int, DEPOSIT double, HQ_DEPOSIT double) row format delimited fields terminated by ',' collection items terminated by '$'")
-    val sourceFile = FileFactory.getPath(s"$resource/Array.csv").toString
-    spark.sql(s"load data local inpath '$sourceFile' into table array_com_hive")
-    spark.sql("create table Array_com (CUST_ID string, YEAR int, MONTH int, AGE int, GENDER string, EDUCATED string, IS_MARRIED string, ARRAY_INT array<int>,ARRAY_STRING array<string>,ARRAY_DATE array<timestamp>,CARD_COUNT int,DEBIT_COUNT int, CREDIT_COUNT int, DEPOSIT double, HQ_DEPOSIT double) using carbon")
-    spark.sql("insert into Array_com select * from array_com_hive")
-    TestUtil.checkAnswer(spark.sql("select * from Array_com order by CUST_ID ASC limit 3"), spark.sql("select * from array_com_hive order by CUST_ID ASC limit 3"))
-    spark.sql("drop table if exists array_com_hive")
-    spark.sql(s"drop table if exists array_com")
-  }
-
-  test("test complex columns fail while insert ") {
-    spark.sql("drop table if exists STRUCT_OF_ARRAY_com_hive")
-    spark.sql(s"drop table if exists STRUCT_OF_ARRAY_com")
-    spark.sql(" create table STRUCT_OF_ARRAY_com_hive (CUST_ID string, YEAR int, MONTH int, AGE int, GENDER string, EDUCATED string, IS_MARRIED string, STRUCT_OF_ARRAY struct<ID: int,CHECK_DATE: timestamp ,SNo: array<int>,sal1: array<double>,state: array<string>,date1: array<timestamp>>,CARD_COUNT int,DEBIT_COUNT int, CREDIT_COUNT int, DEPOSIT float, HQ_DEPOSIT double) row format delimited fields terminated by ',' collection items terminated by '$' map keys terminated by '&'")
-    val sourceFile = FileFactory.getPath(s"$resource/structofarray.csv").toString
-    spark.sql(s"load data local inpath '$sourceFile' into table STRUCT_OF_ARRAY_com_hive")
-    spark.sql("create table STRUCT_OF_ARRAY_com (CUST_ID string, YEAR int, MONTH int, AGE int, GENDER string, EDUCATED string, IS_MARRIED string, STRUCT_OF_ARRAY struct<ID: int,CHECK_DATE: timestamp,SNo: array<int>,sal1: array<double>,state: array<string>,date1: array<timestamp>>,CARD_COUNT int,DEBIT_COUNT int, CREDIT_COUNT int, DEPOSIT double, HQ_DEPOSIT double) using carbon")
-    spark.sql(" insert into STRUCT_OF_ARRAY_com select * from STRUCT_OF_ARRAY_com_hive")
-    TestUtil.checkAnswer(spark.sql("select * from STRUCT_OF_ARRAY_com  order by CUST_ID ASC"), spark.sql("select * from STRUCT_OF_ARRAY_com_hive  order by CUST_ID ASC"))
-    spark.sql("drop table if exists STRUCT_OF_ARRAY_com_hive")
-    spark.sql(s"drop table if exists STRUCT_OF_ARRAY_com")
-  }
-
-  test("test partition error in carbon") {
-    spark.sql("drop table if exists carbon_par")
-    spark.sql("drop table if exists parquet_par")
-    spark.sql("create table carbon_par (name string, age int, country string) using carbon partitioned by (country)")
-    spark.sql("insert into carbon_par select 'b', '12', 'aa'")
-    spark.sql("create table parquet_par (name string, age int, country string) using carbon partitioned by (country)")
-    spark.sql("insert into parquet_par select 'b', '12', 'aa'")
-    checkAnswer(spark.sql("select * from carbon_par"), spark.sql("select * from parquet_par"))
-    spark.sql("drop table if exists carbon_par")
-    spark.sql("drop table if exists parquet_par")
-  }
-
-  test("test more cols error in carbon") {
-    spark.sql("drop table if exists h_jin")
-    spark.sql("drop table if exists c_jin")
-    spark.sql(s"""create table h_jin(RECORD_ID string,
-      CDR_ID string,LOCATION_CODE int,SYSTEM_ID string,
-      CLUE_ID string,HIT_ELEMENT string,CARRIER_CODE string,CAP_TIME date,
-      DEVICE_ID string,DATA_CHARACTER string,
-      NETCELL_ID string,NETCELL_TYPE int,EQU_CODE string,CLIENT_MAC string,
-      SERVER_MAC string,TUNNEL_TYPE string,TUNNEL_IP_CLIENT string,TUNNEL_IP_SERVER string,
-      TUNNEL_ID_CLIENT string,TUNNEL_ID_SERVER string,SIDE_ONE_TUNNEL_ID string,SIDE_TWO_TUNNEL_ID string,
-      CLIENT_IP string,SERVER_IP string,TRANS_PROTOCOL string,CLIENT_PORT int,SERVER_PORT int,APP_PROTOCOL string,
-      CLIENT_AREA bigint,SERVER_AREA bigint,LANGUAGE string,STYPE string,SUMMARY string,FILE_TYPE string,FILENAME string,
-      FILESIZE string,BILL_TYPE string,ORIG_USER_NUM string,USER_NUM string,USER_IMSI string,
-      USER_IMEI string,USER_BELONG_AREA_CODE string,USER_BELONG_COUNTRY_CODE string,
-      USER_LONGITUDE double,USER_LATITUDE double,USER_MSC string,USER_BASE_STATION string,
-      USER_CURR_AREA_CODE string,USER_CURR_COUNTRY_CODE string,USER_SIGNAL_POINT string,USER_IP string,
-      ORIG_OPPO_NUM string,OPPO_NUM string,OPPO_IMSI string,OPPO_IMEI string,OPPO_BELONG_AREA_CODE string,
-      OPPO_BELONG_COUNTRY_CODE string,OPPO_LONGITUDE double,OPPO_LATITUDE double,OPPO_MSC string,OPPO_BASE_STATION string,
-      OPPO_CURR_AREA_CODE string,OPPO_CURR_COUNTRY_CODE string,OPPO_SIGNAL_POINT string,OPPO_IP string,RING_TIME timestamp,
-      CALL_ESTAB_TIME timestamp,END_TIME timestamp,CALL_DURATION bigint,CALL_STATUS_CODE int,DTMF string,ORIG_OTHER_NUM string,
-      OTHER_NUM string,ROAM_NUM string,SEND_TIME timestamp,ORIG_SMS_CONTENT string,ORIG_SMS_CODE int,SMS_CONTENT string,SMS_NUM int,
-      SMS_COUNT int,REMARK string,CONTENT_STATUS int,VOC_LENGTH bigint,FAX_PAGE_COUNT int,COM_OVER_CAUSE int,ROAM_TYPE int,SGSN_ADDR string,GGSN_ADDR string,
-      PDP_ADDR string,APN_NI string,APN_OI string,CARD_ID string,TIME_OUT int,LOGIN_TIME timestamp,USER_IMPU string,OPPO_IMPU string,USER_LAST_IMPI string,
-      USER_CURR_IMPI string,SUPSERVICE_TYPE bigint,SUPSERVICE_TYPE_SUBCODE bigint,SMS_CENTERNUM string,USER_LAST_LONGITUDE double,USER_LAST_LATITUDE double,
-      USER_LAST_MSC string,USER_LAST_BASE_STATION string,LOAD_ID bigint,P_CAP_TIME string)  ROW format delimited FIELDS terminated by '|'""".stripMargin)
-    val sourceFile = FileFactory.getPath(s"$resource/j2.csv").toString
-    spark.sql(s"load data local inpath '$sourceFile' into table h_jin")
-    spark.sql(s"""create table c_jin(RECORD_ID string,
-      CDR_ID string,LOCATION_CODE int,SYSTEM_ID string,
-      CLUE_ID string,HIT_ELEMENT string,CARRIER_CODE string,CAP_TIME date,
-      DEVICE_ID string,DATA_CHARACTER string,
-      NETCELL_ID string,NETCELL_TYPE int,EQU_CODE string,CLIENT_MAC string,
-      SERVER_MAC string,TUNNEL_TYPE string,TUNNEL_IP_CLIENT string,TUNNEL_IP_SERVER string,
-      TUNNEL_ID_CLIENT string,TUNNEL_ID_SERVER string,SIDE_ONE_TUNNEL_ID string,SIDE_TWO_TUNNEL_ID string,
-      CLIENT_IP string,SERVER_IP string,TRANS_PROTOCOL string,CLIENT_PORT int,SERVER_PORT int,APP_PROTOCOL string,
-      CLIENT_AREA string,SERVER_AREA string,LANGUAGE string,STYPE string,SUMMARY string,FILE_TYPE string,FILENAME string,
-      FILESIZE string,BILL_TYPE string,ORIG_USER_NUM string,USER_NUM string,USER_IMSI string,
-      USER_IMEI string,USER_BELONG_AREA_CODE string,USER_BELONG_COUNTRY_CODE string,
-      USER_LONGITUDE double,USER_LATITUDE double,USER_MSC string,USER_BASE_STATION string,
-      USER_CURR_AREA_CODE string,USER_CURR_COUNTRY_CODE string,USER_SIGNAL_POINT string,USER_IP string,
-      ORIG_OPPO_NUM string,OPPO_NUM string,OPPO_IMSI string,OPPO_IMEI string,OPPO_BELONG_AREA_CODE string,
-      OPPO_BELONG_COUNTRY_CODE string,OPPO_LONGITUDE double,OPPO_LATITUDE double,OPPO_MSC string,OPPO_BASE_STATION string,
-      OPPO_CURR_AREA_CODE string,OPPO_CURR_COUNTRY_CODE string,OPPO_SIGNAL_POINT string,OPPO_IP string,RING_TIME timestamp,
-      CALL_ESTAB_TIME timestamp,END_TIME timestamp,CALL_DURATION string,CALL_STATUS_CODE int,DTMF string,ORIG_OTHER_NUM string,
-      OTHER_NUM string,ROAM_NUM string,SEND_TIME timestamp,ORIG_SMS_CONTENT string,ORIG_SMS_CODE int,SMS_CONTENT string,SMS_NUM int,
-      SMS_COUNT int,REMARK string,CONTENT_STATUS int,VOC_LENGTH string,FAX_PAGE_COUNT int,COM_OVER_CAUSE int,ROAM_TYPE int,SGSN_ADDR string,GGSN_ADDR string,
-      PDP_ADDR string,APN_NI string,APN_OI string,CARD_ID string,TIME_OUT int,LOGIN_TIME timestamp,USER_IMPU string,OPPO_IMPU string,USER_LAST_IMPI string,
-      USER_CURR_IMPI string,SUPSERVICE_TYPE string,SUPSERVICE_TYPE_SUBCODE string,SMS_CENTERNUM string,USER_LAST_LONGITUDE double,USER_LAST_LATITUDE double,
-      USER_LAST_MSC string,USER_LAST_BASE_STATION string,LOAD_ID string,P_CAP_TIME string) using carbon""".stripMargin)
-    spark.sql(s"""insert into c_jin
-      select
-      RECORD_ID,CDR_ID,LOCATION_CODE,SYSTEM_ID,
-      CLUE_ID,HIT_ELEMENT,CARRIER_CODE,CAP_TIME,
-      DEVICE_ID,DATA_CHARACTER,NETCELL_ID,NETCELL_TYPE,EQU_CODE,CLIENT_MAC,
-      SERVER_MAC,TUNNEL_TYPE,TUNNEL_IP_CLIENT,TUNNEL_IP_SERVER,
-      TUNNEL_ID_CLIENT,TUNNEL_ID_SERVER,SIDE_ONE_TUNNEL_ID,SIDE_TWO_TUNNEL_ID,
-      CLIENT_IP,SERVER_IP,TRANS_PROTOCOL,CLIENT_PORT,SERVER_PORT,APP_PROTOCOL,
-      CLIENT_AREA,SERVER_AREA,LANGUAGE,STYPE,SUMMARY,FILE_TYPE,FILENAME,
-      FILESIZE,BILL_TYPE,ORIG_USER_NUM,USER_NUM,USER_IMSI,
-      USER_IMEI,USER_BELONG_AREA_CODE,USER_BELONG_COUNTRY_CODE,
-      USER_LONGITUDE,USER_LATITUDE,USER_MSC,USER_BASE_STATION,
-      USER_CURR_AREA_CODE,USER_CURR_COUNTRY_CODE,USER_SIGNAL_POINT,USER_IP,
-      ORIG_OPPO_NUM,OPPO_NUM,OPPO_IMSI,OPPO_IMEI,OPPO_BELONG_AREA_CODE,
-      OPPO_BELONG_COUNTRY_CODE,OPPO_LONGITUDE,OPPO_LATITUDE,OPPO_MSC,OPPO_BASE_STATION,
-      OPPO_CURR_AREA_CODE,OPPO_CURR_COUNTRY_CODE,OPPO_SIGNAL_POINT,OPPO_IP,RING_TIME,
-      CALL_ESTAB_TIME,END_TIME,CALL_DURATION,CALL_STATUS_CODE,DTMF,ORIG_OTHER_NUM,
-      OTHER_NUM,ROAM_NUM,SEND_TIME,ORIG_SMS_CONTENT,ORIG_SMS_CODE,SMS_CONTENT,SMS_NUM,
-      SMS_COUNT,REMARK,CONTENT_STATUS,VOC_LENGTH,FAX_PAGE_COUNT,COM_OVER_CAUSE,ROAM_TYPE,SGSN_ADDR,GGSN_ADDR,
-      PDP_ADDR,APN_NI,APN_OI,CARD_ID,TIME_OUT,LOGIN_TIME,USER_IMPU,OPPO_IMPU,USER_LAST_IMPI,
-      USER_CURR_IMPI,SUPSERVICE_TYPE,SUPSERVICE_TYPE_SUBCODE,SMS_CENTERNUM,USER_LAST_LONGITUDE,USER_LAST_LATITUDE,
-      USER_LAST_MSC,USER_LAST_BASE_STATION,LOAD_ID,P_CAP_TIME
-      from h_jin""".stripMargin)
-    assert(spark.sql("select * from c_jin").collect().length == 1)
-    spark.sql("drop table if exists h_jin")
-    spark.sql("drop table if exists c_jin")
-  }
-
-  test("test write and create table with sort columns not allow") {
-    spark.sql("drop table if exists test123")
-    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-    import spark.implicits._
-    val df = spark.sparkContext.parallelize(1 to 10)
-      .map(x => ("a" + x % 10, "b", "c" + x, "d" + x, x.toShort, x, x.toLong, x.toDouble, BigDecimal
-        .apply(x)))
-      .toDF("c1", "c2", "c3", "c4", "shortc", "intc", "longc", "doublec", "bigdecimalc")
-
-    // Saves dataframe to carbon file
-    df.write.format("carbon").save(s"$warehouse1/test_folder/")
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      intercept[UnsupportedOperationException] {
-        spark
-          .sql(s"create table test123 using carbon options('sort_columns'='shortc,c2') location " +
-               s"'$warehouse1/test_folder/'")
-      }
-    }
-    spark.sql("drop table if exists test123")
-    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-  }
-
-  test("valdate if path not specified during table creation") {
-    spark.sql("drop table if exists test123")
-    val ex = intercept[AnalysisException] {
-      spark.sql(s"create table test123 using carbon options('sort_columns'='shortc,c2')")
-    }
-    assert(ex.getMessage().contains("Unable to infer schema for carbon"))
-  }
-
-  test("test double boundary") {
-    spark.sql("drop table if exists par")
-    spark.sql("drop table if exists car")
-
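-    // The literal below is slightly above Double.MaxValue (1.7976931348623157E308),
-    // probing how both formats handle the double boundary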
-    spark.sql("create table par (c1 string, c2 double, n int) using parquet")
-    spark.sql("create table car (c1 string, c2 double, n int) using carbon")
-    spark.sql("insert into par select 'a', 1.7986931348623157E308, 215565665556")
-        spark.sql("insert into car select 'a', 1.7986931348623157E308, 215565665556")
-
-    checkAnswer(spark.sql("select * from car"), spark.sql("select * from par"))
-    spark.sql("drop table if exists par")
-    spark.sql("drop table if exists car")
-  }
-
-  test("test clearing datamaps") {
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      import spark.implicits._
-      val df = spark.sparkContext.parallelize(1 to 10)
-        .map(x => ("a" + x % 10, "b", x))
-        .toDF("c1", "c2", "number")
-      spark.sql("drop table if exists testparquet")
-      spark.sql("drop table if exists carbon_table")
-      spark.sql("drop table if exists carbon_table1")
-      // Saves the dataframe as a parquet table
-      df.write
-        .format("parquet").saveAsTable("testparquet")
-      spark.sql("create table carbon_table(c1 string, c2 string, number int) using carbon")
-      spark.sql("create table carbon_table1(c1 string, c2 string, number int) using carbon")
-      spark.sql("insert into carbon_table select * from testparquet")
-      spark.sql("insert into carbon_table1 select * from testparquet")
-      DataMapStoreManager.getInstance().getAllDataMaps.clear()
-      spark.sql("select * from carbon_table where c1='a1'").collect()
-      assert(DataMapStoreManager.getInstance().getAllDataMaps.size() == 1)
-      spark.sql("select * from carbon_table where c1='a2'").collect()
-      assert(DataMapStoreManager.getInstance().getAllDataMaps.size() == 1)
-      spark.sql("select * from carbon_table1 where c1='a1'").collect()
-      assert(DataMapStoreManager.getInstance().getAllDataMaps.size() == 2)
-      spark.sql("select * from carbon_table1 where c1='a2'").collect()
-      assert(DataMapStoreManager.getInstance().getAllDataMaps.size() == 2)
-      DataMapStoreManager.getInstance()
-        .clearDataMaps(AbsoluteTableIdentifier.from(warehouse1 + "/carbon_table"))
-      assert(DataMapStoreManager.getInstance().getAllDataMaps.size() == 1)
-      DataMapStoreManager.getInstance()
-        .clearDataMaps(AbsoluteTableIdentifier.from(warehouse1 + "/carbon_table1"))
-      assert(DataMapStoreManager.getInstance().getAllDataMaps.size() == 0)
-      spark.sql("drop table if exists testparquet")
-      spark.sql("drop table if exists carbon_table")
-      spark.sql("drop table if exists carbon_table1")
-    }
-  }
-
-  test("test write using multi subfolder") {
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-      import spark.implicits._
-      val df = spark.sparkContext.parallelize(1 to 10)
-        .map(x => ("a" + x % 10, "b", x))
-        .toDF("c1", "c2", "number")
-
-      // Saves dataframe to carbon file
-      df.write.format("carbon").save(warehouse1 + "/test_folder/1/" + System.nanoTime())
-      df.write.format("carbon").save(warehouse1 + "/test_folder/2/" + System.nanoTime())
-      df.write.format("carbon").save(warehouse1 + "/test_folder/3/" + System.nanoTime())
-
-      val frame = spark.read.format("carbon").load(warehouse1 + "/test_folder")
-      assert(frame.count() == 30)
-      assert(frame.where("c1='a1'").count() == 3)
-      val mapSize = DataMapStoreManager.getInstance().getAllDataMaps.size()
-      DataMapStoreManager.getInstance()
-        .clearDataMaps(AbsoluteTableIdentifier.from(warehouse1 + "/test_folder"))
-      assert(mapSize > DataMapStoreManager.getInstance().getAllDataMaps.size())
-      FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(warehouse1 + "/test_folder"))
-    }
-  }
-
-  test("test read using old data") {
-    val store = new StoreCreator(new File(warehouse1).getAbsolutePath,
-      new File(warehouse1 + "../../../../../hadoop/src/test/resources/data.csv").getCanonicalPath)
-    store.createCarbonStore()
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/testdb/testtable/Fact/Part0/Segment_0/0"))
-    val dfread = spark.read.format("carbon").load(warehouse1+"/testdb/testtable/Fact/Part0/Segment_0")
-    dfread.show(false)
-    spark.sql("drop table if exists parquet_table")
-  }
-
-  test("test read using different sort order data") {
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME, "test")
-    if (!spark.sparkContext.version.startsWith("2.1")) {
-      spark.sql("drop table if exists old_comp")
-      FileFactory.deleteAllFilesOfDir(new File(warehouse1 + "/testdb"))
-      val store = new StoreCreator(new File(warehouse1).getAbsolutePath,
-        new File(warehouse1 + "../../../../../hadoop/src/test/resources/data.csv").getCanonicalPath)
-      store.setSortColumns(new util.ArrayList[String](Seq("name").asJava))
-      var model = store.createTableAndLoadModel(false)
-      model.setSegmentId("0")
-      store.createCarbonStore(model)
-      FileFactory.deleteAllFilesOfDir(new File(warehouse1 + "/testdb/testtable/Fact/Part0/Segment_0/0"))
-      store.setSortColumns(new util.ArrayList[String](Seq("country","phonetype").asJava))
-      model = store.createTableAndLoadModel(false)
-      model.setSegmentId("1")
-      store.createCarbonStore(model)
-      FileFactory.deleteAllFilesOfDir(new File(warehouse1 + "/testdb/testtable/Fact/Part0/Segment_1/0"))
-      store.setSortColumns(new util.ArrayList[String](Seq("date").asJava))
-      model = store.createTableAndLoadModel(false)
-      model.setSegmentId("2")
-      store.createCarbonStore(model)
-      FileFactory.deleteAllFilesOfDir(new File(warehouse1 + "/testdb/testtable/Fact/Part0/Segment_2/0"))
-      store.setSortColumns(new util.ArrayList[String](Seq("serialname").asJava))
-      model = store.createTableAndLoadModel(false)
-      model.setSegmentId("3")
-      store.createCarbonStore(model)
-      FileFactory.deleteAllFilesOfDir(new File(warehouse1 + "/testdb/testtable/Fact/Part0/Segment_3/0"))
-      spark.sql(s"create table old_comp(id int, date string, country string, name string, phonetype string, serialname string, salary int) using carbon options(path='$warehouse1/testdb/testtable/Fact/Part0/', 'sort_columns'='name')")
-
-      assert(spark.sql("select * from old_comp where country='china'").count() == 3396)
-      assert(spark.sql("select * from old_comp ").count() == 4000)
-      spark.sql("drop table if exists old_comp")
-
-      spark.sql(s"create table old_comp1 using carbon options(path='$warehouse1/testdb/testtable/Fact/Part0/')")
-      assert(spark.sql("select * from old_comp1 where country='china'").count() == 3396)
-      assert(spark.sql("select * from old_comp1 ").count() == 4000)
-      spark.sql("drop table if exists old_comp1")
-      FileFactory.deleteAllFilesOfDir(new File(warehouse1 + "/testdb"))
-    }
-  }
-
-  test("test write sdk and read with spark using different sort order data") {
-    spark.sql("drop table if exists sdkout")
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk"))
-    buildTestDataOtherDataType(5, Array("age", "address"), warehouse1+"/sdk")
-    spark.sql(s"create table sdkout using carbon options(path='$warehouse1/sdk')")
-    assert(spark.sql("select * from sdkout").collect().length == 5)
-    buildTestDataOtherDataType(5, Array("name","salary"), warehouse1+"/sdk")
-    spark.sql("refresh table sdkout")
-    assert(spark.sql("select * from sdkout where name = 'name1'").collect().length == 2)
-    assert(spark.sql("select * from sdkout where salary=100").collect().length == 2)
-    buildTestDataOtherDataType(5, Array("name","age"), warehouse1+"/sdk")
-    spark.sql("refresh table sdkout")
-    assert(spark.sql("select * from sdkout where name='name0'").collect().length == 3)
-    assert(spark.sql("select * from sdkout").collect().length == 15)
-    assert(spark.sql("select * from sdkout where salary=100").collect().length == 3)
-    assert(spark.sql("select * from sdkout where address='address1'").collect().length == 3)
-    buildTestDataOtherDataType(5, Array("name","salary"), warehouse1+"/sdk")
-    spark.sql("refresh table sdkout")
-    assert(spark.sql("select * from sdkout where name='name0'").collect().length == 4)
-    assert(spark.sql("select * from sdkout").collect().length == 20)
-    assert(spark.sql("select * from sdkout where salary=100").collect().length == 4)
-    assert(spark.sql("select * from sdkout where address='address1'").collect().length == 4)
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk"))
-  }
-
-  test("test write sdk with different schema and read with spark") {
-    spark.sql("drop table if exists sdkout")
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    buildTestDataOtherDataType(5, Array("age", "address"), warehouse1+"/sdk1")
-    spark.sql(s"create table sdkout using carbon options(path='$warehouse1/sdk1')")
-    assert(spark.sql("select * from sdkout").collect().length == 5)
-    buildTestDataOtherDataType(5, null, warehouse1+"/sdk1", 2)
-    spark.sql("refresh table sdkout")
-    assert(spark.sql("select * from sdkout").count() == 10)
-    assert(spark.sql("select * from sdkout where salary=100").count() == 1)
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-  }
-
-  test("test Float data type by giving schema explicitly and desc formatted") {
-    spark.sql("drop table if exists sdkout")
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    buildTestDataOtherDataType(5, Array("age", "address"), warehouse1+"/sdk1")
-    spark.sql(s"create table sdkout(male boolean, age int, height double, name string, address " +
-              s"string," +
-              s"salary long, floatField float, bytefield byte) using carbon options " +
-              s"(path='$warehouse1/sdk1')")
-    assert(spark.sql("desc formatted sdkout").collect().take(7).reverse.head.get(1).equals("float"))
-    assert(spark.sql("desc formatted sdkout").collect().take(8).reverse.head.get(1).equals
-    ("tinyint"))
-  }
-
-  test("test select * on table with float data type") {
-    spark.sql("drop table if exists sdkout")
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    buildTestDataOtherDataType(11, Array("age", "address"), warehouse1 + "/sdk1")
-    spark.sql(s"create table sdkout(male boolean, age int, height double, name string, address " +
-              s"string," +
-              s"salary long, floatField float, bytefield byte) using carbon options (path='$warehouse1/sdk1')")
-    checkAnswer(spark.sql("select * from par_table"), spark.sql("select * from sdkout"))
-    checkAnswer(spark.sql("select floatfield from par_table"), spark.sql("select floatfield from sdkout"))
-  }
-
-  test("test various filters on float data") {
-    spark.sql("drop table if exists sdkout")
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    buildTestDataOtherDataType(11, Array("age", "address"), warehouse1 + "/sdk1")
-    spark.sql(s"create table sdkout(male boolean, age int, height double, name string, address " +
-              s"string," +
-              s"salary long, floatField float, bytefield byte) using carbon options (path='$warehouse1/sdk1')")
-    checkAnswer(spark.sql("select * from par_table where floatfield < 10"),
-      spark.sql("select * from sdkout where floatfield < 10"))
-    checkAnswer(spark.sql("select * from par_table where floatfield > 5.3"),
-      spark.sql("select * from sdkout where floatfield > 5.3"))
-    checkAnswer(spark.sql("select * from par_table where floatfield >= 4.1"),
-      spark.sql("select * from sdkout where floatfield >= 4.1"))
-    checkAnswer(spark.sql("select * from par_table where floatfield != 5.5"),
-      spark.sql("select * from sdkout where floatfield != 5.5"))
-    checkAnswer(spark.sql("select * from par_table where floatfield <= 5"),
-      spark.sql("select * from sdkout where floatfield <= 5"))
-    checkAnswer(spark.sql("select * from par_table where floatfield >= 5"),
-      spark.sql("select * from sdkout where floatfield >= 5"))
-    checkAnswer(spark.sql("select * from par_table where floatfield IN ('5.5','6.6')"),
-      spark.sql("select * from sdkout where floatfield IN ('5.5','6.6')"))
-    checkAnswer(spark.sql("select * from par_table where floatfield NOT IN ('5.5','6.6')"),
-      spark.sql("select * from sdkout where floatfield NOT IN ('5.5','6.6')"))
-    checkAnswer(spark.sql("select * from par_table where floatfield = cast('6.6' as float)"),
-      spark.sql("select * from sdkout where floatfield = cast('6.6' as float)"))
-  }
-
-  test("test select * on table with byte data type") {
-    spark.sql("drop table if exists sdkout")
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    buildTestDataOtherDataType(11, Array("age", "address"), warehouse1 + "/sdk1")
-    spark.sql(s"create table sdkout(male boolean, age int, height double, name string, address " +
-              s"string," +
-              s"salary long, floatField float, bytefield byte) using carbon options " +
-              s"(path='$warehouse1/sdk1')")
-    checkAnswer(spark.sql("select * from par_table"), spark.sql("select * from sdkout"))
-    checkAnswer(spark.sql("select byteField from par_table"), spark.sql("select bytefield from sdkout"))
-  }
-
-  test("test various filters on byte data") {
-    spark.sql("drop table if exists sdkout")
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    buildTestDataOtherDataType(11, Array("age", "address"), warehouse1 + "/sdk1")
-    spark.sql(s"create table sdkout(male boolean, age int, height double, name string, address " +
-              s"string," +
-              s"salary long, floatField float, bytefield byte) using carbon options " +
-              s"(path='$warehouse1/sdk1')")
-    checkAnswer(spark.sql("select * from par_table where bytefield < 10"),
-      spark.sql("select * from sdkout where bytefield < 10"))
-    checkAnswer(spark.sql("select * from par_table where bytefield > 5"),
-      spark.sql("select * from sdkout where bytefield > 5"))
-    checkAnswer(spark.sql("select * from par_table where bytefield >= 4"),
-      spark.sql("select * from sdkout where bytefield >= 4"))
-    checkAnswer(spark.sql("select * from par_table where bytefield != 5"),
-      spark.sql("select * from sdkout where bytefield != 5"))
-    checkAnswer(spark.sql("select * from par_table where bytefield <= 5"),
-      spark.sql("select * from sdkout where bytefield <= 5"))
-    checkAnswer(spark.sql("select * from par_table where bytefield >= 5"),
-      spark.sql("select * from sdkout where bytefield >= 5"))
-    checkAnswer(spark.sql("select * from par_table where bytefield IN ('5','6')"),
-      spark.sql("select * from sdkout where bytefield IN ('5','6')"))
-    checkAnswer(spark.sql("select * from par_table where bytefield NOT IN ('5','6')"),
-      spark.sql("select * from sdkout where bytefield NOT IN ('5','6')"))
-  }
-
-  test("test struct of float type and byte type") {
-    import scala.collection.JavaConverters._
-    val path = FileFactory.getPath(warehouse1+"/sdk1").toString
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    spark.sql("drop table if exists complextable")
-    val fields = List(new StructField
-    ("byteField", DataTypes.BYTE), new StructField("floatField", DataTypes.FLOAT))
-    val structType = Array(new Field("stringfield", DataTypes.STRING), new Field
-    ("structField", "struct", fields.asJava))
-
-
-    try {
-      val builder = CarbonWriter.builder()
-      val writer =
-        builder.outputPath(path)
-          .uniqueIdentifier(System.nanoTime()).withBlockSize(2)
-          .withCsvInput(new Schema(structType)).writtenBy("SparkCarbonDataSourceTest").build()
-
-      var i = 0
-      while (i < 11) {
-        val array = Array[String](s"name$i", s"$i" + "\001" +s"$i.${i}12")
-        writer.write(array)
-        i += 1
-      }
-      writer.close()
-      if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-        if (!FileFactory.isFileExist(path)) {
-          FileFactory.createDirectoryAndSetPermission(path,
-            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-        }
-        spark.sql("create table complextable (stringfield string, structfield struct<bytefield: " +
-          "byte, floatfield: float>) " +
-          s"using carbon options(path '$path')")
-      } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-        spark.sql("create table complextable (stringfield string, structfield struct<bytefield: " +
-          "byte, floatfield: float>) " +
-          s"using carbon location '$path'")
-      }
-    } catch {
-      case ex: Throwable => throw new RuntimeException(ex)
-    }
-    checkAnswer(spark.sql("select * from complextable limit 1"), Seq(Row("name0", Row(0
-      .asInstanceOf[Byte], 0.012.asInstanceOf[Float]))))
-    checkAnswer(spark.sql("select * from complextable where structfield.bytefield > 9"), Seq(Row
-    ("name10", Row(10.asInstanceOf[Byte], 10.1012.asInstanceOf[Float]))))
-    checkAnswer(spark.sql("select * from complextable where structfield.bytefield > 9"), Seq(Row
-    ("name10", Row(10.asInstanceOf[Byte], 10.1012.asInstanceOf[Float]))))
-    checkAnswer(spark.sql("select * from complextable where structfield.floatfield > 9.912"), Seq
-    (Row
-    ("name10", Row(10.asInstanceOf[Byte], 10.1012.asInstanceOf[Float]))))
-    checkAnswer(spark.sql("select * from complextable where structfield.floatfield > 9.912 and " +
-                          "structfield.bytefield < 11"), Seq(Row("name10", Row(10.asInstanceOf[Byte], 10.1012.asInstanceOf[Float]))))
-  }
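
For reference, the "\001" used above is the level-1 complex-type delimiter that
the SDK's CSV input expects between the children of a struct value, as used in
these tests. A minimal standalone sketch (the import paths and the output path
are assumptions, not taken from this file):

    import scala.collection.JavaConverters._
    import org.apache.carbondata.core.metadata.datatype.{DataTypes, StructField}
    import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}

    val children = List(new StructField("byteField", DataTypes.BYTE),
      new StructField("floatField", DataTypes.FLOAT))
    val schema = new Schema(Array(
      new Field("stringfield", DataTypes.STRING),
      new Field("structField", "struct", children.asJava)))
    val writer = CarbonWriter.builder()
      .outputPath("/tmp/struct_example") // hypothetical path
      .withCsvInput(schema)
      .writtenBy("example")
      .build()
    // both struct children travel in one CSV cell, joined by "\001"
    writer.write(Array[String]("name0", "1" + "\001" + "1.5"))
    writer.close()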
-
-  test("test bytefield as sort column") {
-    val path = FileFactory.getPath(warehouse1+"/sdk1").toString
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    val fields: Array[Field] = new Array[Field](6)
-    // six columns, with bytefield used later as the sort column
-    fields(0) = new Field("age", DataTypes.INT)
-    fields(1) = new Field("height", DataTypes.DOUBLE)
-    fields(2) = new Field("name", DataTypes.STRING)
-    fields(3) = new Field("address", DataTypes.STRING)
-    fields(4) = new Field("salary", DataTypes.LONG)
-    fields(5) = new Field("bytefield", DataTypes.BYTE)
-
-    try {
-      val builder = CarbonWriter.builder()
-      val writer =
-        builder.outputPath(path)
-          .uniqueIdentifier(System.nanoTime()).withBlockSize(2).sortBy(Array("bytefield"))
-          .withCsvInput(new Schema(fields)).writtenBy("SparkCarbonDataSourceTest").build()
-
-      var i = 0
-      while (i < 11) {
-        val array = Array[String](
-          String.valueOf(i),
-          String.valueOf(i.toDouble / 2),
-          "name" + i,
-          "address" + i,
-          (i * 100).toString,
-          s"${10 - i}")
-        writer.write(array)
-        i += 1
-      }
-      writer.close()
-      spark.sql("drop table if exists sorted_par")
-      spark.sql("drop table if exists sort_table")
-      val path2 = s"$warehouse1/../warehouse2"
-      if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-        if (!FileFactory.isFileExist(path)) {
-          FileFactory.createDirectoryAndSetPermission(path,
-            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-        }
-        spark.sql(s"create table sort_table (age int, height double, name string, address string," +
-          s" salary long, bytefield byte) using carbon  options(path '$path')")
-        FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(s"$warehouse1/../warehouse2"))
-        if (!FileFactory.isFileExist(path2)) {
-          FileFactory.createDirectoryAndSetPermission(path2,
-            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-        }
-        spark.sql(s"create table sorted_par(age int, height double, name string, address " +
-          s"string," +
-          s"salary long, bytefield byte) using parquet options(path " +
-          s"'$path2')")
-      } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-        spark.sql(s"create table sort_table (age int, height double, name string, address string," +
-          s" salary long, bytefield byte) using carbon location '$path'")
-        FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(s"$warehouse1/../warehouse2"))
-        spark.sql(s"create table sorted_par(age int, height double, name string, address " +
-          s"string," +
-          s"salary long, bytefield byte) using parquet location " +
-          s"'$warehouse1/../warehouse2'")
-      }
-
-      (0 to 10).foreach {
-        i =>
-          spark.sql(s"insert into sorted_par select '$i', ${ i.toDouble / 2 }, 'name$i', " +
-                    s"'address$i', ${ i * 100 }, '${ 10 - i }'")
-      }
-      checkAnswer(spark.sql("select * from sorted_par order by bytefield"),
-        spark.sql("select * from sort_table"))
-    } catch {
-      case ex: Throwable => throw new RuntimeException(ex)
-    }
-  }
-
-  test("test array of float type and byte type") {
-    import scala.collection.JavaConverters._
-    val path = FileFactory.getPath(warehouse1+"/sdk1").toString
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    spark.sql("drop table if exists complextable")
-    val structType =
-      Array(new Field("stringfield", DataTypes.STRING),
-        new Field("bytearray", "array", List(new StructField("byteField", DataTypes.BYTE))
-          .asJava),
-        new Field("floatarray", "array", List(new StructField("floatfield", DataTypes.FLOAT))
-          .asJava))
-
-    try {
-      val builder = CarbonWriter.builder()
-      val writer =
-        builder.outputPath(path)
-          .uniqueIdentifier(System.nanoTime()).withBlockSize(2)
-          .withCsvInput(new Schema(structType)).writtenBy("SparkCarbonDataSourceTest").build()
-
-      var i = 0
-      while (i < 10) {
-        val array = Array[String](s"name$i",s"$i" + "\001" + s"${i*2}", s"${i/2}" + "\001" + s"${i/3}")
-        writer.write(array)
-        i += 1
-      }
-      writer.close()
-      if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-        if (!FileFactory.isFileExist(path)) {
-          FileFactory.createDirectoryAndSetPermission(path,
-            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-        }
-        spark.sql(s"create table complextable (stringfield string, bytearray " +
-          s"array<byte>, floatarray array<float>) using carbon " +
-          s"options( path " +
-          s"'$path')")
-      } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-        spark.sql(s"create table complextable (stringfield string, bytearray " +
-          s"array<byte>, floatarray array<float>) using carbon " +
-          s"location " +
-          s"'$path'")
-      }
-    } catch {
-      case ex: Throwable => throw new RuntimeException(ex)
-    }
-    checkAnswer(spark.sql("select * from complextable limit 1"), Seq(Row("name0", mutable
-      .WrappedArray.make(Array[Byte](0, 0)), mutable.WrappedArray.make(Array[Float](0.0f, 0.0f)))))
-    checkAnswer(spark.sql("select * from complextable where bytearray[0] = 1"), Seq(Row("name1",
-      mutable.WrappedArray.make(Array[Byte](1, 2)), mutable.WrappedArray.make(Array[Float](0.0f,
-        0.0f)))))
-    checkAnswer(spark.sql("select * from complextable where bytearray[0] > 8"), Seq(Row("name9",
-      mutable.WrappedArray.make(Array[Byte](9, 18)), mutable.WrappedArray.make(Array[Float](4.0f,
-        3.0f)))))
-    checkAnswer(spark.sql("select * from complextable where floatarray[0] IN (4.0) and stringfield = 'name8'"), Seq(Row
-    ("name8",
-      mutable.WrappedArray.make(Array[Byte](8, 16)), mutable.WrappedArray.make(Array[Float](4.0f,
-      2.0f)))))
-  }
-
-  private def createParquetTable(): Unit = {
-    val path = FileFactory.getUpdatedFilePath(s"$warehouse1/../warehouse2")
-    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(s"$path"))
-    if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-      if (!FileFactory.isFileExist(path)) {
-        FileFactory.createDirectoryAndSetPermission(path,
-          new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-      }
-      spark.sql(s"create table par_table(male boolean, age int, height double, name string, address " +
-        s"string," +
-        s"salary long, floatField float, bytefield byte) using parquet options(path '$path')")
-    } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-      spark.sql(s"create table par_table(male boolean, age int, height double, name string, address " +
-        s"string," +
-        s"salary long, floatField float, bytefield byte) using parquet location '$path'")
-    }
-
-    (0 to 10).foreach {
-      i => spark.sql(s"insert into par_table select 'true','$i', ${i.toDouble / 2}, 'name$i', " +
-                     s"'address$i', ${i*100}, $i.$i, '$i'")
-    }
-  }
-
-  // prepare sdk writer output with other schema
-  def buildTestDataOtherDataType(rows: Int, sortColumns: Array[String], writerPath: String, colCount: Int = -1): Unit = {
-    var fields: Array[Field] = new Array[Field](8)
-    // same column names as par_table; the first column is of boolean type
-    fields(0) = new Field("male", DataTypes.BOOLEAN)
-    fields(1) = new Field("age", DataTypes.INT)
-    fields(2) = new Field("height", DataTypes.DOUBLE)
-    fields(3) = new Field("name", DataTypes.STRING)
-    fields(4) = new Field("address", DataTypes.STRING)
-    fields(5) = new Field("salary", DataTypes.LONG)
-    fields(6) = new Field("floatField", DataTypes.FLOAT)
-    fields(7) = new Field("bytefield", DataTypes.BYTE)
-
-    if (colCount > 0) {
-      // keep only the first colCount columns of the full schema
-      fields = fields.take(colCount)
-    }
-
-    try {
-      val builder = CarbonWriter.builder()
-      val writer =
-        builder.outputPath(writerPath)
-          .uniqueIdentifier(System.nanoTime()).withBlockSize(2).sortBy(sortColumns)
-          .withCsvInput(new Schema(fields)).writtenBy("SparkCarbonDataSourceTest").build()
-
-      var i = 0
-      while (i < rows) {
-        val array = Array[String]("true",
-          String.valueOf(i),
-          String.valueOf(i.toDouble / 2),
-          "name" + i,
-          "address" + i,
-          (i * 100).toString,
-          s"$i.$i", s"$i")
-        if (colCount > 0) {
-          writer.write(array.slice(0, colCount))
-        } else {
-          writer.write(array)
-        }
-        i += 1
-      }
-      writer.close()
-    } catch {
-      case ex: Throwable => throw new RuntimeException(ex)
-    }
-  }
-
-  def buildStructSchemaWithNestedArrayOfMapTypeAsValue(writerPath: String, rows: Int): Unit = {
-    FileFactory.deleteAllFilesOfDir(new File(writerPath))
-    val mySchema =
-      """
-        |{
-        |  "name": "address",
-        |  "type": "record",
-        |  "fields": [
-        |    {
-        |      "name": "name",
-        |      "type": "string"
-        |    },
-        |    {
-        |      "name": "age",
-        |      "type": "int"
-        |    },
-        |    {
-        |      "name": "structRecord",
-        |      "type": {
-        |        "type": "record",
-        |        "name": "my_address",
-        |        "fields": [
-        |          {
-        |            "name": "street",
-        |            "type": "string"
-        |          },
-        |          {
-        |            "name": "houseDetails",
-        |            "type": {
-        |               "type": "array",
-        |               "items": {
-        |                   "name": "memberDetails",
-        |                   "type": "map",
-        |                   "values": "string"
-        |                }
-        |             }
-        |          }
-        |        ]
-        |      }
-        |    }
-        |  ]
-        |}
-      """.stripMargin
-    val json = """ {"name":"bob", "age":10, "structRecord": {"street":"street1", "houseDetails": [{"101": "Rahul", "102": "Pawan"}]}} """.stripMargin
-    TestUtil.WriteFilesWithAvroWriter(writerPath, rows, mySchema, json)
-  }
-
-  test("test external table with struct type with value as nested struct<array<map>> type") {
-    val writerPath: String = FileFactory.getUpdatedFilePath(warehouse1 + "/sdk1")
-    val rowCount = 3
-    buildStructSchemaWithNestedArrayOfMapTypeAsValue(writerPath, rowCount)
-    spark.sql("drop table if exists carbon_external")
-    if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-      if (!FileFactory.isFileExist(writerPath)) {
-        FileFactory.createDirectoryAndSetPermission(writerPath,
-          new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-      }
-      spark.sql(s"create table carbon_external using carbon options(path '$writerPath')")
-    } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-      spark.sql(s"create table carbon_external using carbon location '$writerPath'")
-    }
-    assert(spark.sql("select * from carbon_external").count() == rowCount)
-    spark.sql("drop table if exists carbon_external")
-  }
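
For reference, the external table above exposes structRecord with the SQL type
struct<street: string, houseDetails: array<map<string, string>>> (a sketch of
the expected Avro-to-Carbon mapping, not output captured from this test), so
the JSON row written by the helper reads back roughly as:

    Row("bob", 10, Row("street1", Array(Map("101" -> "Rahul", "102" -> "Pawan"))))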
-
-  test("test byte and float for multiple pages") {
-    val path = FileFactory.getPath(warehouse1+"/sdk1").toString
-    FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    spark.sql("drop table if exists multi_page")
-    val fields: Array[Field] = new Array[Field](3)
-    // three columns; 33000 rows forces more than one 32000-row page
-    fields(0) = new Field("a", DataTypes.STRING)
-    fields(1) = new Field("b", DataTypes.FLOAT)
-    fields(2) = new Field("c", DataTypes.BYTE)
-
-    try {
-      val builder = CarbonWriter.builder()
-      val writer =
-        builder.outputPath(path)
-          .uniqueIdentifier(System.nanoTime()).withBlockSize(2)
-          .withCsvInput(new Schema(fields)).writtenBy("SparkCarbonDataSourceTest").build()
-
-      var i = 0
-      while (i < 33000) {
-        val array = Array[String](
-          String.valueOf(i),
-          s"$i.3200", "32")
-        writer.write(array)
-        i += 1
-      }
-      writer.close()
-      if (SparkUtil.isSparkVersionEqualTo("2.1")) {
-        if (!FileFactory.isFileExist(path)) {
-          FileFactory.createDirectoryAndSetPermission(path,
-            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-        }
-        spark.sql(s"create table multi_page (a string, b float, c byte) using carbon options(path " +
-          s"'$path')")
-      } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-        spark.sql(s"create table multi_page (a string, b float, c byte) using carbon location " +
-          s"'$path'")
-      }
-      assert(spark.sql("select * from multi_page").count() == 33000)
-    } catch {
-      case ex: Exception => throw new RuntimeException(ex)
-    } finally {
-      FileFactory.deleteAllFilesOfDir(new File(warehouse1+"/sdk1"))
-    }
-  }
-
-  test("test partition issue with add location") {
-    spark.sql("drop table if exists partitionTable_obs")
-    spark.sql("drop table if exists partitionTable_obs_par")
-    spark.sql(s"create table partitionTable_obs (id int,name String,email String) using carbon partitioned by(email) ")
-    spark.sql(s"create table partitionTable_obs_par (id int,name String,email String) using parquet partitioned by(email) ")
-    spark.sql("insert into partitionTable_obs select 1,'huawei','abc'")
-    spark.sql("insert into partitionTable_obs select 1,'huawei','bcd'")
-    spark.sql(s"alter table partitionTable_obs add partition (email='def') location '$warehouse1/test_folder121/'")
-    spark.sql("insert into partitionTable_obs select 1,'huawei','def'")
-
-    spark.sql("insert into partitionTable_obs_par select 1,'huawei','abc'")
-    spark.sql("insert into partitionTable_obs_par select 1,'huawei','bcd'")
-    spark.sql(s"alter table partitionTable_obs_par add partition (email='def') location '$warehouse1/test_folder122/'")
-    spark.sql("insert into partitionTable_obs_par select 1,'huawei','def'")
-
-    checkAnswer(spark.sql("select * from partitionTable_obs"), spark.sql("select * from partitionTable_obs_par"))
-    spark.sql("drop table if exists partitionTable_obs")
-    spark.sql("drop table if exists partitionTable_obs_par")
-  }
-
-  test("test multiple partition  select issue") {
-    spark.sql("drop table if exists t_carbn01b_hive")
-    spark.sql(s"drop table if exists t_carbn01b")
-    spark.sql("create table t_carbn01b_hive(Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Create_date String,Active_status String,Item_type_cd INT, Update_time TIMESTAMP, Discount_price DOUBLE)  using parquet partitioned by (Active_status,Item_type_cd, Update_time, Discount_price)")
-    spark.sql("create table t_carbn01b(Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Create_date String,Active_status String,Item_type_cd INT, Update_time TIMESTAMP, Discount_price DOUBLE)  using carbon partitioned by (Active_status,Item_type_cd, Update_time, Discount_price)")
-    spark.sql("insert into t_carbn01b partition(Active_status, Item_type_cd,Update_time,Discount_price) select * from t_carbn01b_hive")
-    spark.sql("alter table t_carbn01b add partition (active_status='xyz',Item_type_cd=12,Update_time=NULL,Discount_price='3000')")
-    spark.sql("insert overwrite table t_carbn01b select 'xyz', 12, 74,3000,20000000,121.5,4.99,2.44,'RE3423ee','dddd', 'ssss','2012-01-02 23:04:05.12', '2012-01-20'")
-    spark.sql("insert overwrite table t_carbn01b_hive select 'xyz', 12, 74,3000,20000000,121.5,4.99,2.44,'RE3423ee','dddd', 'ssss','2012-01-02 23:04:05.12', '2012-01-20'")
-    checkAnswer(spark.sql("select * from t_carbn01b_hive"), spark.sql("select * from t_carbn01b"))
-    spark.sql("drop table if exists t_carbn01b_hive")
-    spark.sql(s"drop table if exists t_carbn01b")
-  }
-
-  test("Test Float value by having negative exponents") {
-    spark.sql("DROP TABLE IF EXISTS float_p")
-    spark.sql("DROP TABLE IF EXISTS float_c")
-    spark.sql("CREATE TABLE float_p(f float) using parquet")
-    spark.sql("CREATE TABLE float_c(f float) using carbon")
-    spark.sql("INSERT INTO float_p select \"1.4E-3\"")
-    spark.sql("INSERT INTO float_p select \"1.4E-38\"")
-    spark.sql("INSERT INTO float_c select \"1.4E-3\"")
-    spark.sql("INSERT INTO float_c select \"1.4E-38\"")
-    checkAnswer(spark.sql("SELECT * FROM float_p"),
-      spark.sql("SELECT * FROM float_c"))
-    spark.sql("DROP TABLE float_p")
-    spark.sql("DROP TABLE float_c")
-  }
-
-  test("test fileformat flow with drop and query on same table") {
-    spark.sql("drop table if exists fileformat_drop")
-    spark.sql("drop table if exists fileformat_drop_hive")
-    spark.sql("create table fileformat_drop (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId double,productionDate Timestamp,deliveryDate timestamp,deliverycharge double) using carbon options('table_blocksize'='1','LOCAL_DICTIONARY_ENABLE'='TRUE','LOCAL_DICTIONARY_THRESHOLD'='1000')")
-    spark.sql("create table fileformat_drop_hive(imei string,deviceInformationId double,AMSize string,channelsId string,ActiveCountry string,Activecity string,gamePointId double,productionDate Timestamp,deliveryDate timestamp,deliverycharge double)row format delimited FIELDS terminated by ',' LINES terminated by '\n' stored as textfile")
-    val sourceFile = FileFactory.getPath(s"$resource/vardhandaterestruct.csv").toString
-    spark.sql(s"load data local inpath '$sourceFile' into table fileformat_drop_hive")
-    spark.sql("insert into fileformat_drop select imei ,deviceInformationId ,AMSize ,channelsId ,ActiveCountry ,Activecity ,gamePointId ,productionDate ,deliveryDate ,deliverycharge from fileformat_drop_hive")
-    assert(spark.sql("select count(*) from fileformat_drop where imei='1AA10000'").collect().length == 1)
-
-    spark.sql("drop table if exists fileformat_drop")
-    spark.sql("create table fileformat_drop (imei string,deviceInformationId double,AMSize string,channelsId string,ActiveCountry string,Activecity string,gamePointId float,productionDate timestamp,deliveryDate timestamp,deliverycharge decimal(10,2)) using carbon options('table_blocksize'='1','LOCAL_DICTIONARY_ENABLE'='true','local_dictionary_threshold'='1000')")
-    spark.sql("insert into fileformat_drop select imei ,deviceInformationId ,AMSize ,channelsId ,ActiveCountry ,Activecity ,gamePointId ,productionDate ,deliveryDate ,deliverycharge from fileformat_drop_hive")
-    assert(spark.sql("select count(*) from fileformat_drop where imei='1AA10000'").collect().length == 1)
-    spark.sql("drop table if exists fileformat_drop")
-    spark.sql("drop table if exists fileformat_drop_hive")
-  }
-
-  test("test complexdatype for date and timestamp datatype") {
-    spark.sql("drop table if exists fileformat_date")
-    spark.sql("drop table if exists fileformat_date_hive")
-    spark.sql("create table fileformat_date_hive(name string, age int, dob array<date>, joinTime array<timestamp>) using parquet")
-    spark.sql("create table fileformat_date(name string, age int, dob array<date>, joinTime array<timestamp>) using carbon")
-    spark.sql("insert into fileformat_date_hive select 'joey', 32, array('1994-04-06','1887-05-06'), array('1994-04-06 00:00:05','1887-05-06 00:00:08')")
-    spark.sql("insert into fileformat_date select 'joey', 32, array('1994-04-06','1887-05-06'), array('1994-04-06 00:00:05','1887-05-06 00:00:08')")
-    checkAnswer(spark.sql("select * from fileformat_date_hive"), spark.sql("select * from fileformat_date"))
-  }
-
-  test("validate the columns not present in schema") {
-    spark.sql("drop table if exists validate")
-    spark.sql("create table validate (name string, age int, address string) using carbon options('inverted_index'='abc')")
-    val ex = intercept[Exception] {
-      spark.sql("insert into validate select 'abc',4,'def'")
-    }
-    assert(ex.getMessage.contains("column: abc specified in inverted index columns does not exist in schema"))
-  }
-
-  val writerPath = new File(this.getClass.getResource("/").getPath
-          + "../../target/SparkCarbonFileFormat/WriterOutput/")
-          .getCanonicalPath
-
-  test("Don't support load for datasource") {
-    import spark._
-    sql("DROP TABLE IF EXISTS binaryCarbon")
-    if (SparkUtil.isSparkVersionXandAbove("2.2")) {
-      sql(
-        s"""
-           | CREATE TABLE binaryCarbon(
-           |    binaryId INT,
-           |    binaryName STRING,
-           |    binary BINARY,
-           |    labelName STRING,
-           |    labelContent STRING
-           |) USING CARBON  """.stripMargin)
-
-      val exception = intercept[Exception] {
-        sql(s"load data local inpath '$writerPath' into table binaryCarbon")
-      }
-      assert(exception.getMessage.contains("LOAD DATA is not supported for datasource tables"))
-    }
-    sql("DROP TABLE IF EXISTS binaryCarbon")
-  }
-
-    test("test load data with binary_decoder in df") {
-        import spark._
-        try {
-            sql("DROP TABLE IF EXISTS carbon_table")
-            val rdd = spark.sparkContext.parallelize(1 to 3)
-                    .map(x => Row("a" + x % 10, "b", x, "YWJj".getBytes()))
-            val customSchema = StructType(Array(
-                SparkStructField("c1", StringType),
-                SparkStructField("c2", StringType),
-                SparkStructField("number", IntegerType),
-                SparkStructField("c4", BinaryType)))
-
-            val df = spark.createDataFrame(rdd, customSchema);
-            // Saves dataFrame to carbon file
-            df.write.format("carbon")
-                    .option("binary_decoder", "base64")
-                    .saveAsTable("carbon_table")
-            val path = warehouse1 + "/carbon_table"
-
-            val carbonDF = spark.read
-                    .format("carbon")
-                    .option("tablename", "carbon_table")
-                    .schema(customSchema)
-                    .load(path)  // TODO: check why can not read when without path
-            assert(carbonDF.schema.map(_.name) === Seq("c1", "c2", "number", "c4"))
-            // "YWJj" is base64 decode data of "abc" string,
-            // but spark doesn't support string for binary, so we use byte[] and
-            // carbon will not decode for byte
-            checkAnswer(carbonDF, Seq(Row("a1", "b", 1, "YWJj".getBytes()),
-                Row("a2", "b", 2, "YWJj".getBytes()),
-                Row("a3", "b", 3, "YWJj".getBytes())))
-
-            val carbonDF2 = carbonDF.drop("c1")
-            assert(carbonDF2.schema.map(_.name) === Seq("c2", "number", "c4"))
-            checkAnswer(sql(s"select * from carbon.`$path`"),
-                Seq(Row("a1", "b", 1, "YWJj".getBytes()),
-                    Row("a2", "b", 2, "YWJj".getBytes()),
-                    Row("a3", "b", 3, "YWJj".getBytes())))
-        } catch {
-            case e: Exception =>
-                e.printStackTrace()
-                assert(false)
-        } finally {
-            sql("DROP TABLE IF EXISTS carbon_table")
-        }
-    }
-
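As an aside, "YWJj" in the binary tests here is simply the base64 encoding of
"abc", which is easy to verify with the standard JDK API:

    java.util.Base64.getEncoder.encodeToString("abc".getBytes) // "YWJj"
    new String(java.util.Base64.getDecoder.decode("YWJj"))     // "abc"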
-    test("test spark doesn't support input string value for binary data type") {
-        val rdd = spark.sparkContext.parallelize(1 to 3)
-                .map(x => Row("a" + x % 10, "b", x, "YWJj".getBytes()))
-        val customSchema = StructType(Array(
-            SparkStructField("c1", StringType),
-            SparkStructField("c2", StringType),
-            SparkStructField("number", IntegerType),
-            SparkStructField("c4", BinaryType)))
-
-        try {
-            spark.createDataFrame(rdd, customSchema);
-        } catch {
-            case e: RuntimeException => e.getMessage.contains(
-                "java.lang.String is not a valid external type for schema of binary")
-        }
-    }
-
-  override protected def beforeAll(): Unit = {
-    drop()
-    createParquetTable()
-  }
-
-  override def afterAll(): Unit = {
-    drop()
-  }
-
-  private def drop(): Unit = {
-    spark.sql("drop table if exists testformat")
-    spark.sql("drop table if exists carbon_table")
-    spark.sql("drop table if exists testparquet")
-    spark.sql("drop table if exists par_table")
-    spark.sql("drop table if exists sdkout")
-    spark.sql("drop table if exists validate")
-    spark.sql("drop table if exists fileformat_date")
-  }
-}
diff --git a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala b/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
deleted file mode 100644
index 7b39391..0000000
--- a/integration/spark-datasource/src/test/scala/org/apache/spark/sql/carbondata/datasource/TestUtil.scala
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.spark.sql.carbondata.datasource
-
-import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, File, InputStream}
-
-import scala.collection.JavaConverters._
-
-import org.apache.avro
-import org.apache.avro.file.DataFileWriter
-import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
-import org.apache.avro.io.DecoderFactory
-import org.apache.spark.sql.carbondata.execution.datasources.CarbonFileIndexReplaceRule
-import org.apache.spark.sql.{DataFrame, Row, SparkSession}
-import org.apache.spark.sql.catalyst.plans.logical
-import org.apache.spark.sql.catalyst.util.sideBySide
-import org.junit.Assert
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.datastore.impl.FileFactory
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.sdk.file.CarbonWriter
-
-object TestUtil {
-
-  val rootPath = new File(this.getClass.getResource("/").getPath
-                          + "../../../..").getCanonicalPath
-  val warehouse1 = FileFactory.getPath(s"$rootPath/integration/spark-datasource/target/warehouse").toString
-  val resource = s"$rootPath/integration/spark-datasource/src/test/resources"
-  val metaStoreDB1 = s"$rootPath/integration/spark-datasource/target"
-  val spark = SparkSession
-    .builder()
-    .enableHiveSupport()
-    .master("local")
-    .config("spark.sql.warehouse.dir", warehouse1)
-    .config("spark.driver.host", "localhost")
-    .config("spark.sql.crossJoin.enabled", "true")
-    .getOrCreate()
-  spark.sparkContext.setLogLevel("ERROR")
-  if (!spark.sparkContext.version.startsWith("2.1")) {
-    spark.experimental.extraOptimizations = Seq(new CarbonFileIndexReplaceRule)
-  }
-  CarbonProperties.getInstance()
-    .addProperty(CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT, "40")
-
-  def checkAnswer(df: DataFrame, expectedAnswer: java.util.List[Row]):Unit = {
-    checkAnswer(df, expectedAnswer.asScala)
-  }
-
-  def checkExistence(df: DataFrame, exists: Boolean, keywords: String*) {
-    val outputs = df.collect().map(_.mkString).mkString
-    for (key <- keywords) {
-      if (exists) {
-        assert(outputs.contains(key), s"Failed for $df ($key doesn't exist in result)")
-      } else {
-        assert(!outputs.contains(key), s"Failed for $df ($key existed in the result)")
-      }
-    }
-  }
-
-  def checkAnswer(df: DataFrame, expectedAnswer: DataFrame): Unit = {
-    checkAnswer(df, expectedAnswer.collect())
-  }
-
-  /**
-   * Runs the plan and makes sure the answer matches the expected result.
-   * If an exception occurs during execution, or the contents of the DataFrame do not
-   * match the expected result, the assertion fails with a detailed error message.
-   * @param df the [[DataFrame]] to be executed
-   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
-   */
-  def checkAnswer(df: DataFrame, expectedAnswer: Seq[Row]): Unit = {
-    val isSorted = df.logicalPlan.collect { case s: logical.Sort => s }.nonEmpty
-    def prepareAnswer(answer: Seq[Row]): Seq[Row] = {
-      // Converts data to types that we can do equality comparison using Scala collections.
-      // For BigDecimal type, the Scala type has a better definition of equality test (similar to
-      // Java's java.math.BigDecimal.compareTo).
-      // For binary arrays, we convert it to Seq to avoid of calling java.util.Arrays.equals for
-      // equality test.
-      val converted: Seq[Row] = answer.map { s =>
-        Row.fromSeq(s.toSeq.map {
-          case d: java.math.BigDecimal => BigDecimal(d)
-          case b: Array[Byte] => b.toSeq
-          case d : Double =>
-            if (!d.isInfinite && !d.isNaN) {
-              var bd = BigDecimal(d)
-              bd = bd.setScale(5, BigDecimal.RoundingMode.UP)
-              bd.doubleValue()
-            }
-            else {
-              d
-            }
-          case o => o
-        })
-      }
-      if (!isSorted) converted.sortBy(_.toString()) else converted
-    }
-    val sparkAnswer = df.collect().toSeq
-
-    if (prepareAnswer(expectedAnswer) != prepareAnswer(sparkAnswer)) {
-      val errorMessage =
-        s"""
-           |Results do not match for query:
-           |${df.queryExecution}
-           |== Results ==
-           |${
-          sideBySide(
-            s"== Correct Answer - ${expectedAnswer.size} ==" +:
-            prepareAnswer(expectedAnswer).map(_.toString()),
-            s"== Spark Answer - ${sparkAnswer.size} ==" +:
-            prepareAnswer(sparkAnswer).map(_.toString())).mkString("\n")
-        }
-      """.stripMargin
-      assert(false, errorMessage)
-    }
-  }
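
The BigDecimal conversion above matters because java.math.BigDecimal equality
is scale-sensitive while Scala's BigDecimal compares by value, e.g.:

    new java.math.BigDecimal("2.0").equals(new java.math.BigDecimal("2.00")) // false
    BigDecimal("2.0") == BigDecimal("2.00")                                  // true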
-
-  def WriteFilesWithAvroWriter(writerPath: String,
-      rows: Int,
-      mySchema: String,
-      json: String): Unit = {
-    // conversion to GenericData.Record
-    val nn = new avro.Schema.Parser().parse(mySchema)
-    val record = jsonToAvro(json, mySchema)
-    try {
-      val writer = CarbonWriter.builder
-        .outputPath(writerPath)
-        .uniqueIdentifier(System.currentTimeMillis()).withAvroInput(nn).writtenBy("DataSource").build()
-      var i = 0
-      while (i < rows) {
-        writer.write(record)
-        i += 1
-      }
-      writer.close()
-    }
-    catch {
-      case e: Exception => {
-        e.printStackTrace()
-        Assert.fail(e.getMessage)
-      }
-    }
-  }
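
A minimal usage sketch for the helper above (the schema and JSON literals are
illustrative only, not taken from this file):

    val personSchema =
      """{"type": "record", "name": "person",
          "fields": [{"name": "name", "type": "string"}]}"""
    TestUtil.WriteFilesWithAvroWriter("/tmp/avro_out", 3, personSchema,
      """{"name": "bob"}""")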
-
-  private def jsonToAvro(json: String, avroSchema: String): GenericRecord = {
-    var input: InputStream = null
-    var writer: DataFileWriter[GenericRecord] = null
-    try {
-      val schema = new org.apache.avro.Schema.Parser().parse(avroSchema)
-      val reader = new GenericDatumReader[GenericRecord](schema)
-      input = new ByteArrayInputStream(json.getBytes())
-      val din = new DataInputStream(input)
-      writer = new DataFileWriter[GenericRecord](new GenericDatumWriter[GenericRecord]())
-      writer.create(schema, new ByteArrayOutputStream())
-      val decoder = DecoderFactory.get().jsonDecoder(schema, din)
-      reader.read(null, decoder)
-    } finally {
-      // guard against NPE when an exception is thrown before assignment
-      if (input != null) input.close()
-      if (writer != null) writer.close()
-    }
-  }
-
-}
diff --git a/integration/spark-common-test/pom.xml b/integration/spark/pom.xml
similarity index 67%
rename from integration/spark-common-test/pom.xml
rename to integration/spark/pom.xml
index eb3f252..531be6f 100644
--- a/integration/spark-common-test/pom.xml
+++ b/integration/spark/pom.xml
@@ -26,27 +26,25 @@
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>carbondata-spark-common-test</artifactId>
-  <name>Apache CarbonData :: Spark Common Test</name>
+  <artifactId>carbondata-spark</artifactId>
+  <name>Apache CarbonData :: Spark</name>
 
   <properties>
     <dev.path>${basedir}/../../dev</dev.path>
     <jacoco.append>true</jacoco.append>
+
     <build.directory.projectCommon>../../common/target</build.directory.projectCommon>
     <build.directory.projectCore>../../core/target</build.directory.projectCore>
     <build.directory.projectProcessing>../../processing/target</build.directory.projectProcessing>
     <build.directory.projectHadoop>../../hadoop/target</build.directory.projectHadoop>
     <build.directory.projectFormat>../../format/target</build.directory.projectFormat>
     <build.directory.projectSpark>../../integration/spark/target</build.directory.projectSpark>
-    <build.directory.projectSpark2>../../integration/spark2/target</build.directory.projectSpark2>
-    <build.directory.projectSparkCommon>../../integration/spark-common/target</build.directory.projectSparkCommon>
-    <build.directory.projectSparkCommonTest>../../integration/spark-common-test/target</build.directory.projectSparkCommonTest>
     <!--<build.directory.projectHive>../../integration/hive/target</build.directory.projectHive>-->
     <!--<build.directory.projectPresto>../../integration/presto/target</build.directory.projectPresto>-->
-    <build.directory.projectStoreSdk>../../store/sdk/target</build.directory.projectStoreSdk>
+    <build.directory.projectStoreSdk>../../sdk/sdk/target</build.directory.projectStoreSdk>
     <build.directory.projectStreaming>../../streaming/target</build.directory.projectStreaming>
-    <build.directory.projectBloom>../../datamap/bloom/target</build.directory.projectBloom>
-    <build.directory.projectLucene>../../datamap/lucene/target</build.directory.projectLucene>
+    <build.directory.projectBloom>../../index/bloom/target</build.directory.projectBloom>
+    <build.directory.projectLucene>../../index/lucene/target</build.directory.projectLucene>
 
     <classes.directory.projectCommon>../../common/target/classes</classes.directory.projectCommon>
     <classes.directory.projectCore>../../core/target/classes</classes.directory.projectCore>
@@ -54,15 +52,12 @@
     <classes.directory.projectHadoop>../../hadoop/target/classes</classes.directory.projectHadoop>
     <classes.directory.projectFormat>../../format/target/classes</classes.directory.projectFormat>
     <classes.directory.projectSpark>../../integration/spark/target/classes</classes.directory.projectSpark>
-    <classes.directory.projectSpark2>../../integration/spark2/target/classes</classes.directory.projectSpark2>
-    <classes.directory.projectSparkCommon>../../integration/spark-common/target/classes</classes.directory.projectSparkCommon>
-    <classes.directory.projectSparkCommonTest>../../integration/spark-common-test/target/classes</classes.directory.projectSparkCommonTest>
     <!--<classes.directory.projectHive>../../integration/hive/target/classes</classes.directory.projectHive>-->
     <!--<classes.directory.projectPresto>../../integration/presto/target/classes</classes.directory.projectPresto>-->
-    <classes.directory.projectStoreSdk>../../store/sdk/target/classes</classes.directory.projectStoreSdk>
+    <classes.directory.projectStoreSdk>../../sdk/sdk/target/classes</classes.directory.projectStoreSdk>
     <classes.directory.projectStreaming>../../streaming/target/classes</classes.directory.projectStreaming>
-    <classes.directory.projectBloom>../../datamap/bloom/target/classes</classes.directory.projectBloom>
-    <classes.directory.projectLucene>../../datamap/lucene/target/classes</classes.directory.projectLucene>
+    <classes.directory.projectBloom>../../index/bloom/target/classes</classes.directory.projectBloom>
+    <classes.directory.projectLucene>../../index/lucene/target/classes</classes.directory.projectLucene>
 
     <sources.directory.projectCommon>../../common/src/main/java</sources.directory.projectCommon>
     <sources.directory.projectCore>../../core/src/main/java</sources.directory.projectCore>
@@ -71,19 +66,15 @@
     <sources.directory.projectFormat>../../format/src/main/thrift</sources.directory.projectFormat>
     <sources.directory.projectSpark>../../integration/spark/src/main/scala</sources.directory.projectSpark>
     <sources.directory.projectSpark>../../integration/spark/src/main/java</sources.directory.projectSpark>
-    <sources.directory.projectSpark2>../../integration/spark2/src/main/java</sources.directory.projectSpark2>
-    <sources.directory.projectSpark2>../../integration/spark2/src/main/scala</sources.directory.projectSpark2>
-    <sources.directory.projectSparkCommon>../../integration/spark-common/src/main/java</sources.directory.projectSparkCommon>
-    <sources.directory.projectSparkCommon>../../integration/spark-common/src/main/scala</sources.directory.projectSparkCommon>
     <!--<sources.directory.projectHive>../../integration/hive/src/main/java</sources.directory.projectHive>-->
     <!--<sources.directory.projectHive>../../integration/hive/src/main/scala</sources.directory.projectHive>-->
     <!--<sources.directory.projectPresto>../../integration/presto/src/main/java</sources.directory.projectPresto>-->
     <!--<sources.directory.projectPresto>../../integration/presto/src/main/scala</sources.directory.projectPresto>-->
-    <sources.directory.projectStoreSdk>../../store/sdk/src/main/java</sources.directory.projectStoreSdk>
+    <sources.directory.projectStoreSdk>../../sdk/sdk/src/main/java</sources.directory.projectStoreSdk>
     <sources.directory.projectStreaming>../../streaming/src/main/java</sources.directory.projectStreaming>
     <sources.directory.projectStreaming>../../streaming/src/main/scala</sources.directory.projectStreaming>
-    <sources.directory.projectBloom>../../datamap/bloom/src/main/java</sources.directory.projectBloom>
-    <sources.directory.projectLucene>../../datamap/lucene/src/main/java</sources.directory.projectLucene>
+    <sources.directory.projectBloom>../../index/bloom/src/main/java</sources.directory.projectBloom>
+    <sources.directory.projectLucene>../../index/lucene/src/main/java</sources.directory.projectLucene>
 
     <generated-sources.directory.projectCommon>../../common/target/generated-sources/annotations</generated-sources.directory.projectCommon>
     <generated-sources.directory.projectCore>../../core/target/generated-sources/annotations</generated-sources.directory.projectCore>
@@ -91,81 +82,164 @@
     <generated-sources.directory.projectHadoop>../../hadoop/target/generated-sources/annotations</generated-sources.directory.projectHadoop>
     <generated-sources.directory.projectFormat>../../format/target/generated-sources/annotations</generated-sources.directory.projectFormat>
     <generated-sources.directory.projectSpark>../../integration/spark/target/generated-sources/annotations</generated-sources.directory.projectSpark>
-    <generated-sources.directory.projectSpark2>../../integration/spark2/target/generated-sources/annotations</generated-sources.directory.projectSpark2>
-    <generated-sources.directory.projectSparkCommon>../../integration/spark-common/target/generated-sources/annotations</generated-sources.directory.projectSparkCommon>
-    <generated-sources.directory.projectSparkCommonTest>../../integration/spark-common-test/target/generated-sources/annotations</generated-sources.directory.projectSparkCommonTest>
     <!--<generated-sources.directory.projectHive>../../integration/hive/target/generated-sources/annotations</generated-sources.directory.projectHive>-->
     <!--<generated-sources.directory.projectPresto>../../integration/presto/target/generated-sources/annotations</generated-sources.directory.projectPresto>-->
-    <generated-sources.directory.projectStoreSdk>../../store/sdk/target/generated-sources/annotations</generated-sources.directory.projectStoreSdk>
+    <generated-sources.directory.projectStoreSdk>../../sdk/sdk/target/generated-sources/annotations</generated-sources.directory.projectStoreSdk>
     <generated-sources.directory.projectStreaming>../../streaming/target/generated-sources/annotations</generated-sources.directory.projectStreaming>
-    <generated-sources.directory.projectBloom>../../datamap/bloom/target/generated-sources/annotations</generated-sources.directory.projectBloom>
-    <generated-sources.directory.projectLucene>../../datamap/lucene/target/generated-sources/annotations</generated-sources.directory.projectLucene>
+    <generated-sources.directory.projectBloom>../../index/bloom/target/generated-sources/annotations</generated-sources.directory.projectBloom>
+    <generated-sources.directory.projectLucene>../../index/lucene/target/generated-sources/annotations</generated-sources.directory.projectLucene>
 
   </properties>
 
-
   <dependencies>
+    <!-- carbon -->
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
+      <artifactId>carbondata-hive</artifactId>
       <version>${project.version}</version>
       <exclusions>
         <exclusion>
+          <groupId>org.apache.commons</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+        <exclusion>
           <groupId>org.apache.hive</groupId>
           <artifactId>hive-exec</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-service</artifactId>
+        </exclusion>
       </exclusions>
-      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.carbondata</groupId>
+      <artifactId>carbondata-cli</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.carbondata</groupId>
+      <artifactId>carbondata-sdk</artifactId>
+      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
       <artifactId>carbondata-lucene</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
       <artifactId>carbondata-bloom</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-store-sdk</artifactId>
+      <artifactId>carbondata-geo</artifactId>
       <version>${project.version}</version>
-      <scope>test</scope>
     </dependency>
     <dependency>
-      <!-- spark catalyst added runtime dependency on spark-core,so
-      while executing the testcases spark-core should be present else it
-      will fail to execute -->
+      <groupId>org.apache.carbondata</groupId>
+      <artifactId>carbondata-streaming</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <!-- spark -->
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-repl_${scala.binary.version}</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
+    </dependency>
+    <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-core_${scala.binary.version}</artifactId>
-      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-sql_${scala.binary.version}</artifactId>
       <exclusions>
-        <!-- need to Exclude Avro jar from this project,spark core is using
-        the version 1.7.4 which is not compatible with Carbon -->
+        <!-- the transitive dependency com.univocity:univocity-parsers:2.5.9
+        comes in from org.apache.spark:spark-sql_2.11 and must be excluded,
+        because Carbon uses version 2.2.1 -->
         <exclusion>
-          <groupId>org.apache.avro</groupId>
-          <artifactId>avro</artifactId>
+          <groupId>com.univocity</groupId>
+          <artifactId>univocity-parsers</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
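One way to confirm the exclusion above takes effect is the standard Maven
dependency report (not part of this change):

    mvn dependency:tree -Dincludes=com.univocity:univocity-parsers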
     <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-sql-kafka-0-10_${scala.binary.version}</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>net.jpountz.lz4</groupId>
+          <artifactId>lz4</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-aws</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-annotations</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpclient</artifactId>
+      <version>${httpclient.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>net.java.dev.jets3t</groupId>
+      <artifactId>jets3t</artifactId>
+      <version>0.9.0</version>
+    </dependency>
+    <!-- scala -->
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-compiler</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-reflect</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-library</artifactId>
+    </dependency>
+    <!-- test -->
+    <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <scope>test</scope>
+      <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.jmockit</groupId>
       <artifactId>jmockit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>
@@ -217,15 +291,49 @@
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>3.0.0</version>
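+        <!-- copies the carbondata-format, gson and snappy-java jars listed
+        below into target/jars during the compile phase, giving dependent
+        tooling a fixed location to pick them up from (assumed intent) -->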
+        <executions>
+          <execution>
+            <id>compile</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <configuration>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>org.apache.carbondata</groupId>
+                  <artifactId>carbondata-format</artifactId>
+                  <version>${project.version}</version>
+                </artifactItem>
+                <artifactItem>
+                  <groupId>com.google.code.gson</groupId>
+                  <artifactId>gson</artifactId>
+                  <version>2.4</version>
+                </artifactItem>
+                <artifactItem>
+                  <groupId>org.xerial.snappy</groupId>
+                  <artifactId>snappy-java</artifactId>
+                  <version>${snappy.version}</version>
+                </artifactItem>
+              </artifactItems>
+              <outputDirectory>${project.build.directory}/jars</outputDirectory>
+              <!-- other configurations here -->
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
         <version>2.18</version>
         <!-- Note config is repeated in scalatest config -->
         <configuration>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m </argLine>
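+          <!-- note: -XX:MaxPermSize has no effect on JDK 8 and later, where
+          PermGen was removed; it is presumably kept for builds on older JVMs -->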
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
-            <spark.carbon.hive.schema.store>${carbon.hive.based.metastore}</spark.carbon.hive.schema.store>
           </systemProperties>
           <failIfNoTests>false</failIfNoTests>
         </configuration>
@@ -260,32 +368,6 @@
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <!-- Copy the ant tasks jar. Needed for ts.jacoco.report-ant . -->
-          <execution>
-            <id>jacoco-dependency-ant</id>
-            <goals>
-              <goal>copy</goal>
-            </goals>
-            <phase>process-test-resources</phase>
-            <inherited>false</inherited>
-            <configuration>
-              <artifactItems>
-                <artifactItem>
-                  <groupId>org.jacoco</groupId>
-                  <artifactId>org.jacoco.ant</artifactId>
-                  <version>0.7.9</version>
-                </artifactItem>
-              </artifactItems>
-              <stripVersion>true</stripVersion>
-              <outputDirectory>${basedir}/target/jacoco-jars</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>
         <version>1.6</version>
         <executions>
@@ -321,15 +403,6 @@
                     <fileset dir="${build.directory.projectSpark}" erroronmissingdir="false">
                       <include name="jacoco.exec" />
                     </fileset>
-                    <fileset dir="${build.directory.projectSpark2}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectSparkCommon}">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectSparkCommonTest}">
-                      <include name="jacoco.exec" />
-                    </fileset>
                     <!--<fileset dir="${build.directory.projectHive}" erroronmissingdir="false">
                       <include name="jacoco.exec" />
                     </fileset>-->
@@ -359,9 +432,6 @@
                         <fileset dir="${classes.directory.projectHadoop}" />
                         <!--<fileset dir="${classes.directory.projectFormat}" erroronmissingdir="false"/>-->
                         <fileset dir="${classes.directory.projectSpark}" erroronmissingdir="false"/>
-                        <fileset dir="${classes.directory.projectSpark2}" erroronmissingdir="false"/>
-                        <fileset dir="${classes.directory.projectSparkCommon}" />
-                        <fileset dir="${classes.directory.projectSparkCommonTest}" />
                         <!--<fileset dir="${classes.directory.projectHive}" erroronmissingdir="false" />-->
                         <!--<fileset dir="${classes.directory.projectPresto}" erroronmissingdir="false" />-->
                         <fileset dir="${classes.directory.projectStoreSdk}" erroronmissingdir="false" />
@@ -376,8 +446,6 @@
                         <fileset dir="${sources.directory.projectHadoop}" />
                         <!--<fileset dir="${sources.directory.projectFormat}" erroronmissingdir="false"/>-->
                         <fileset dir="${sources.directory.projectSpark}" erroronmissingdir="false"/>
-                        <fileset dir="${sources.directory.projectSpark2}" erroronmissingdir="false"/>
-                        <fileset dir="${sources.directory.projectSparkCommon}" />
                         <!--<fileset dir="${sources.directory.projectHive}" erroronmissingdir="false" />-->
                         <!--<fileset dir="${sources.directory.projectPresto}" erroronmissingdir="false" />-->
                         <fileset dir="${sources.directory.projectStoreSdk}" erroronmissingdir="false" />
@@ -418,24 +486,102 @@
       </plugin>
     </plugins>
   </build>
+
   <profiles>
     <profile>
+      <id>build-all</id>
+      <properties>
+        <spark.version>2.3.4</spark.version>
+        <scala.binary.version>2.11</scala.binary.version>
+        <scala.version>2.11.8</scala.version>
+      </properties>
+    </profile>
+    <profile>
       <id>sdvtest</id>
       <properties>
         <maven.test.skip>true</maven.test.skip>
       </properties>
     </profile>
     <profile>
-      <id>build-all</id>
+      <id>spark-2.3</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <properties>
+        <spark.version>2.3.4</spark.version>
+        <scala.binary.version>2.11</scala.binary.version>
+        <scala.version>2.11.8</scala.version>
+      </properties>
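+      <!-- default profile: compiles against Spark 2.3 by excluding the
+      src/main/spark2.4 tree and adding src/main/spark2.3 as an extra source
+      root via build-helper-maven-plugin -->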
       <build>
         <plugins>
           <plugin>
             <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
+            <artifactId>maven-compiler-plugin</artifactId>
             <configuration>
-              <skip>true</skip>
+              <excludes>
+                <exclude>src/main/spark2.4</exclude>
+              </excludes>
             </configuration>
           </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>build-helper-maven-plugin</artifactId>
+            <version>3.0.0</version>
+            <executions>
+              <execution>
+                <id>add-source</id>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>add-source</goal>
+                </goals>
+                <configuration>
+                  <sources>
+                    <source>src/main/spark2.3</source>
+                  </sources>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
+      <id>spark-2.4</id>
+      <properties>
+        <spark.version>2.4.4</spark.version>
+        <scala.binary.version>2.11</scala.binary.version>
+        <scala.version>2.11.8</scala.version>
+      </properties>
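+      <!-- mirror of the spark-2.3 profile: excludes src/main/spark2.3 and
+      adds src/main/spark2.4 as the version-specific source root -->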
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-compiler-plugin</artifactId>
+            <configuration>
+              <excludes>
+                <exclude>src/main/spark2.3</exclude>
+              </excludes>
+            </configuration>
+          </plugin>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>build-helper-maven-plugin</artifactId>
+            <version>3.0.0</version>
+            <executions>
+              <execution>
+                <id>add-source</id>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>add-source</goal>
+                </goals>
+                <configuration>
+                  <sources>
+                    <source>src/main/spark2.4</source>
+                  </sources>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
         </plugins>
       </build>
     </profile>
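
A usage note on the profiles above: spark-2.3 is active by default, and a build
can switch Spark lines with "mvn -Pspark-2.4 ..." (explicitly activating any
profile on the command line turns the activeByDefault spark-2.3 profile off).
A minimal sketch, assuming only the profile ids defined in this pom, of pinning
the choice in a local Maven settings.xml instead:

    <settings>
      <!-- keeps the spark-2.4 profile active for every build on this machine -->
      <activeProfiles>
        <activeProfile>spark-2.4</activeProfile>
      </activeProfiles>
    </settings>
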
diff --git a/integration/spark2/src/main/java/org/apache/carbondata/datamap/DataMapManager.java b/integration/spark/src/main/java/org/apache/carbondata/datamap/DataMapManager.java
similarity index 100%
rename from integration/spark2/src/main/java/org/apache/carbondata/datamap/DataMapManager.java
rename to integration/spark/src/main/java/org/apache/carbondata/datamap/DataMapManager.java
diff --git a/integration/spark2/src/main/java/org/apache/carbondata/datamap/IndexDataMapProvider.java b/integration/spark/src/main/java/org/apache/carbondata/datamap/IndexDataMapProvider.java
similarity index 100%
rename from integration/spark2/src/main/java/org/apache/carbondata/datamap/IndexDataMapProvider.java
rename to integration/spark/src/main/java/org/apache/carbondata/datamap/IndexDataMapProvider.java
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/exception/ProcessMetaDataException.java b/integration/spark/src/main/java/org/apache/carbondata/spark/exception/ProcessMetaDataException.java
similarity index 100%
rename from integration/spark-common/src/main/java/org/apache/carbondata/spark/exception/ProcessMetaDataException.java
rename to integration/spark/src/main/java/org/apache/carbondata/spark/exception/ProcessMetaDataException.java
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/load/DecimalSerializableComparator.java b/integration/spark/src/main/java/org/apache/carbondata/spark/load/DecimalSerializableComparator.java
similarity index 100%
rename from integration/spark-common/src/main/java/org/apache/carbondata/spark/load/DecimalSerializableComparator.java
rename to integration/spark/src/main/java/org/apache/carbondata/spark/load/DecimalSerializableComparator.java
diff --git a/integration/spark2/src/main/java/org/apache/carbondata/spark/readsupport/SparkGenericRowReadSupportImpl.java b/integration/spark/src/main/java/org/apache/carbondata/spark/readsupport/SparkGenericRowReadSupportImpl.java
similarity index 100%
rename from integration/spark2/src/main/java/org/apache/carbondata/spark/readsupport/SparkGenericRowReadSupportImpl.java
rename to integration/spark/src/main/java/org/apache/carbondata/spark/readsupport/SparkGenericRowReadSupportImpl.java
diff --git a/integration/spark2/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java b/integration/spark/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
similarity index 100%
rename from integration/spark2/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
rename to integration/spark/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/util/Util.java b/integration/spark/src/main/java/org/apache/carbondata/spark/util/Util.java
similarity index 100%
rename from integration/spark-common/src/main/java/org/apache/carbondata/spark/util/Util.java
rename to integration/spark/src/main/java/org/apache/carbondata/spark/util/Util.java
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala b/integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/carbondata/converter/SparkDataTypeConverterImpl.java b/integration/spark/src/main/scala/org/apache/carbondata/converter/SparkDataTypeConverterImpl.java
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/carbondata/converter/SparkDataTypeConverterImpl.java
rename to integration/spark/src/main/scala/org/apache/carbondata/converter/SparkDataTypeConverterImpl.java
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/datamap/CarbonMergeBloomIndexFilesRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/datamap/CarbonMergeBloomIndexFilesRDD.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/datamap/CarbonMergeBloomIndexFilesRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/datamap/CarbonMergeBloomIndexFilesRDD.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/datamap/IndexDataMapRebuildRDD.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/datamap/TextMatchUDF.scala b/integration/spark/src/main/scala/org/apache/carbondata/datamap/TextMatchUDF.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/datamap/TextMatchUDF.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/datamap/TextMatchUDF.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/CacheEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/CacheEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/CacheEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/CacheEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/CarbonInitEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/CarbonInitEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/CarbonInitEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/CarbonInitEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/CleanFilesEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/CleanFilesEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/CleanFilesEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/CleanFilesEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateCarbonRelationEvent.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/CreateCarbonRelationEvent.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateCarbonRelationEvent.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/CreateCarbonRelationEvent.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateDatabaseEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/CreateDatabaseEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateDatabaseEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/CreateDatabaseEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/CreateTableEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/DataMapEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/DataMapEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/DataMapEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/DataMapEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/DeleteSegmentEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/DeleteSegmentEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/DeleteSegmentEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/DeleteSegmentEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/DropDataMapEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/DropDataMapEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/DropDataMapEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/DropDataMapEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/DropTableEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/DropTableEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/DropTableEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/DropTableEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/Events.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/Events.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/Events.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/Events.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/IUDEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/IUDEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/IUDEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/IUDEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/IndexServerEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/IndexServerEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/IndexServerEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/IndexServerEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/LookupRelationEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/LookupRelationEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/LookupRelationEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/LookupRelationEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/RefreshTableEvents.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/RefreshTableEvents.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/RefreshTableEvents.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/RefreshTableEvents.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/events/exception/EventExceptions.scala b/integration/spark/src/main/scala/org/apache/carbondata/events/exception/EventExceptions.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/events/exception/EventExceptions.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/events/exception/EventExceptions.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/geo/GeoUtils.scala b/integration/spark/src/main/scala/org/apache/carbondata/geo/GeoUtils.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/geo/GeoUtils.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/geo/GeoUtils.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/geo/InPolygonUDF.scala b/integration/spark/src/main/scala/org/apache/carbondata/geo/InPolygonUDF.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/geo/InPolygonUDF.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/geo/InPolygonUDF.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DataMapJobs.scala b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/DataMapJobs.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DataMapJobs.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/indexserver/DataMapJobs.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DistributedCountRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/DistributedCountRDD.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DistributedCountRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/indexserver/DistributedCountRDD.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DistributedPruneRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/DistributedPruneRDD.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DistributedPruneRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/indexserver/DistributedPruneRDD.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DistributedRDDUtils.scala b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/DistributedRDDUtils.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DistributedRDDUtils.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/indexserver/DistributedRDDUtils.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DistributedShowCacheRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/DistributedShowCacheRDD.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/indexserver/DistributedShowCacheRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/indexserver/DistributedShowCacheRDD.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/InvalidateSegmentCacheRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/InvalidateSegmentCacheRDD.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/indexserver/InvalidateSegmentCacheRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/indexserver/InvalidateSegmentCacheRDD.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/SegmentPruneRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/indexserver/SegmentPruneRDD.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/indexserver/SegmentPruneRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/indexserver/SegmentPruneRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/CarbonColumnValidator.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonColumnValidator.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/CarbonColumnValidator.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonColumnValidator.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonSparkFactory.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/InitInputMetrics.java b/integration/spark/src/main/scala/org/apache/carbondata/spark/InitInputMetrics.java
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/InitInputMetrics.java
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/InitInputMetrics.java
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/KeyVal.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/KeyVal.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/KeyVal.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/KeyVal.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/StreamingOption.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/StreamingOption.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/StreamingOption.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/StreamingOption.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/CsvRDDHelper.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/load/CsvRDDHelper.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/CsvRDDHelper.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/load/CsvRDDHelper.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessBuilderOnSpark.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessBuilderOnSpark.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessBuilderOnSpark.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessBuilderOnSpark.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/load/DataLoadProcessorStepOnSpark.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/GlobalSortHelper.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/load/GlobalSortHelper.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/load/GlobalSortHelper.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/load/GlobalSortHelper.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeltaRowScanRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeltaRowScanRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeltaRowScanRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeltaRowScanRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropPartitionRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropPartitionRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropPartitionRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropPartitionRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonIUDMergerRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonIUDMergerRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonIUDMergerRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonIUDMergerRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonScanRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonSparkPartition.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonSparkPartition.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonSparkPartition.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonSparkPartition.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonTableCompactor.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CompactionFactory.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CompactionFactory.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CompactionFactory.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CompactionFactory.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/Compactor.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/SparkReadSupport.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/SparkReadSupport.scala
similarity index 99%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/SparkReadSupport.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/SparkReadSupport.scala
index 3d78f0e..bc8453b 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/SparkReadSupport.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/SparkReadSupport.scala
@@ -21,6 +21,7 @@ import org.apache.carbondata.hadoop.readsupport.CarbonReadSupport
 
 // Used to solve the cyclic-dependency issue among the carbon-spark-common, carbon-spark and
 // carbon-spark2 modules; variables or functions that differ between carbon-spark and carbon-spark2 are set here
+// TODO
 object SparkReadSupport {
 
   var readSupportClass: Class[_ <: CarbonReadSupport[_]] = _
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/StreamHandoffRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/UpdateDataLoad.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/thriftserver/CarbonThriftServer.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/thriftserver/CarbonThriftServer.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/spark/thriftserver/CarbonThriftServer.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/thriftserver/CarbonThriftServer.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/util/CarbonSparkUtil.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/util/CarbonSparkUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/spark/util/CarbonSparkUtil.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/util/CarbonSparkUtil.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/util/DataGenerator.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/util/DataGenerator.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/spark/util/DataGenerator.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/util/DataGenerator.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/DataTypeConverterUtil.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/util/DataTypeConverterUtil.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/DataTypeConverterUtil.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/util/DataTypeConverterUtil.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java b/integration/spark/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapper.java
diff --git a/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapperDirect.java b/integration/spark/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapperDirect.java
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapperDirect.java
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/vectorreader/ColumnarVectorWrapperDirect.java
diff --git a/integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java b/integration/spark/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
rename to integration/spark/src/main/scala/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala b/integration/spark/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/store/SparkCarbonStore.scala
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java b/integration/spark/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java
rename to integration/spark/src/main/scala/org/apache/carbondata/stream/CarbonStreamRecordReader.java
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala b/integration/spark/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/stream/StreamJobManager.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/streaming/CarbonSparkStreamingListener.scala b/integration/spark/src/main/scala/org/apache/carbondata/streaming/CarbonSparkStreamingListener.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/streaming/CarbonSparkStreamingListener.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/streaming/CarbonSparkStreamingListener.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/streaming/CarbonStreamSparkStreaming.scala b/integration/spark/src/main/scala/org/apache/carbondata/streaming/CarbonStreamSparkStreaming.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/streaming/CarbonStreamSparkStreaming.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/streaming/CarbonStreamSparkStreaming.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/streaming/CarbonStreamingQueryListener.scala b/integration/spark/src/main/scala/org/apache/carbondata/streaming/CarbonStreamingQueryListener.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/streaming/CarbonStreamingQueryListener.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/streaming/CarbonStreamingQueryListener.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/streaming/StreamSinkFactory.scala b/integration/spark/src/main/scala/org/apache/carbondata/streaming/StreamSinkFactory.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/carbondata/streaming/StreamSinkFactory.scala
rename to integration/spark/src/main/scala/org/apache/carbondata/streaming/StreamSinkFactory.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/CarbonInputMetrics.scala b/integration/spark/src/main/scala/org/apache/spark/CarbonInputMetrics.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/CarbonInputMetrics.scala
rename to integration/spark/src/main/scala/org/apache/spark/CarbonInputMetrics.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/DataSkewRangePartitioner.scala b/integration/spark/src/main/scala/org/apache/spark/DataSkewRangePartitioner.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/DataSkewRangePartitioner.scala
rename to integration/spark/src/main/scala/org/apache/spark/DataSkewRangePartitioner.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala b/integration/spark/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala
rename to integration/spark/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/rdd/DataLoadCoalescedRDD.scala b/integration/spark/src/main/scala/org/apache/spark/rdd/DataLoadCoalescedRDD.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/rdd/DataLoadCoalescedRDD.scala
rename to integration/spark/src/main/scala/org/apache/spark/rdd/DataLoadCoalescedRDD.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/rdd/DataLoadPartitionCoalescer.scala b/integration/spark/src/main/scala/org/apache/spark/rdd/DataLoadPartitionCoalescer.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/rdd/DataLoadPartitionCoalescer.scala
rename to integration/spark/src/main/scala/org/apache/spark/rdd/DataLoadPartitionCoalescer.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonBoundReference.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDataFrameWriter.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDataFrameWriter.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDataFrameWriter.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonDataFrameWriter.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/CarbonDictionaryWrapper.java b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryWrapper.java
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/CarbonDictionaryWrapper.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryWrapper.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
similarity index 98%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
index 7586664..0f52fb7 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
@@ -354,13 +354,12 @@ object CarbonEnv {
     // for default database and db ends with .db
     // check whether the carbon store and hive store is same or different.
     if (dbName.equals("default") || databaseLocation.endsWith(".db")) {
-      val carbonStorePath =
-        FileFactory.getUpdatedFilePath(CarbonProperties.getStorePath())
-      val hiveStorePath =
-        FileFactory.getUpdatedFilePath(sparkSession.conf.get("spark.sql.warehouse.dir"))
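+      // the warehouse dir falls back to the carbon store path when
+      // spark.sql.warehouse.dir is not set; the comparison below also guards
+      // against a null carbon store path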
+      val carbonStorePath = FileFactory.getUpdatedFilePath(CarbonProperties.getStorePath())
+      val hiveStorePath = FileFactory.getUpdatedFilePath(
+        sparkSession.conf.get("spark.sql.warehouse.dir", carbonStorePath))
       // if carbon.store does not point to spark.sql.warehouse.dir then follow the old table path
       // format
-      if (!hiveStorePath.equals(carbonStorePath)) {
+      if (carbonStorePath != null && !hiveStorePath.equals(carbonStorePath)) {
         databaseLocation = CarbonProperties.getStorePath +
                            CarbonCommonConstants.FILE_SEPARATOR +
                            dbName
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonExpressions.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonExpressions.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonExpressions.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonExpressions.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonExtensions.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonExtensions.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonExtensions.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonExtensions.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSession.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonSession.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSource.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonSource.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSparkStreamingFactory.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSparkStreamingFactory.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSparkStreamingFactory.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonSparkStreamingFactory.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonUtils.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonUtils.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CarbonUtils.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonUtils.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/CarbonVectorProxy.java b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonVectorProxy.java
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/CarbonVectorProxy.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/CarbonVectorProxy.java
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/ColumnVectorFactory.java b/integration/spark/src/main/scala/org/apache/spark/sql/ColumnVectorFactory.java
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/ColumnVectorFactory.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/ColumnVectorFactory.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CustomDeterministicExpression.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CustomDeterministicExpression.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/CustomDeterministicExpression.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/CustomDeterministicExpression.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/EnvHelper.scala b/integration/spark/src/main/scala/org/apache/spark/sql/EnvHelper.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/EnvHelper.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/EnvHelper.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/SQLConf.scala b/integration/spark/src/main/scala/org/apache/spark/sql/SQLConf.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/SQLConf.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/SQLConf.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala b/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndex.scala b/integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndex.scala
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndex.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndex.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndexReplaceRule.scala b/integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndexReplaceRule.scala
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndexReplaceRule.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonFileIndexReplaceRule.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/SparkCarbonFileFormat.scala b/integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/SparkCarbonFileFormat.scala
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/SparkCarbonFileFormat.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/SparkCarbonFileFormat.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/readsupport/SparkUnsafeRowReadSuport.scala b/integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/readsupport/SparkUnsafeRowReadSuport.scala
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/readsupport/SparkUnsafeRowReadSuport.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/readsupport/SparkUnsafeRowReadSuport.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/AbstractCarbonSparkSQLParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/catalyst/AbstractCarbonSparkSQLParser.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/AbstractCarbonSparkSQLParser.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/catalyst/AbstractCarbonSparkSQLParser.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonParserUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonParserUtil.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonParserUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonParserUtil.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala b/integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/analysis/EmptyRule.scala b/integration/spark/src/main/scala/org/apache/spark/sql/catalyst/analysis/EmptyRule.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/analysis/EmptyRule.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/catalyst/analysis/EmptyRule.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/events/MergeBloomIndexEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/events/MergeIndexEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/CastExpressionOptimization.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/CastExpressionOptimization.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/CastExpressionOptimization.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/CastExpressionOptimization.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CacheUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/cache/CacheUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CacheUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/cache/CacheUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonDropCacheCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonDropCacheCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonDropCacheCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonDropCacheCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonShowCacheCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonShowCacheCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonShowCacheCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/cache/CarbonShowCacheCommand.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonCreateDataMapCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapRebuildCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapRebuildCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapRebuildCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapRebuildCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDropDataMapCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableCompactionCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonAlterTableFinishStreaming.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCleanFilesCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCliCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCliCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCliCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonCliCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByIdCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByIdCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByIdCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByIdCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByLoadDateCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByLoadDateCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByLoadDateCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteLoadByLoadDateCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteStageFilesCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteStageFilesCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteStageFilesCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonDeleteStageFilesCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertFromStageCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertFromStageCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertFromStageCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertFromStageCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoHadoopFsRelationCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoHadoopFsRelationCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoHadoopFsRelationCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoHadoopFsRelationCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoWithDf.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoWithDf.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoWithDf.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonInsertIntoWithDf.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadParams.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadParams.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadParams.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadParams.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonShowLoadsCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonShowLoadsCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonShowLoadsCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonShowLoadsCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CommonLoadUtils.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CommonLoadUtils.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CommonLoadUtils.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/CommonLoadUtils.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/management/RefreshCarbonTableCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForDeleteCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonProjectForUpdateCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonTruncateCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonTruncateCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonTruncateCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/CarbonTruncateCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompaction.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompactionException.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompactionException.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompactionException.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/HorizontalCompactionException.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/IUDCommonUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/IUDCommonUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/IUDCommonUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/IUDCommonUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetException.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetException.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetException.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/CarbonMergeDataSetException.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/HistoryTableLoadHelper.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/HistoryTableLoadHelper.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/HistoryTableLoadHelper.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/HistoryTableLoadHelper.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeDataSetBuilder.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeDataSetBuilder.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeDataSetBuilder.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeDataSetBuilder.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeProjection.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeProjection.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeProjection.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MergeProjection.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MutationAction.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MutationAction.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MutationAction.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/MutationAction.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/TranxManager.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/TranxManager.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/TranxManager.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/TranxManager.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/interfaces.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/interfaces.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/interfaces.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/merge/interfaces.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/package.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/package.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/package.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/package.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableAddHivePartitionCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableAddHivePartitionCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableAddHivePartitionCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableAddHivePartitionCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropHivePartitionCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropHivePartitionCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropHivePartitionCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/partition/CarbonAlterTableDropHivePartitionCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableAddColumnCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableColRenameDataTypeChangeCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableColRenameDataTypeChangeCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableColRenameDataTypeChangeCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableColRenameDataTypeChangeCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableSetCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableSetCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableSetCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableSetCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableUnsetCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableUnsetCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableUnsetCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableUnsetCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonCreateStreamCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonCreateStreamCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonCreateStreamCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonCreateStreamCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonDropStreamCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonDropStreamCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonDropStreamCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonDropStreamCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonShowStreamsCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonShowStreamsCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonShowStreamsCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonShowStreamsCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateDataSourceTableCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateDataSourceTableCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateDataSourceTableCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateDataSourceTableCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableAsSelectCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableLikeCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableLikeCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableLikeCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableLikeCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDropTableCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonExplainCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonExplainCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonExplainCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonExplainCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowCreateTableCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowCreateTableCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowCreateTableCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowCreateTableCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonPlanHelper.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonPlanHelper.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonPlanHelper.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonPlanHelper.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
similarity index 97%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
index 4dc4397..8af08cd 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
@@ -32,6 +32,7 @@ import org.apache.spark.sql.execution.datasources.{LogicalRelation, RefreshResou
 import org.apache.spark.sql.hive.CarbonRelation
 import org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand
 import org.apache.spark.sql.parser.{CarbonSpark2SqlParser, CarbonSparkSqlParserUtil}
+import org.apache.spark.sql.types.DecimalType
 import org.apache.spark.util.{CarbonReflectionUtils, FileUtils}
 
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
@@ -250,14 +251,20 @@ object DDLHelper {
       if (!columnName.equalsIgnoreCase(newColumn.name)) {
         isColumnRename = true
       }
+      val values = newColumn.dataType match {
+        case d: DecimalType => Some(List((d.precision, d.scale)))
+        case _ => None
+      }
+      val dataTypeInfo = CarbonParserUtil.parseDataType(
+        DataTypeConverterUtil
+          .convertToCarbonType(newColumn.dataType.typeName)
+          .getName
+          .toLowerCase,
+        values,
+        isColumnRename)
       val alterTableColRenameAndDataTypeChangeModel =
         AlterTableDataTypeChangeModel(
-          DataTypeInfo(
-            DataTypeConverterUtil
-              .convertToCarbonType(newColumn.dataType.typeName)
-              .getName
-              .toLowerCase
-          ),
+          dataTypeInfo,
           tableName.database.map(_.toLowerCase),
           tableName.table.toLowerCase,
           columnName.toLowerCase,
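
The DDLHelper change above threads DECIMAL precision and scale into the data-type-change
model instead of passing the bare type name. A minimal sketch of that extraction, assuming
only spark-sql on the classpath (the CarbonParserUtil.parseDataType call itself is exactly
as shown in the hunk):

    import org.apache.spark.sql.types.{DataType, DecimalType, IntegerType}

    // For DECIMAL the (precision, scale) pair must travel with the type name,
    // because "decimal" alone loses that information; other types map to None.
    def decimalValues(dt: DataType): Option[List[(Int, Int)]] = dt match {
      case d: DecimalType => Some(List((d.precision, d.scale)))
      case _              => None
    }

    // decimalValues(DecimalType(10, 2)) == Some(List((10, 2)))
    // decimalValues(IntegerType)        == None
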
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
similarity index 97%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
index c4f5456a..401e8f9 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
@@ -90,11 +90,14 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
         ExecutedCommandExec(DDLHelper.addColumns(addColumns, sparkSession)) :: Nil
       case addColumn: CarbonAlterTableAddColumnCommand =>
         CarbonPlanHelper.addColumn(addColumn, sparkSession)
-      case dropColumn: CarbonAlterTableDropColumnCommand
-        if isCarbonTable(TableIdentifier(
+      case dropColumn: CarbonAlterTableDropColumnCommand =>
+        if (isCarbonTable(TableIdentifier(
           dropColumn.alterTableDropColumnModel.tableName,
-          dropColumn.alterTableDropColumnModel.databaseName)) =>
-        CarbonPlanHelper.dropColumn(dropColumn, sparkSession)
+          dropColumn.alterTableDropColumnModel.databaseName))) {
+          CarbonPlanHelper.dropColumn(dropColumn, sparkSession)
+        } else {
+          throw new UnsupportedOperationException("Only carbondata table support drop column")
+        }
       case AlterTableSetLocationCommand(tableName, _, _)
         if isCarbonTable(tableName) =>
         throw new UnsupportedOperationException("Set partition location is not supported")
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DMLHelper.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DMLHelper.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DMLHelper.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DMLHelper.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/MixedFormatHandler.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/MixedFormatHandler.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/MixedFormatHandler.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/MixedFormatHandler.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/PushDownHelper.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/PushDownHelper.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/PushDownHelper.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/PushDownHelper.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/StreamingTableStrategy.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/StreamingTableStrategy.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/StreamingTableStrategy.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/StreamingTableStrategy.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/execution/streaming/CarbonAppendableStreamSink.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalyzer.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonAnalyzer.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalyzer.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonAnalyzer.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMVRules.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMVRules.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMVRules.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMVRules.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetaStore.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonRelation.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonRelation.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonRelation.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonRelation.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalog.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalog.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalog.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalog.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalogUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalogUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalogUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalogUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSessionUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSqlAstBuilder.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSqlAstBuilder.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSqlAstBuilder.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSqlAstBuilder.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSqlConf.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSqlConf.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonSqlConf.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSqlConf.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CreateCarbonSourceTableAsSelectCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CreateCarbonSourceTableAsSelectCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/CreateCarbonSourceTableAsSelectCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/CreateCarbonSourceTableAsSelectCommand.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/hive/DistributionUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/DistributionUtil.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/hive/DistributionUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/DistributionUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/SqlAstBuilderHelper.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/SqlAstBuilderHelper.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/SqlAstBuilderHelper.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/SqlAstBuilderHelper.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/cli/CarbonSQLCLIDriver.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/cli/CarbonSQLCLIDriver.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/cli/CarbonSQLCLIDriver.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/cli/CarbonSQLCLIDriver.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonResetCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonResetCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonResetCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonResetCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/listeners/DropCacheEventListeners.scala b/integration/spark/src/main/scala/org/apache/spark/sql/listeners/DropCacheEventListeners.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/listeners/DropCacheEventListeners.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/listeners/DropCacheEventListeners.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/listeners/MVListeners.scala b/integration/spark/src/main/scala/org/apache/spark/sql/listeners/MVListeners.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/listeners/MVListeners.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/listeners/MVListeners.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/listeners/PrePrimingListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/listeners/PrePrimingListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/listeners/PrePrimingListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/listeners/PrePrimingListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/listeners/ShowCacheEventListeners.scala b/integration/spark/src/main/scala/org/apache/spark/sql/listeners/ShowCacheEventListeners.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/listeners/ShowCacheEventListeners.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/listeners/ShowCacheEventListeners.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala b/integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonIUDRule.scala b/integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonIUDRule.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonIUDRule.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonIUDRule.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonUDFTransformRule.scala b/integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonUDFTransformRule.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonUDFTransformRule.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonUDFTransformRule.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSpark2SqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSpark2SqlParser.scala
similarity index 82%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSpark2SqlParser.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSpark2SqlParser.scala
index 6632762..ff75638 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSpark2SqlParser.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSpark2SqlParser.scala
@@ -34,18 +34,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants
 class CarbonExtensionSpark2SqlParser extends CarbonSpark2SqlParser {
 
   override protected lazy val extendedSparkSyntax: Parser[LogicalPlan] =
-    loadDataNew | alterTableColumnRenameAndModifyDataType | alterTableAddColumns
-
-  /**
-   * alter table change column with options
-   */
-  override protected lazy val alterTableColumnRenameAndModifyDataType: Parser[LogicalPlan] =
-    ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~ CHANGE ~ ident ~ ident ~
-    ident ~ ("(" ~> rep1sep(valueOptions, ",") <~ ")") <~ opt(";") ^^ {
-      case dbName ~ table ~ change ~ columnName ~ columnNameCopy ~ dataType ~ values =>
-        CarbonSparkSqlParserUtil.alterTableColumnRenameAndModifyDataType(
-          dbName, table, columnName, columnNameCopy, dataType, Option(values))
-    }
+    loadDataNew | alterTableAddColumns
 
   /**
    * alter table add columns with TBLPROPERTIES
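
(The hunk above drops the ALTER TABLE ... CHANGE production from the extension parser.) A minimal, self-contained sketch of the parser-combinator style these productions use, assuming the scala-parser-combinators dependency the parser already relies on; String results stand in for LogicalPlan and the grammar is heavily simplified:

    import scala.util.parsing.combinator.RegexParsers

    object ExtendedSyntaxSketch extends RegexParsers {
      private val ident = "[a-zA-Z_][a-zA-Z0-9_]*".r

      // Each alternative yields a plain String here instead of a LogicalPlan.
      private lazy val loadDataNew: Parser[String] =
        "LOAD" ~> "DATA" ~> ident ^^ { table => s"LoadData($table)" }

      private lazy val alterTableAddColumns: Parser[String] =
        "ALTER" ~> "TABLE" ~> ident <~ "ADD" <~ "COLUMNS" ^^ { table =>
          s"AddColumns($table)"
        }

      // Alternatives are tried left to right until one matches, so removing a
      // production (as the hunk above does) simply shortens this chain.
      lazy val extendedSparkSyntax: Parser[String] =
        loadDataNew | alterTableAddColumns

      def main(args: Array[String]): Unit =
        println(parseAll(extendedSparkSyntax, "ALTER TABLE t1 ADD COLUMNS"))
    }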
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSqlParser.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSqlParser.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonExtensionSqlParser.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
similarity index 99%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
index ef7c903..02c1a5b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
@@ -538,7 +538,7 @@ object CarbonSparkSqlParserUtil {
    * @return returns <true> if lower case conversion is needed else <false>
    */
   def needToConvertToLowerCase(key: String): Boolean = {
-    val noConvertList = Array(CarbonCommonConstants.COMPRESSOR, "PATH")
+    val noConvertList = Array(CarbonCommonConstants.COMPRESSOR, "PATH", "bad_record_path")
     !noConvertList.exists(x => x.equalsIgnoreCase(key))
   }
 
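
The one-line change above adds "bad_record_path" to the keys whose values must keep their original case (paths and compressor names are case sensitive). A standalone sketch of the check, with CarbonCommonConstants.COMPRESSOR inlined as an assumed literal:

    object LowerCaseSketch {
      // "carbon.column.compressor" stands in for CarbonCommonConstants.COMPRESSOR.
      private val noConvertList = Array("carbon.column.compressor", "PATH", "bad_record_path")

      def needToConvertToLowerCase(key: String): Boolean =
        !noConvertList.exists(_.equalsIgnoreCase(key))

      def main(args: Array[String]): Unit = {
        println(needToConvertToLowerCase("BAD_RECORD_PATH")) // false: keep case
        println(needToConvertToLowerCase("sort_columns"))    // true: lower-case it
      }
    }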
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/profiler/Profiler.scala b/integration/spark/src/main/scala/org/apache/spark/sql/profiler/Profiler.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/profiler/Profiler.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/profiler/Profiler.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/profiler/ProfilerListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/profiler/ProfilerListener.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/profiler/ProfilerListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/profiler/ProfilerListener.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/profiler/ProfilerLogger.scala b/integration/spark/src/main/scala/org/apache/spark/sql/profiler/ProfilerLogger.scala
similarity index 99%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/profiler/ProfilerLogger.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/profiler/ProfilerLogger.scala
index 438365e..40ab68c 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/profiler/ProfilerLogger.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/profiler/ProfilerLogger.scala
@@ -319,7 +319,7 @@ private[profiler] class ExecutionSummary(
    *     +------------------------------------------------------------------------------------------------------------------+
    *     |query_id: 23737310772188                                                                                          |
    *     |table_name: default.carbon_table                                                                                  |
-   *     |table_path: /carbondata/examples/spark2/target/store/default/carbon_table                                         |
+   *     |table_path: /carbondata/examples/spark/target/store/default/carbon_table                                         |
    *     |start_time: 2018-03-22 17:12:23.141                                                                               |
    *     |total_time: 152ms [(prepare inputFormat 55ms)~(getSplits 90ms)~(distributeSplits 4ms)~(3ms)]                      |
    *     |valid_segment_num: 1                                                                                              |
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/BlockletDataMapDetailsWithSchema.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/BlockletDataMapDetailsWithSchema.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/BlockletDataMapDetailsWithSchema.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/BlockletDataMapDetailsWithSchema.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/CarbonBlockLoaderHelper.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/CarbonBlockLoaderHelper.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/CarbonBlockLoaderHelper.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/CarbonBlockLoaderHelper.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/DistributableBlockletDataMapLoader.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/DistributableBlockletDataMapLoader.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/DistributableBlockletDataMapLoader.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/DistributableBlockletDataMapLoader.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/SparkBlockletDataMapLoaderJob.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/SparkBlockletDataMapLoaderJob.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/SparkBlockletDataMapLoaderJob.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/Jobs/SparkBlockletDataMapLoaderJob.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/DropIndexCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/DropIndexCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/DropIndexCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/DropIndexCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/RegisterIndexTableCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/RegisterIndexTableCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/RegisterIndexTableCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/RegisterIndexTableCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/SICreationCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SICreationCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/SICreationCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SICreationCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/SILoadCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SILoadCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/SILoadCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SILoadCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/SIRebuildSegmentCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SIRebuildSegmentCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/SIRebuildSegmentCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/SIRebuildSegmentCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/ShowIndexesCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/ShowIndexesCommand.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/command/ShowIndexesCommand.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/command/ShowIndexesCommand.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableColumnRenameEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableColumnRenameEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableColumnRenameEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableColumnRenameEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableCompactionPostEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableCompactionPostEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableCompactionPostEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableCompactionPostEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableDropColumnEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableDropColumnEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableDropColumnEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableDropColumnEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableMergeIndexSIEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableMergeIndexSIEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableMergeIndexSIEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableMergeIndexSIEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableRenameEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableRenameEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableRenameEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/AlterTableRenameEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/CleanFilesPostEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/CleanFilesPostEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/CleanFilesPostEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/CleanFilesPostEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/CreateCarbonRelationEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/CreateCarbonRelationEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/CreateCarbonRelationEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/CreateCarbonRelationEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteFromTableEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteFromTableEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteFromTableEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteFromTableEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByDateListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByDateListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByDateListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByDateListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByIdListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByIdListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByIdListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/DeleteSegmentByIdListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/DropCacheSIEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/DropCacheSIEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/DropCacheSIEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/DropCacheSIEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/LoadSIEvents.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/LoadSIEvents.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/LoadSIEvents.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/LoadSIEvents.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIDropEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIDropEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIDropEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIDropEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListenerForFailedSegments.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListenerForFailedSegments.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListenerForFailedSegments.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/SILoadEventListenerForFailedSegments.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIRefreshEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIRefreshEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIRefreshEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/SIRefreshEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/ShowCacheSIEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/ShowCacheSIEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/ShowCacheSIEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/ShowCacheSIEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/UpdateTablePreEventListener.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/UpdateTablePreEventListener.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/events/UpdateTablePreEventListener.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/events/UpdateTablePreEventListener.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/exception/IndexTableExistException.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/exception/IndexTableExistException.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/exception/IndexTableExistException.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/exception/IndexTableExistException.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/exception/SecondaryIndexException.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/exception/SecondaryIndexException.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/exception/SecondaryIndexException.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/exception/SecondaryIndexException.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
similarity index 99%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
index ffc4d68..0d192c1 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
@@ -188,7 +188,7 @@ object CarbonInternalMetastore {
         }
       } catch {
         case e: Exception =>
-          // In case of creating a table, hivetable will not be available.
+          // In case of creating a table, hive table will not be available.
           LOGGER.error(e.getMessage, e)
       }
     }
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/joins/BroadCastSIFilterPushJoin.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/joins/BroadCastSIFilterPushJoin.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/joins/BroadCastSIFilterPushJoin.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/joins/BroadCastSIFilterPushJoin.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/load/CarbonInternalLoaderUtil.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/load/CarbonInternalLoaderUtil.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/load/CarbonInternalLoaderUtil.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/load/CarbonInternalLoaderUtil.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/load/Compactor.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/load/Compactor.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/load/Compactor.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/load/Compactor.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/load/RowComparatorWithOutKettle.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/load/RowComparatorWithOutKettle.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/load/RowComparatorWithOutKettle.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/load/RowComparatorWithOutKettle.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonCostBasedOptimizer.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonCostBasedOptimizer.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonCostBasedOptimizer.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonCostBasedOptimizer.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSITransformationRule.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSITransformationRule.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSITransformationRule.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSITransformationRule.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSecondaryIndexOptimizer.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSecondaryIndexOptimizer.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSecondaryIndexOptimizer.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSecondaryIndexOptimizer.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/query/CarbonSecondaryIndexExecutor.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/query/CarbonSecondaryIndexExecutor.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/query/CarbonSecondaryIndexExecutor.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/query/CarbonSecondaryIndexExecutor.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/query/SecondaryIndexQueryResultProcessor.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/query/SecondaryIndexQueryResultProcessor.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/query/SecondaryIndexQueryResultProcessor.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/query/SecondaryIndexQueryResultProcessor.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSIRebuildRDD.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSIRebuildRDD.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSIRebuildRDD.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSIRebuildRDD.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSecondaryIndexRDD.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSecondaryIndexRDD.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSecondaryIndexRDD.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/CarbonSecondaryIndexRDD.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/SecondaryIndexCreator.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/SecondaryIndexCreator.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/SecondaryIndexCreator.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/rdd/SecondaryIndexCreator.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/CarbonInternalScalaUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/CarbonInternalScalaUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/CarbonInternalScalaUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/CarbonInternalScalaUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/FileInternalUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/FileInternalUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/FileInternalUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/FileInternalUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/IndexTableUtil.java b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/IndexTableUtil.java
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/IndexTableUtil.java
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/IndexTableUtil.java
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/InternalKeyVal.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/InternalKeyVal.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/InternalKeyVal.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/InternalKeyVal.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/SecondaryIndexUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/SecondaryIndexUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/secondaryindex/util/SecondaryIndexUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/util/SecondaryIndexUtil.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala b/integration/spark/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/test/ResourceRegisterAndCopier.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala b/integration/spark/src/main/scala/org/apache/spark/sql/test/SparkTestQueryExecutor.scala
similarity index 82%
rename from integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/test/SparkTestQueryExecutor.scala
index a9d9dbe..371b894 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/test/SparkTestQueryExecutor.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.test
 
 import org.apache.spark.SparkConf
 import org.apache.spark.sql._
+import org.apache.spark.sql.carbondata.execution.datasources.CarbonFileIndexReplaceRule
 import org.apache.spark.sql.test.TestQueryExecutor.{hdfsUrl, integrationPath, warehouse}
 
 import org.apache.carbondata.common.logging.LogServiceFactory
@@ -30,21 +31,18 @@ import org.apache.carbondata.core.util.CarbonProperties
  * This class is a sql executor of unit test case for spark version 2.x.
  */
 
-class Spark2TestQueryExecutor extends TestQueryExecutorRegister {
+class SparkTestQueryExecutor extends TestQueryExecutorRegister {
 
-  override def sql(sqlText: String): DataFrame = Spark2TestQueryExecutor.spark.sql(sqlText)
+  override def sql(sqlText: String): DataFrame = SparkTestQueryExecutor.spark.sql(sqlText)
 
-  override def sqlContext: SQLContext = Spark2TestQueryExecutor.spark.sqlContext
+  override def sqlContext: SQLContext = SparkTestQueryExecutor.spark.sqlContext
 
-  override def stop(): Unit = Spark2TestQueryExecutor.spark.stop()
+  override def stop(): Unit = SparkTestQueryExecutor.spark.stop()
 }
 
-object Spark2TestQueryExecutor {
+object SparkTestQueryExecutor {
   private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
-  LOGGER.info("use TestQueryExecutorImplV2")
-  CarbonProperties.getInstance()
-    .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FORCE")
-
+  LOGGER.info("use TestQueryExecutorImpl")
   val conf = new SparkConf()
   if (!TestQueryExecutor.masterUrl.startsWith("local")) {
     conf.setJars(TestQueryExecutor.jars).
@@ -61,26 +59,27 @@ object Spark2TestQueryExecutor {
     conf.set("spark.hadoop.hive.metastore.uris",
       System.getProperty("spark.hadoop.hive.metastore.uris"))
   }
-  val metaStoreDB = s"$integrationPath/spark-common-cluster-test/target"
   val extensions = CarbonProperties
     .getInstance()
     .getProperty("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
   val spark = SparkSession
-    .builder().config(conf)
+    .builder()
+    .config(conf)
     .master(TestQueryExecutor.masterUrl)
-    .appName("Spark2TestQueryExecutor")
+    .appName("SparkTestQueryExecutor")
     .enableHiveSupport()
     .config("spark.sql.warehouse.dir", warehouse)
     .config("spark.sql.crossJoin.enabled", "true")
     .config("spark.sql.extensions", extensions)
     .getOrCreate()
+  spark.experimental.extraOptimizations = Seq(new CarbonFileIndexReplaceRule)
   CarbonEnv.getInstance(spark)
   if (warehouse.startsWith("hdfs://")) {
     System.setProperty(CarbonCommonConstants.HDFS_TEMP_LOCATION, warehouse)
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOCK_TYPE,
       CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS)
     ResourceRegisterAndCopier.
-      copyResourcesifNotExists(hdfsUrl, s"$integrationPath/spark-common-test/src/test/resources",
+      copyResourcesifNotExists(hdfsUrl, s"$integrationPath/spark/src/test/resources",
         s"$integrationPath//spark-common-cluster-test/src/test/resources/testdatafileslist.txt")
   }
   FileFactory.getConfiguration.
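
The renamed executor now registers CarbonFileIndexReplaceRule through Spark's experimental hook. A minimal sketch of that hook, assuming a local Spark 2.x session; NoOpRule is a placeholder standing in for CarbonFileIndexReplaceRule:

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
    import org.apache.spark.sql.catalyst.rules.Rule

    object ExtraOptimizationSketch {
      // A do-nothing optimizer rule; any Rule[LogicalPlan] can be plugged in.
      class NoOpRule extends Rule[LogicalPlan] {
        override def apply(plan: LogicalPlan): LogicalPlan = plan
      }

      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[1]")
          .appName("ExtraOptimizationSketch")
          .getOrCreate()
        // Assignment replaces any previously registered extra rules;
        // append with :+ instead if earlier rules must be preserved.
        spark.experimental.extraOptimizations = Seq(new NoOpRule)
        spark.stop()
      }
    }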
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala b/integration/spark/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
similarity index 87%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
index bdc92c0..9267bbd 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
@@ -31,7 +31,7 @@ import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.CarbonProperties
 
 /**
- * the sql executor of spark-common-test
+ * the SQL executor of the spark module
  */
 trait TestQueryExecutorRegister {
   def sql(sqlText: String): DataFrame
@@ -51,19 +51,21 @@ object TestQueryExecutor {
     // Check whether it is integration module
     var isIntegrationModule = path.indexOf("/integration/") > -1
     // Get the local target folder path
-    val targetPath = path.substring(0, path.lastIndexOf("/target/") + 8)
+    val targetPath = path.substring(0, path.lastIndexOf("/target/") + 7)
     // Get the relative project path
     val projectPathLocal = if (isIntegrationModule) {
       path.substring(0, path.indexOf("/integration/"))
-    } else if (path.indexOf("/datamap/") > -1) {
-      path.substring(0, path.indexOf("/datamap/"))
+    } else if (path.indexOf("/mv/") > -1) {
+      path.substring(0, path.indexOf("/mv/"))
+    } else if (path.indexOf("/secondary-index/") > -1) {
+      isIntegrationModule = true
+      path.substring(0, path.indexOf("/index/"))
+    } else if (path.indexOf("/index/") > -1) {
+      path.substring(0, path.indexOf("/index/"))
     } else if (path.indexOf("/tools/") > -1) {
       path.substring(0, path.indexOf("/tools/"))
     } else if (path.indexOf("/examples/") > -1) {
       path.substring(0, path.indexOf("/examples/"))
-    } else if (path.indexOf("/secondary_index/") > -1) {
-      isIntegrationModule = true
-      path.substring(0, path.indexOf("/secondary_index/"))
     } else {
       path
     }
@@ -72,13 +74,12 @@ object TestQueryExecutor {
   LOGGER.info(s"project path: $projectPath")
   val integrationPath = s"$projectPath/integration"
   val target = if (isIntegrationModule) {
-    // If integration module , always point to spark-common/target location
-    s"$integrationPath/spark-common/target"
+    // If integration module, always point to spark/target location
+    s"$integrationPath/spark/target"
   } else {
     // Otherwise point to respective target folder location
     localTarget
   }
-  val metaStoreDB = target
   val location = s"$target/dbpath"
   val masterUrl = {
     val property = System.getProperty("spark.master.url")
@@ -101,7 +102,7 @@ object TestQueryExecutor {
   val resourcesPath = if (hdfsUrl.startsWith("hdfs://")) {
     hdfsUrl
   } else {
-    s"$integrationPath/spark-common-test/src/test/resources"
+    s"$integrationPath/spark/src/test/resources"
   }
 
   val warehouse = if (hdfsUrl.startsWith("hdfs://")) {
@@ -148,12 +149,10 @@ object TestQueryExecutor {
     TestQueryExecutor.projectPath + "/core/target",
     TestQueryExecutor.projectPath + "/hadoop/target",
     TestQueryExecutor.projectPath + "/processing/target",
-    TestQueryExecutor.projectPath + "/integration/spark-datasource/target",
-    TestQueryExecutor.projectPath + "/integration/spark-common/target",
-    TestQueryExecutor.projectPath + "/integration/spark2/target",
-    TestQueryExecutor.projectPath + "/integration/spark-common/target/jars",
+    TestQueryExecutor.projectPath + "/integration/spark/target",
+    TestQueryExecutor.projectPath + "/integration/spark/target/jars",
     TestQueryExecutor.projectPath + "/streaming/target",
-    TestQueryExecutor.projectPath + "/store/sdk/target")
+    TestQueryExecutor.projectPath + "/sdk/sdk/target")
 
   lazy val jars = {
     val jarsLocal = new ArrayBuffer[String]()
@@ -175,6 +174,7 @@ object TestQueryExecutor {
     .addProperty(CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE, "1024")
     .addProperty(CarbonCommonConstants.CARBON_MAX_EXECUTOR_LRU_CACHE_SIZE, "1024")
     .addProperty(CarbonCommonConstants.CARBON_SYSTEM_FOLDER_LOCATION, systemFolderPath)
+    .addProperty(CarbonCommonConstants.CARBON_MINMAX_ALLOWED_BYTE_COUNT, "40")
 
   private def lookupQueryExecutor: Class[_] = {
     ServiceLoader.load(classOf[TestQueryExecutorRegister], Utils.getContextOrSparkClassLoader)
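
The first hunk of this file trims the target-path substring from "+ 8" to "+ 7": "/target/" is eight characters long, so the old offset kept the trailing slash while the new one stops at ".../target". A quick worked check over a hypothetical path:

    object TargetPathSketch {
      def main(args: Array[String]): Unit = {
        val path = "/repo/integration/spark/target/classes" // hypothetical
        val idx = path.lastIndexOf("/target/")
        println(path.substring(0, idx + 8)) // "/repo/integration/spark/target/"
        println(path.substring(0, idx + 7)) // "/repo/integration/spark/target"
      }
    }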
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/CarbonFunSuite.scala b/integration/spark/src/main/scala/org/apache/spark/sql/test/util/CarbonFunSuite.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/CarbonFunSuite.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/test/util/CarbonFunSuite.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/PlanTest.scala b/integration/spark/src/main/scala/org/apache/spark/sql/test/util/PlanTest.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/PlanTest.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/test/util/PlanTest.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala b/integration/spark/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
similarity index 66%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
index 5af48ef..2c2e370 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
@@ -21,14 +21,15 @@ import java.util.{Locale, TimeZone}
 
 import scala.collection.JavaConverters._
 
-import org.apache.spark.sql.{DataFrame, Row, SQLContext}
+import org.apache.spark.sql.{CarbonEnv, CarbonToSparkAdapter, DataFrame, Row, SQLContext}
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.util._
 import org.apache.spark.sql.test.TestQueryExecutor
 
 import org.apache.carbondata.common.logging.LogServiceFactory
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.core.cache.CacheProvider
+import org.apache.carbondata.core.constants.{CarbonCommonConstants, CarbonLoadOptionConstants}
+import org.apache.carbondata.core.util.{CarbonProperties, SessionParams, ThreadLocalSessionInfo}
 
 
 
@@ -113,14 +114,95 @@ class QueryTest extends PlanTest {
   def sql(sqlText: String): DataFrame = TestQueryExecutor.INSTANCE.sql(sqlText)
 
   val sqlContext: SQLContext = TestQueryExecutor.INSTANCE.sqlContext
+  val hiveClient = CarbonToSparkAdapter.getHiveExternalCatalog(sqlContext.sparkSession).client
 
+  lazy val projectPath = TestQueryExecutor.projectPath
   lazy val warehouse = TestQueryExecutor.warehouse
   lazy val storeLocation = warehouse
   val resourcesPath = TestQueryExecutor.resourcesPath
-  val metaStoreDB = TestQueryExecutor.metaStoreDB
+  val target = TestQueryExecutor.target
   val integrationPath = TestQueryExecutor.integrationPath
   val dblocation = TestQueryExecutor.location
   val defaultParallelism = sqlContext.sparkContext.defaultParallelism
+
+  def defaultConfig(): Unit = {
+    CarbonEnv.getInstance(sqlContext.sparkSession).carbonSessionInfo.getSessionParams.clear()
+    ThreadLocalSessionInfo.unsetAll()
+    Option(CacheProvider.getInstance().getCarbonCache).map(_.clear())
+    CarbonProperties.getInstance()
+      .addProperty("enable.unsafe.sort", "true")
+      .removeProperty(CarbonCommonConstants.LOAD_SORT_SCOPE)
+      .removeProperty(CarbonLoadOptionConstants.CARBON_OPTIONS_SORT_SCOPE)
+    sqlContext.setConf("enable.unsafe.sort", "true")
+    sqlContext.setConf("carbon.options.sort.scope", "NO_SORT")
+    sqlContext.setConf(
+      CarbonLoadOptionConstants.CARBON_OPTIONS_IS_EMPTY_DATA_BAD_RECORD,
+      CarbonLoadOptionConstants.CARBON_OPTIONS_IS_EMPTY_DATA_BAD_RECORD_DEFAULT)
+    sqlContext.setConf("carbon.custom.block.distribution", "block")
+    sqlContext.setConf("carbon.options.bad.records.action", "FORCE")
+    sqlContext.setConf("carbon.options.bad.records.logger.enable", "false")
+
+    Seq(
+      CarbonCommonConstants.LOAD_SORT_SCOPE,
+      "carbon.enable.vector.reader",
+      "carbon.push.rowfilters.for.vector",
+      "spark.sql.sources.commitProtocolClass").foreach { key =>
+      sqlContext.sparkSession.conf.unset(key)
+    }
+  }
+
+  def printConfiguration(): Unit = {
+    CarbonProperties.getInstance().print()
+    LOGGER.error("------spark conf--------------------------")
+    LOGGER.error(sqlContext.sessionState.conf.getAllConfs
+      .map(x => x._1 + "=" + x._2).mkString(", "))
+    val sessionParams =
+      CarbonEnv.getInstance(sqlContext.sparkSession).carbonSessionInfo.getSessionParams.getAll
+    LOGGER.error("------CarbonEnv sessionParam--------------------------")
+    LOGGER.error(sessionParams.asScala.map(x => x._1 + "=" + x._2).mkString(", "))
+  }
+
+  def printTable(table: String, database: String = "default"): Unit = {
+    sql("SELECT current_database()").show(100, false)
+    sql(s"describe formatted ${ database }.${ table }").show(100, false)
+  }
+
+  def setCarbonProperties(propertiesString: String): Unit = {
+    val properties = propertiesString.split(", ", -1)
+    val exclude = Set("carbon.system.folder.location",
+      "carbon.badRecords.location",
+      "carbon.storelocation")
+    properties.foreach { property =>
+      val entry = property.split("=")
+      if (!exclude.contains(entry(0))) {
+        CarbonProperties.getInstance().addProperty(entry(0), entry(1))
+      }
+    }
+  }
+
+  def confSparkSession(confString: String): Unit = {
+    val confs = confString.split(", ", -1)
+    val exclude = Set("spark.sql.warehouse.dir",
+      "carbon.options.bad.record.path",
+      "spark.sql.catalogImplementation",
+      "spark.sql.extensions",
+      "spark.app.name",
+      "spark.driver.host",
+      "spark.driver.port",
+      "spark.executor.id",
+      "spark.master",
+      "spark.app.id")
+    confs.foreach { conf =>
+      val entry = conf.split("=")
+      if (!exclude.contains(entry(0))) {
+        if (entry.length == 2) {
+          sqlContext.sessionState.conf.setConfString(entry(0), entry(1))
+        } else {
+          sqlContext.sessionState.conf.setConfString(entry(0), "")
+        }
+      }
+    }
+  }
 }
 
 object QueryTest {
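
A hedged usage sketch for the helpers added to QueryTest above; SampleSuite is hypothetical and assumes QueryTest's hierarchy exposes ScalaTest's test(...) the way the module's other suites do:

    import org.apache.spark.sql.test.util.QueryTest
    import org.scalatest.BeforeAndAfterAll

    class SampleSuite extends QueryTest with BeforeAndAfterAll {

      override def beforeAll(): Unit = {
        // Reset session params, the carbon cache and sort-scope properties so
        // the suite starts from defaults regardless of what ran before it.
        defaultConfig()
      }

      test("dump configuration when a query fails") {
        try {
          sql("SELECT count(*) FROM carbon_table").collect()
        } catch {
          case e: Exception =>
            printConfiguration() // CarbonProperties + spark conf + session params
            throw e
        }
      }
    }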
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/util/CarbonException.scala b/integration/spark/src/main/scala/org/apache/spark/sql/util/CarbonException.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/util/CarbonException.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/util/CarbonException.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/util/CarbonMetastoreTypes.scala b/integration/spark/src/main/scala/org/apache/spark/sql/util/CarbonMetastoreTypes.scala
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/util/CarbonMetastoreTypes.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/util/CarbonMetastoreTypes.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/util/SparkSQLUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/util/SparkSQLUtil.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/sql/util/SparkSQLUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/util/SparkSQLUtil.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/util/SparkTypeConverter.scala b/integration/spark/src/main/scala/org/apache/spark/sql/util/SparkTypeConverter.scala
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/sql/util/SparkTypeConverter.scala
rename to integration/spark/src/main/scala/org/apache/spark/sql/util/SparkTypeConverter.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala b/integration/spark/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/util/CarbonReflectionUtils.scala b/integration/spark/src/main/scala/org/apache/spark/util/CarbonReflectionUtils.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/util/CarbonReflectionUtils.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/CarbonReflectionUtils.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala b/integration/spark/src/main/scala/org/apache/spark/util/CleanFiles.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/CleanFiles.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala b/integration/spark/src/main/scala/org/apache/spark/util/Compaction.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/Compaction.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala b/integration/spark/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala b/integration/spark/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/util/FileUtils.scala b/integration/spark/src/main/scala/org/apache/spark/util/FileUtils.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/util/FileUtils.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/FileUtils.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/MergeIndexUtil.scala b/integration/spark/src/main/scala/org/apache/spark/util/MergeIndexUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/util/MergeIndexUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/MergeIndexUtil.scala
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/util/ScalaCompilerUtil.scala b/integration/spark/src/main/scala/org/apache/spark/util/ScalaCompilerUtil.scala
similarity index 100%
rename from integration/spark-common/src/main/scala/org/apache/spark/util/ScalaCompilerUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/ScalaCompilerUtil.scala
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/util/SparkUtil.scala b/integration/spark/src/main/scala/org/apache/spark/util/SparkUtil.scala
similarity index 100%
rename from integration/spark-datasource/src/main/scala/org/apache/spark/util/SparkUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/SparkUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/TableAPIUtil.scala b/integration/spark/src/main/scala/org/apache/spark/util/TableAPIUtil.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/util/TableAPIUtil.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/TableAPIUtil.scala
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala b/integration/spark/src/main/scala/org/apache/spark/util/TableLoader.scala
similarity index 100%
rename from integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
rename to integration/spark/src/main/scala/org/apache/spark/util/TableLoader.scala
diff --git a/integration/spark-common/src/main/spark2.3/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala b/integration/spark/src/main/spark2.3/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala
similarity index 100%
rename from integration/spark-common/src/main/spark2.3/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala
rename to integration/spark/src/main/spark2.3/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala
diff --git a/integration/spark2/src/main/spark2.3/org/apache/spark/sql/CarbonBoundReference.scala b/integration/spark/src/main/spark2.3/org/apache/spark/sql/CarbonBoundReference.scala
similarity index 100%
rename from integration/spark2/src/main/spark2.3/org/apache/spark/sql/CarbonBoundReference.scala
rename to integration/spark/src/main/spark2.3/org/apache/spark/sql/CarbonBoundReference.scala
diff --git a/integration/spark2/src/main/spark2.3/org/apache/spark/sql/CarbonToSparkAdapter.scala b/integration/spark/src/main/spark2.3/org/apache/spark/sql/CarbonToSparkAdapter.scala
similarity index 94%
rename from integration/spark2/src/main/spark2.3/org/apache/spark/sql/CarbonToSparkAdapter.scala
rename to integration/spark/src/main/spark2.3/org/apache/spark/sql/CarbonToSparkAdapter.scala
index 436b2c7..b084235 100644
--- a/integration/spark2/src/main/spark2.3/org/apache/spark/sql/CarbonToSparkAdapter.scala
+++ b/integration/spark/src/main/spark2.3/org/apache/spark/sql/CarbonToSparkAdapter.scala
@@ -21,14 +21,15 @@ import java.net.URI
 
 import org.apache.spark.SparkContext
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
+import org.apache.spark.sql.carbondata.execution.datasources.CarbonFileIndexReplaceRule
 import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, SessionCatalog}
+import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference, AttributeSet, Expression, ExpressionSet, ExprId, NamedExpression, ScalaUDF, SubqueryExpression}
 import org.apache.spark.sql.catalyst.expressions.codegen.ExprCode
-import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference, AttributeSet, ExprId, Expression, ExpressionSet, NamedExpression, ScalaUDF, SubqueryExpression}
 import org.apache.spark.sql.catalyst.optimizer.Optimizer
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation}
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.execution.command.ExplainCommand
-import org.apache.spark.sql.hive.{CarbonMVRules, HiveExternalCatalog}
+import org.apache.spark.sql.hive.HiveExternalCatalog
 import org.apache.spark.sql.optimizer.{CarbonIUDRule, CarbonUDFTransformRule}
 import org.apache.spark.sql.secondaryindex.optimizer.CarbonSITransformationRule
 import org.apache.spark.sql.types.{DataType, Metadata}
@@ -139,7 +140,7 @@ class CarbonOptimizer(
     optimizer: Optimizer) extends Optimizer(catalog) {
 
   private lazy val iudRule = Batch("IUD Optimizers", fixedPoint,
-    Seq(new CarbonIUDRule(), new CarbonUDFTransformRule()): _*)
+    Seq(new CarbonIUDRule(), new CarbonUDFTransformRule(), new CarbonFileIndexReplaceRule()): _*)
 
   private lazy val secondaryIndexRule = Batch("SI Optimizers", Once,
     Seq(new CarbonSITransformationRule(session)): _*)
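
This hunk (and the matching spark2.4 one further down) appends CarbonFileIndexReplaceRule to the IUD batch. A minimal sketch, assuming Spark 2.x Catalyst internals, of how rule batches compose inside a custom Optimizer; MyRule is a placeholder:

    import org.apache.spark.sql.catalyst.catalog.SessionCatalog
    import org.apache.spark.sql.catalyst.optimizer.Optimizer
    import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
    import org.apache.spark.sql.catalyst.rules.Rule

    class RuleBatchSketch(catalog: SessionCatalog) extends Optimizer(catalog) {

      // A do-nothing rule standing in for CarbonIUDRule and friends.
      class MyRule extends Rule[LogicalPlan] {
        override def apply(plan: LogicalPlan): LogicalPlan = plan
      }

      // fixedPoint reruns the batch until the plan stabilizes; Once would
      // apply it in a single pass, as the SI batch above does.
      private lazy val myBatch = Batch("My Optimizers", fixedPoint, new MyRule)

      override def batches: Seq[Batch] = super.batches :+ myBatch
    }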
diff --git a/integration/spark2/src/main/spark2.3/org/apache/spark/sql/MixedFormatHandlerUtil.scala b/integration/spark/src/main/spark2.3/org/apache/spark/sql/MixedFormatHandlerUtil.scala
similarity index 100%
rename from integration/spark2/src/main/spark2.3/org/apache/spark/sql/MixedFormatHandlerUtil.scala
rename to integration/spark/src/main/spark2.3/org/apache/spark/sql/MixedFormatHandlerUtil.scala
diff --git a/integration/spark2/src/main/spark2.3/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala b/integration/spark/src/main/spark2.3/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala
similarity index 100%
rename from integration/spark2/src/main/spark2.3/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala
rename to integration/spark/src/main/spark2.3/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala
diff --git a/integration/spark2/src/main/spark2.3/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala b/integration/spark/src/main/spark2.3/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala
similarity index 100%
rename from integration/spark2/src/main/spark2.3/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala
rename to integration/spark/src/main/spark2.3/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala
diff --git a/integration/spark-common/src/main/spark2.4/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala b/integration/spark/src/main/spark2.4/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala
similarity index 100%
rename from integration/spark-common/src/main/spark2.4/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala
rename to integration/spark/src/main/spark2.4/org/apache/carbondata/spark/adapter/CarbonToSparkAdapter.scala
diff --git a/integration/spark2/src/main/spark2.4/org/apache/spark/sql/CarbonBoundReference.scala b/integration/spark/src/main/spark2.4/org/apache/spark/sql/CarbonBoundReference.scala
similarity index 100%
rename from integration/spark2/src/main/spark2.4/org/apache/spark/sql/CarbonBoundReference.scala
rename to integration/spark/src/main/spark2.4/org/apache/spark/sql/CarbonBoundReference.scala
diff --git a/integration/spark2/src/main/spark2.4/org/apache/spark/sql/CarbonToSparkAdapter.scala b/integration/spark/src/main/spark2.4/org/apache/spark/sql/CarbonToSparkAdapter.scala
similarity index 96%
rename from integration/spark2/src/main/spark2.4/org/apache/spark/sql/CarbonToSparkAdapter.scala
rename to integration/spark/src/main/spark2.4/org/apache/spark/sql/CarbonToSparkAdapter.scala
index 4a0af0d..76ee6d7 100644
--- a/integration/spark2/src/main/spark2.4/org/apache/spark/sql/CarbonToSparkAdapter.scala
+++ b/integration/spark/src/main/spark2.4/org/apache/spark/sql/CarbonToSparkAdapter.scala
@@ -21,8 +21,9 @@ import java.net.URI
 
 import org.apache.spark.SparkContext
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
+import org.apache.spark.sql.carbondata.execution.datasources.CarbonFileIndexReplaceRule
 import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, ExternalCatalogWithListener, SessionCatalog}
-import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference, AttributeSet, ExprId, Expression, ExpressionSet, NamedExpression, ScalaUDF, SubqueryExpression}
+import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference, AttributeSet, Expression, ExpressionSet, ExprId, NamedExpression, ScalaUDF, SubqueryExpression}
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.optimizer.Optimizer
@@ -173,7 +174,7 @@ class CarbonOptimizer(
     optimizer: Optimizer) extends Optimizer(catalog) {
 
   private lazy val iudRule = Batch("IUD Optimizers", fixedPoint,
-    Seq(new CarbonIUDRule(), new CarbonUDFTransformRule()): _*)
+    Seq(new CarbonIUDRule(), new CarbonUDFTransformRule(), new CarbonFileIndexReplaceRule()): _*)
 
   private lazy val secondaryIndexRule = Batch("SI Optimizers", Once,
     Seq(new CarbonSITransformationRule(session)): _*)
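
The Spark 2.4 adapter mirrors the same batch change, along with an import reordering in the same file. A minimal sketch, assuming the enclosing optimizer keeps Spark's built-in batches, of how a subclass of Optimizer typically appends an extra batch in Spark 2.4; IdentityRule is the hypothetical placeholder rule sketched above:

    import org.apache.spark.sql.catalyst.catalog.SessionCatalog
    import org.apache.spark.sql.catalyst.optimizer.Optimizer

    // Assumed wiring for illustration only. Once runs a batch a single time;
    // fixedPoint (used by the IUD batch above) re-runs it until the plan
    // stops changing or the iteration limit is hit.
    class ExampleOptimizer(catalog: SessionCatalog) extends Optimizer(catalog) {
      override def defaultBatches: Seq[Batch] =
        super.defaultBatches :+ Batch("Example", Once, IdentityRule)
    }
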
diff --git a/integration/spark2/src/main/spark2.4/org/apache/spark/sql/MixedFormatHandlerUtil.scala b/integration/spark/src/main/spark2.4/org/apache/spark/sql/MixedFormatHandlerUtil.scala
similarity index 100%
rename from integration/spark2/src/main/spark2.4/org/apache/spark/sql/MixedFormatHandlerUtil.scala
rename to integration/spark/src/main/spark2.4/org/apache/spark/sql/MixedFormatHandlerUtil.scala
diff --git a/integration/spark2/src/main/spark2.4/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala b/integration/spark/src/main/spark2.4/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala
similarity index 100%
rename from integration/spark2/src/main/spark2.4/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala
rename to integration/spark/src/main/spark2.4/org/apache/spark/sql/execution/strategy/CarbonDataSourceScan.scala
diff --git a/integration/spark2/src/main/spark2.4/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala b/integration/spark/src/main/spark2.4/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala
similarity index 100%
rename from integration/spark2/src/main/spark2.4/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala
rename to integration/spark/src/main/spark2.4/org/apache/spark/sql/hive/CarbonSessionStateBuilder.scala
diff --git a/integration/spark-datasource/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister b/integration/spark/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
similarity index 95%
rename from integration/spark-datasource/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
rename to integration/spark/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
index 619ffd1..ddb858b 100644
--- a/integration/spark-datasource/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
+++ b/integration/spark/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
@@ -14,4 +14,5 @@
 ## See the License for the specific language governing permissions and
 ## limitations under the License.
 ## ------------------------------------------------------------------------
+org.apache.spark.sql.CarbonSource
 org.apache.spark.sql.carbondata.execution.datasources.SparkCarbonFileFormat
\ No newline at end of file
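
Adding org.apache.spark.sql.CarbonSource to this java.util.ServiceLoader provider file is what lets Spark resolve the format by its short name. A simplified sketch of the lookup (Spark's DataSource.lookupDataSource does roughly this; the example assumes CarbonSource registers the short name "carbondata"):

    import java.util.ServiceLoader
    import scala.collection.JavaConverters._
    import org.apache.spark.sql.sources.DataSourceRegister

    // Scan every provider named in META-INF/services files on the classpath
    // and match on shortName(), so spark.read.format("carbondata") works.
    val providers = ServiceLoader.load(classOf[DataSourceRegister]).asScala
    val carbon = providers.find(_.shortName() == "carbondata")
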
diff --git a/integration/spark2/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister b/integration/spark/src/resources/META-INF/services/org.apache.spark.sql.test.TestQueryExecutorRegister
similarity index 95%
rename from integration/spark2/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
rename to integration/spark/src/resources/META-INF/services/org.apache.spark.sql.test.TestQueryExecutorRegister
index d09c9b5..fc96db4 100644
--- a/integration/spark2/src/resources/META-INF/services/org.apache.spark.sql.sources.DataSourceRegister
+++ b/integration/spark/src/resources/META-INF/services/org.apache.spark.sql.test.TestQueryExecutorRegister
@@ -14,4 +14,4 @@
 ## See the License for the specific language governing permissions and
 ## limitations under the License.
 ## ------------------------------------------------------------------------
-org.apache.spark.sql.CarbonSource
\ No newline at end of file
+org.apache.spark.sql.test.SparkTestQueryExecutor
\ No newline at end of file
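
The test-executor registration above follows the same ServiceLoader convention: the file under META-INF/services/ is named after the fully-qualified interface, and each non-comment line names an implementing class. Per this diff, after the rename the file body (license header elided) is just:

    org.apache.spark.sql.test.SparkTestQueryExecutor
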
diff --git a/integration/spark-common-test/src/test/java/org/apache/carbondata/sdk/util/BinaryUtil.java b/integration/spark/src/test/java/org/apache/carbondata/sdk/util/BinaryUtil.java
similarity index 100%
rename from integration/spark-common-test/src/test/java/org/apache/carbondata/sdk/util/BinaryUtil.java
rename to integration/spark/src/test/java/org/apache/carbondata/sdk/util/BinaryUtil.java
diff --git a/integration/spark2/src/test/java/org/apache/carbondata/stream/CarbonStreamRecordReaderTest.java b/integration/spark/src/test/java/org/apache/carbondata/stream/CarbonStreamRecordReaderTest.java
similarity index 100%
rename from integration/spark2/src/test/java/org/apache/carbondata/stream/CarbonStreamRecordReaderTest.java
rename to integration/spark/src/test/java/org/apache/carbondata/stream/CarbonStreamRecordReaderTest.java
diff --git a/integration/spark-common-test/src/test/resources/100_olap.csv b/integration/spark/src/test/resources/100_olap.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/100_olap.csv
rename to integration/spark/src/test/resources/100_olap.csv
diff --git a/integration/spark-common-test/src/test/resources/10dim_4msr.csv b/integration/spark/src/test/resources/10dim_4msr.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/10dim_4msr.csv
rename to integration/spark/src/test/resources/10dim_4msr.csv
diff --git a/integration/spark-common-test/src/test/resources/32000char.csv b/integration/spark/src/test/resources/32000char.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/32000char.csv
rename to integration/spark/src/test/resources/32000char.csv
diff --git a/integration/spark-datasource/src/test/resources/Array.csv b/integration/spark/src/test/resources/Array.csv
similarity index 100%
rename from integration/spark-datasource/src/test/resources/Array.csv
rename to integration/spark/src/test/resources/Array.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/T_Hive1.csv b/integration/spark/src/test/resources/IUD/T_Hive1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/T_Hive1.csv
rename to integration/spark/src/test/resources/IUD/T_Hive1.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/bad_record.csv b/integration/spark/src/test/resources/IUD/bad_record.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/bad_record.csv
rename to integration/spark/src/test/resources/IUD/bad_record.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/badrecord.csv b/integration/spark/src/test/resources/IUD/badrecord.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/badrecord.csv
rename to integration/spark/src/test/resources/IUD/badrecord.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/comp1.csv b/integration/spark/src/test/resources/IUD/comp1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/comp1.csv
rename to integration/spark/src/test/resources/IUD/comp1.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/comp2.csv b/integration/spark/src/test/resources/IUD/comp2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/comp2.csv
rename to integration/spark/src/test/resources/IUD/comp2.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/comp3.csv b/integration/spark/src/test/resources/IUD/comp3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/comp3.csv
rename to integration/spark/src/test/resources/IUD/comp3.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/comp4.csv b/integration/spark/src/test/resources/IUD/comp4.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/comp4.csv
rename to integration/spark/src/test/resources/IUD/comp4.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/dest.csv b/integration/spark/src/test/resources/IUD/dest.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/dest.csv
rename to integration/spark/src/test/resources/IUD/dest.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/negativevalue.csv b/integration/spark/src/test/resources/IUD/negativevalue.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/negativevalue.csv
rename to integration/spark/src/test/resources/IUD/negativevalue.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/other.csv b/integration/spark/src/test/resources/IUD/other.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/other.csv
rename to integration/spark/src/test/resources/IUD/other.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/sample.csv b/integration/spark/src/test/resources/IUD/sample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/sample.csv
rename to integration/spark/src/test/resources/IUD/sample.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/sample_updated.csv b/integration/spark/src/test/resources/IUD/sample_updated.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/sample_updated.csv
rename to integration/spark/src/test/resources/IUD/sample_updated.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/source2.csv b/integration/spark/src/test/resources/IUD/source2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/source2.csv
rename to integration/spark/src/test/resources/IUD/source2.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/source3.csv b/integration/spark/src/test/resources/IUD/source3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/source3.csv
rename to integration/spark/src/test/resources/IUD/source3.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/update01.csv b/integration/spark/src/test/resources/IUD/update01.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/update01.csv
rename to integration/spark/src/test/resources/IUD/update01.csv
diff --git a/integration/spark-common-test/src/test/resources/OLDFORMATTABLE.csv b/integration/spark/src/test/resources/OLDFORMATTABLE.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/OLDFORMATTABLE.csv
rename to integration/spark/src/test/resources/OLDFORMATTABLE.csv
diff --git a/integration/spark-common-test/src/test/resources/OLDFORMATTABLEHIVE.csv b/integration/spark/src/test/resources/OLDFORMATTABLEHIVE.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/OLDFORMATTABLEHIVE.csv
rename to integration/spark/src/test/resources/OLDFORMATTABLEHIVE.csv
diff --git a/integration/spark-common-test/src/test/resources/Struct.csv b/integration/spark/src/test/resources/Struct.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/Struct.csv
rename to integration/spark/src/test/resources/Struct.csv
diff --git a/integration/spark-common-test/src/test/resources/StructofStruct.csv b/integration/spark/src/test/resources/StructofStruct.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/StructofStruct.csv
rename to integration/spark/src/test/resources/StructofStruct.csv
diff --git a/integration/spark-common-test/src/test/resources/Test_Data1_Logrithmic.csv b/integration/spark/src/test/resources/Test_Data1_Logrithmic.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/Test_Data1_Logrithmic.csv
rename to integration/spark/src/test/resources/Test_Data1_Logrithmic.csv
diff --git a/integration/spark-common-test/src/test/resources/adap.csv b/integration/spark/src/test/resources/adap.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/adap.csv
rename to integration/spark/src/test/resources/adap.csv
diff --git a/integration/spark-common-test/src/test/resources/adap_double1.csv b/integration/spark/src/test/resources/adap_double1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/adap_double1.csv
rename to integration/spark/src/test/resources/adap_double1.csv
diff --git a/integration/spark-common-test/src/test/resources/adap_double2.csv b/integration/spark/src/test/resources/adap_double2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/adap_double2.csv
rename to integration/spark/src/test/resources/adap_double2.csv
diff --git a/integration/spark-common-test/src/test/resources/adap_double3.csv b/integration/spark/src/test/resources/adap_double3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/adap_double3.csv
rename to integration/spark/src/test/resources/adap_double3.csv
diff --git a/integration/spark-common-test/src/test/resources/adap_double4.csv b/integration/spark/src/test/resources/adap_double4.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/adap_double4.csv
rename to integration/spark/src/test/resources/adap_double4.csv
diff --git a/integration/spark-common-test/src/test/resources/adap_int1.csv b/integration/spark/src/test/resources/adap_int1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/adap_int1.csv
rename to integration/spark/src/test/resources/adap_int1.csv
diff --git a/integration/spark-common-test/src/test/resources/adap_int2.csv b/integration/spark/src/test/resources/adap_int2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/adap_int2.csv
rename to integration/spark/src/test/resources/adap_int2.csv
diff --git a/integration/spark-common-test/src/test/resources/adap_int3.csv b/integration/spark/src/test/resources/adap_int3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/adap_int3.csv
rename to integration/spark/src/test/resources/adap_int3.csv
diff --git a/integration/spark-common-test/src/test/resources/alldatatypeforpartition.csv b/integration/spark/src/test/resources/alldatatypeforpartition.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/alldatatypeforpartition.csv
rename to integration/spark/src/test/resources/alldatatypeforpartition.csv
diff --git a/integration/spark-common-test/src/test/resources/alldictionary/complex/20160423/1400_1405/complex.dictionary b/integration/spark/src/test/resources/alldictionary/complex/20160423/1400_1405/complex.dictionary
similarity index 100%
rename from integration/spark-common-test/src/test/resources/alldictionary/complex/20160423/1400_1405/complex.dictionary
rename to integration/spark/src/test/resources/alldictionary/complex/20160423/1400_1405/complex.dictionary
diff --git a/integration/spark-common-test/src/test/resources/alldictionary/sample/20160423/1400_1405/sample.dictionary b/integration/spark/src/test/resources/alldictionary/sample/20160423/1400_1405/sample.dictionary
similarity index 100%
rename from integration/spark-common-test/src/test/resources/alldictionary/sample/20160423/1400_1405/sample.dictionary
rename to integration/spark/src/test/resources/alldictionary/sample/20160423/1400_1405/sample.dictionary
diff --git a/integration/spark-common-test/src/test/resources/array1.csv b/integration/spark/src/test/resources/array1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/array1.csv
rename to integration/spark/src/test/resources/array1.csv
diff --git a/integration/spark-common-test/src/test/resources/arrayColumnEmpty.csv b/integration/spark/src/test/resources/arrayColumnEmpty.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/arrayColumnEmpty.csv
rename to integration/spark/src/test/resources/arrayColumnEmpty.csv
diff --git a/integration/spark-common-test/src/test/resources/avgTest.csv b/integration/spark/src/test/resources/avgTest.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/avgTest.csv
rename to integration/spark/src/test/resources/avgTest.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/bigtab.csv b/integration/spark/src/test/resources/badrecords/bigtab.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/bigtab.csv
rename to integration/spark/src/test/resources/badrecords/bigtab.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/bigtabbad.csv b/integration/spark/src/test/resources/badrecords/bigtabbad.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/bigtabbad.csv
rename to integration/spark/src/test/resources/badrecords/bigtabbad.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/complexdata.csv b/integration/spark/src/test/resources/badrecords/complexdata.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/complexdata.csv
rename to integration/spark/src/test/resources/badrecords/complexdata.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/datasample.csv b/integration/spark/src/test/resources/badrecords/datasample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/datasample.csv
rename to integration/spark/src/test/resources/badrecords/datasample.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/dummy.csv b/integration/spark/src/test/resources/badrecords/dummy.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/dummy.csv
rename to integration/spark/src/test/resources/badrecords/dummy.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/dummy2.csv b/integration/spark/src/test/resources/badrecords/dummy2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/dummy2.csv
rename to integration/spark/src/test/resources/badrecords/dummy2.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/emptyTimeStampValue.csv b/integration/spark/src/test/resources/badrecords/emptyTimeStampValue.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/emptyTimeStampValue.csv
rename to integration/spark/src/test/resources/badrecords/emptyTimeStampValue.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/emptyValues.csv b/integration/spark/src/test/resources/badrecords/emptyValues.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/emptyValues.csv
rename to integration/spark/src/test/resources/badrecords/emptyValues.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/insufficientColumns.csv b/integration/spark/src/test/resources/badrecords/insufficientColumns.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/insufficientColumns.csv
rename to integration/spark/src/test/resources/badrecords/insufficientColumns.csv
diff --git a/integration/spark-common-test/src/test/resources/badrecords/seriazableValue.csv b/integration/spark/src/test/resources/badrecords/seriazableValue.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/badrecords/seriazableValue.csv
rename to integration/spark/src/test/resources/badrecords/seriazableValue.csv
diff --git a/integration/spark-common-test/src/test/resources/bigIntData.csv b/integration/spark/src/test/resources/bigIntData.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/bigIntData.csv
rename to integration/spark/src/test/resources/bigIntData.csv
diff --git a/integration/spark-common-test/src/test/resources/bigIntDataWithHeader.csv b/integration/spark/src/test/resources/bigIntDataWithHeader.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/bigIntDataWithHeader.csv
rename to integration/spark/src/test/resources/bigIntDataWithHeader.csv
diff --git a/integration/spark-common-test/src/test/resources/bigIntDataWithoutHeader.csv b/integration/spark/src/test/resources/bigIntDataWithoutHeader.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/bigIntDataWithoutHeader.csv
rename to integration/spark/src/test/resources/bigIntDataWithoutHeader.csv
diff --git a/integration/spark-common-test/src/test/resources/big_decimal_without_header.csv b/integration/spark/src/test/resources/big_decimal_without_header.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/big_decimal_without_header.csv
rename to integration/spark/src/test/resources/big_decimal_without_header.csv
diff --git a/integration/spark-common-test/src/test/resources/big_int_Decimal.csv b/integration/spark/src/test/resources/big_int_Decimal.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/big_int_Decimal.csv
rename to integration/spark/src/test/resources/big_int_Decimal.csv
diff --git a/integration/spark-common-test/src/test/resources/binaryDataBase64.csv b/integration/spark/src/test/resources/binaryDataBase64.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/binaryDataBase64.csv
rename to integration/spark/src/test/resources/binaryDataBase64.csv
diff --git a/integration/spark-common-test/src/test/resources/binaryDataHex.csv b/integration/spark/src/test/resources/binaryDataHex.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/binaryDataHex.csv
rename to integration/spark/src/test/resources/binaryDataHex.csv
diff --git a/integration/spark-common-test/src/test/resources/binaryStringNullData.csv b/integration/spark/src/test/resources/binaryStringNullData.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/binaryStringNullData.csv
rename to integration/spark/src/test/resources/binaryStringNullData.csv
diff --git a/integration/spark-common-test/src/test/resources/binarystringdata.csv b/integration/spark/src/test/resources/binarystringdata.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/binarystringdata.csv
rename to integration/spark/src/test/resources/binarystringdata.csv
diff --git a/integration/spark-common-test/src/test/resources/binarystringdata2.csv b/integration/spark/src/test/resources/binarystringdata2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/binarystringdata2.csv
rename to integration/spark/src/test/resources/binarystringdata2.csv
diff --git a/integration/spark-common-test/src/test/resources/binarystringdatawithHead.csv b/integration/spark/src/test/resources/binarystringdatawithHead.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/binarystringdatawithHead.csv
rename to integration/spark/src/test/resources/binarystringdatawithHead.csv
diff --git a/integration/spark2/src/test/resources/bool/supportBoolean.csv b/integration/spark/src/test/resources/bool/supportBoolean.csv
similarity index 100%
rename from integration/spark2/src/test/resources/bool/supportBoolean.csv
rename to integration/spark/src/test/resources/bool/supportBoolean.csv
diff --git a/integration/spark2/src/test/resources/bool/supportBooleanBadRecords.csv b/integration/spark/src/test/resources/bool/supportBooleanBadRecords.csv
similarity index 100%
rename from integration/spark2/src/test/resources/bool/supportBooleanBadRecords.csv
rename to integration/spark/src/test/resources/bool/supportBooleanBadRecords.csv
diff --git a/integration/spark2/src/test/resources/bool/supportBooleanDifferentFormat.csv b/integration/spark/src/test/resources/bool/supportBooleanDifferentFormat.csv
similarity index 100%
rename from integration/spark2/src/test/resources/bool/supportBooleanDifferentFormat.csv
rename to integration/spark/src/test/resources/bool/supportBooleanDifferentFormat.csv
diff --git a/integration/spark2/src/test/resources/bool/supportBooleanOnlyBoolean.csv b/integration/spark/src/test/resources/bool/supportBooleanOnlyBoolean.csv
similarity index 100%
rename from integration/spark2/src/test/resources/bool/supportBooleanOnlyBoolean.csv
rename to integration/spark/src/test/resources/bool/supportBooleanOnlyBoolean.csv
diff --git a/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv b/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv
similarity index 100%
rename from integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv
rename to integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv
diff --git a/integration/spark2/src/test/resources/bool/supportBooleanWithFileHeader.csv b/integration/spark/src/test/resources/bool/supportBooleanWithFileHeader.csv
similarity index 100%
rename from integration/spark2/src/test/resources/bool/supportBooleanWithFileHeader.csv
rename to integration/spark/src/test/resources/bool/supportBooleanWithFileHeader.csv
diff --git a/integration/spark-common-test/src/test/resources/channelsId.csv b/integration/spark/src/test/resources/channelsId.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/channelsId.csv
rename to integration/spark/src/test/resources/channelsId.csv
diff --git a/integration/spark-common-test/src/test/resources/character_carbon.csv b/integration/spark/src/test/resources/character_carbon.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/character_carbon.csv
rename to integration/spark/src/test/resources/character_carbon.csv
diff --git a/integration/spark-common-test/src/test/resources/character_hive.csv b/integration/spark/src/test/resources/character_hive.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/character_hive.csv
rename to integration/spark/src/test/resources/character_hive.csv
diff --git a/integration/spark-common-test/src/test/resources/columndictionary/country.csv b/integration/spark/src/test/resources/columndictionary/country.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/columndictionary/country.csv
rename to integration/spark/src/test/resources/columndictionary/country.csv
diff --git a/integration/spark-common-test/src/test/resources/columndictionary/name.csv b/integration/spark/src/test/resources/columndictionary/name.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/columndictionary/name.csv
rename to integration/spark/src/test/resources/columndictionary/name.csv
diff --git a/integration/spark-common-test/src/test/resources/comment.csv b/integration/spark/src/test/resources/comment.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/comment.csv
rename to integration/spark/src/test/resources/comment.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compaction1.csv b/integration/spark/src/test/resources/compaction/compaction1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compaction1.csv
rename to integration/spark/src/test/resources/compaction/compaction1.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compaction1_forhive.csv b/integration/spark/src/test/resources/compaction/compaction1_forhive.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compaction1_forhive.csv
rename to integration/spark/src/test/resources/compaction/compaction1_forhive.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compaction2.csv b/integration/spark/src/test/resources/compaction/compaction2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compaction2.csv
rename to integration/spark/src/test/resources/compaction/compaction2.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compaction3.csv b/integration/spark/src/test/resources/compaction/compaction3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compaction3.csv
rename to integration/spark/src/test/resources/compaction/compaction3.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compactionIUD1.csv b/integration/spark/src/test/resources/compaction/compactionIUD1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compactionIUD1.csv
rename to integration/spark/src/test/resources/compaction/compactionIUD1.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compactionIUD2.csv b/integration/spark/src/test/resources/compaction/compactionIUD2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compactionIUD2.csv
rename to integration/spark/src/test/resources/compaction/compactionIUD2.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compactionIUD3.csv b/integration/spark/src/test/resources/compaction/compactionIUD3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compactionIUD3.csv
rename to integration/spark/src/test/resources/compaction/compactionIUD3.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compactionIUD4.csv b/integration/spark/src/test/resources/compaction/compactionIUD4.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compactionIUD4.csv
rename to integration/spark/src/test/resources/compaction/compactionIUD4.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compactioncard2.csv b/integration/spark/src/test/resources/compaction/compactioncard2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compactioncard2.csv
rename to integration/spark/src/test/resources/compaction/compactioncard2.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/compactioncard2_forhive.csv b/integration/spark/src/test/resources/compaction/compactioncard2_forhive.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/compactioncard2_forhive.csv
rename to integration/spark/src/test/resources/compaction/compactioncard2_forhive.csv
diff --git a/integration/spark-common-test/src/test/resources/compaction/nodictionary_compaction.csv b/integration/spark/src/test/resources/compaction/nodictionary_compaction.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/compaction/nodictionary_compaction.csv
rename to integration/spark/src/test/resources/compaction/nodictionary_compaction.csv
diff --git a/integration/spark-common-test/src/test/resources/complexTypeDecimal.csv b/integration/spark/src/test/resources/complexTypeDecimal.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexTypeDecimal.csv
rename to integration/spark/src/test/resources/complexTypeDecimal.csv
diff --git a/integration/spark-common-test/src/test/resources/complexTypeDecimalNested.csv b/integration/spark/src/test/resources/complexTypeDecimalNested.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexTypeDecimalNested.csv
rename to integration/spark/src/test/resources/complexTypeDecimalNested.csv
diff --git a/integration/spark-common-test/src/test/resources/complexTypeDecimalNestedHive.csv b/integration/spark/src/test/resources/complexTypeDecimalNestedHive.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexTypeDecimalNestedHive.csv
rename to integration/spark/src/test/resources/complexTypeDecimalNestedHive.csv
diff --git a/integration/spark-common-test/src/test/resources/complexbinary.csv b/integration/spark/src/test/resources/complexbinary.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexbinary.csv
rename to integration/spark/src/test/resources/complexbinary.csv
diff --git a/integration/spark-common-test/src/test/resources/complexdata.csv b/integration/spark/src/test/resources/complexdata.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexdata.csv
rename to integration/spark/src/test/resources/complexdata.csv
diff --git a/integration/spark-common-test/src/test/resources/complexdata1.csv b/integration/spark/src/test/resources/complexdata1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexdata1.csv
rename to integration/spark/src/test/resources/complexdata1.csv
diff --git a/integration/spark-common-test/src/test/resources/complexdata2.csv b/integration/spark/src/test/resources/complexdata2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexdata2.csv
rename to integration/spark/src/test/resources/complexdata2.csv
diff --git a/integration/spark-common-test/src/test/resources/complexdata3.csv b/integration/spark/src/test/resources/complexdata3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexdata3.csv
rename to integration/spark/src/test/resources/complexdata3.csv
diff --git a/integration/spark-common-test/src/test/resources/complexdatareordered.csv b/integration/spark/src/test/resources/complexdatareordered.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexdatareordered.csv
rename to integration/spark/src/test/resources/complexdatareordered.csv
diff --git a/integration/spark-common-test/src/test/resources/complexdatastructextra.csv b/integration/spark/src/test/resources/complexdatastructextra.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complexdatastructextra.csv
rename to integration/spark/src/test/resources/complexdatastructextra.csv
diff --git a/integration/spark-common-test/src/test/resources/complextypediffentcolheaderorder.csv b/integration/spark/src/test/resources/complextypediffentcolheaderorder.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complextypediffentcolheaderorder.csv
rename to integration/spark/src/test/resources/complextypediffentcolheaderorder.csv
diff --git a/integration/spark-common-test/src/test/resources/complextypesample.csv b/integration/spark/src/test/resources/complextypesample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complextypesample.csv
rename to integration/spark/src/test/resources/complextypesample.csv
diff --git a/integration/spark-common-test/src/test/resources/complextypespecialchardelimiter.csv b/integration/spark/src/test/resources/complextypespecialchardelimiter.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/complextypespecialchardelimiter.csv
rename to integration/spark/src/test/resources/complextypespecialchardelimiter.csv
diff --git a/integration/spark-common-test/src/test/resources/data.csv b/integration/spark/src/test/resources/data.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data.csv
rename to integration/spark/src/test/resources/data.csv
diff --git a/integration/spark-common-test/src/test/resources/data1.csv b/integration/spark/src/test/resources/data1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data1.csv
rename to integration/spark/src/test/resources/data1.csv
diff --git a/integration/spark-common-test/src/test/resources/data2.csv b/integration/spark/src/test/resources/data2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data2.csv
rename to integration/spark/src/test/resources/data2.csv
diff --git a/integration/spark-common-test/src/test/resources/data2_DiffTimeFormat.csv b/integration/spark/src/test/resources/data2_DiffTimeFormat.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data2_DiffTimeFormat.csv
rename to integration/spark/src/test/resources/data2_DiffTimeFormat.csv
diff --git a/integration/spark-common-test/src/test/resources/dataIncrement.csv b/integration/spark/src/test/resources/dataIncrement.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dataIncrement.csv
rename to integration/spark/src/test/resources/dataIncrement.csv
diff --git a/integration/spark-common-test/src/test/resources/dataWithEmptyRows.csv b/integration/spark/src/test/resources/dataWithEmptyRows.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dataWithEmptyRows.csv
rename to integration/spark/src/test/resources/dataWithEmptyRows.csv
diff --git a/integration/spark-common-test/src/test/resources/dataWithNegativeValues.csv b/integration/spark/src/test/resources/dataWithNegativeValues.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dataWithNegativeValues.csv
rename to integration/spark/src/test/resources/dataWithNegativeValues.csv
diff --git a/integration/spark-common-test/src/test/resources/dataWithNullFirstLine.csv b/integration/spark/src/test/resources/dataWithNullFirstLine.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dataWithNullFirstLine.csv
rename to integration/spark/src/test/resources/dataWithNullFirstLine.csv
diff --git a/integration/spark-common-test/src/test/resources/dataWithSingleQuote.csv b/integration/spark/src/test/resources/dataWithSingleQuote.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dataWithSingleQuote.csv
rename to integration/spark/src/test/resources/dataWithSingleQuote.csv
diff --git a/integration/spark-common-test/src/test/resources/data_alltypes.csv b/integration/spark/src/test/resources/data_alltypes.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_alltypes.csv
rename to integration/spark/src/test/resources/data_alltypes.csv
diff --git a/integration/spark-common-test/src/test/resources/data_beyond68yrs.csv b/integration/spark/src/test/resources/data_beyond68yrs.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_beyond68yrs.csv
rename to integration/spark/src/test/resources/data_beyond68yrs.csv
diff --git a/integration/spark-common-test/src/test/resources/data_big.csv b/integration/spark/src/test/resources/data_big.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_big.csv
rename to integration/spark/src/test/resources/data_big.csv
diff --git a/integration/spark-common-test/src/test/resources/data_partition_badrecords.csv b/integration/spark/src/test/resources/data_partition_badrecords.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_partition_badrecords.csv
rename to integration/spark/src/test/resources/data_partition_badrecords.csv
diff --git a/integration/spark-common-test/src/test/resources/data_sort.csv b/integration/spark/src/test/resources/data_sort.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_sort.csv
rename to integration/spark/src/test/resources/data_sort.csv
diff --git a/integration/spark-common-test/src/test/resources/data_timestamp.csv b/integration/spark/src/test/resources/data_timestamp.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_timestamp.csv
rename to integration/spark/src/test/resources/data_timestamp.csv
diff --git a/integration/spark-common-test/src/test/resources/data_withCAPSHeader.csv b/integration/spark/src/test/resources/data_withCAPSHeader.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_withCAPSHeader.csv
rename to integration/spark/src/test/resources/data_withCAPSHeader.csv
diff --git a/integration/spark-common-test/src/test/resources/data_withMixedHeader.csv b/integration/spark/src/test/resources/data_withMixedHeader.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_withMixedHeader.csv
rename to integration/spark/src/test/resources/data_withMixedHeader.csv
diff --git a/integration/spark-common-test/src/test/resources/data_with_all_types.csv b/integration/spark/src/test/resources/data_with_all_types.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_with_all_types.csv
rename to integration/spark/src/test/resources/data_with_all_types.csv
diff --git a/integration/spark-common-test/src/test/resources/data_with_special_char.csv b/integration/spark/src/test/resources/data_with_special_char.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/data_with_special_char.csv
rename to integration/spark/src/test/resources/data_with_special_char.csv
diff --git a/integration/spark-common-test/src/test/resources/datadelimiter.csv b/integration/spark/src/test/resources/datadelimiter.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datadelimiter.csv
rename to integration/spark/src/test/resources/datadelimiter.csv
diff --git a/integration/spark-common-test/src/test/resources/datanullmeasurecol.csv b/integration/spark/src/test/resources/datanullmeasurecol.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datanullmeasurecol.csv
rename to integration/spark/src/test/resources/datanullmeasurecol.csv
diff --git a/integration/spark-common-test/src/test/resources/dataretention1.csv b/integration/spark/src/test/resources/dataretention1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dataretention1.csv
rename to integration/spark/src/test/resources/dataretention1.csv
diff --git a/integration/spark-common-test/src/test/resources/dataretention11.csv b/integration/spark/src/test/resources/dataretention11.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dataretention11.csv
rename to integration/spark/src/test/resources/dataretention11.csv
diff --git a/integration/spark-common-test/src/test/resources/dataretention2.csv b/integration/spark/src/test/resources/dataretention2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dataretention2.csv
rename to integration/spark/src/test/resources/dataretention2.csv
diff --git a/integration/spark-common-test/src/test/resources/dataretention3.csv b/integration/spark/src/test/resources/dataretention3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dataretention3.csv
rename to integration/spark/src/test/resources/dataretention3.csv
diff --git a/integration/spark-common-test/src/test/resources/datasample.csv b/integration/spark/src/test/resources/datasample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datasample.csv
rename to integration/spark/src/test/resources/datasample.csv
diff --git a/integration/spark-common-test/src/test/resources/datasamplecomplex.csv b/integration/spark/src/test/resources/datasamplecomplex.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datasamplecomplex.csv
rename to integration/spark/src/test/resources/datasamplecomplex.csv
diff --git a/integration/spark-common-test/src/test/resources/datasamplefordate.csv b/integration/spark/src/test/resources/datasamplefordate.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datasamplefordate.csv
rename to integration/spark/src/test/resources/datasamplefordate.csv
diff --git a/integration/spark-common-test/src/test/resources/datasamplenull.csv b/integration/spark/src/test/resources/datasamplenull.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datasamplenull.csv
rename to integration/spark/src/test/resources/datasamplenull.csv
diff --git a/integration/spark-common-test/src/test/resources/datasingleCol.csv b/integration/spark/src/test/resources/datasingleCol.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datasingleCol.csv
rename to integration/spark/src/test/resources/datasingleCol.csv
diff --git a/integration/spark-common-test/src/test/resources/datasingleComplexCol.csv b/integration/spark/src/test/resources/datasingleComplexCol.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datasingleComplexCol.csv
rename to integration/spark/src/test/resources/datasingleComplexCol.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithNegeativewithoutHeader.csv b/integration/spark/src/test/resources/datawithNegeativewithoutHeader.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithNegeativewithoutHeader.csv
rename to integration/spark/src/test/resources/datawithNegeativewithoutHeader.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithNegtiveNumber.csv b/integration/spark/src/test/resources/datawithNegtiveNumber.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithNegtiveNumber.csv
rename to integration/spark/src/test/resources/datawithNegtiveNumber.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithbackslash.csv b/integration/spark/src/test/resources/datawithbackslash.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithbackslash.csv
rename to integration/spark/src/test/resources/datawithbackslash.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithblanklines.csv b/integration/spark/src/test/resources/datawithblanklines.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithblanklines.csv
rename to integration/spark/src/test/resources/datawithblanklines.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithcomplexspecialchar.csv b/integration/spark/src/test/resources/datawithcomplexspecialchar.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithcomplexspecialchar.csv
rename to integration/spark/src/test/resources/datawithcomplexspecialchar.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithescapecharacter.csv b/integration/spark/src/test/resources/datawithescapecharacter.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithescapecharacter.csv
rename to integration/spark/src/test/resources/datawithescapecharacter.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithmaxbigint.csv b/integration/spark/src/test/resources/datawithmaxbigint.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithmaxbigint.csv
rename to integration/spark/src/test/resources/datawithmaxbigint.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithmaxinteger.csv b/integration/spark/src/test/resources/datawithmaxinteger.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithmaxinteger.csv
rename to integration/spark/src/test/resources/datawithmaxinteger.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithmaxminbigint.csv b/integration/spark/src/test/resources/datawithmaxminbigint.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithmaxminbigint.csv
rename to integration/spark/src/test/resources/datawithmaxminbigint.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithmaxmininteger.csv b/integration/spark/src/test/resources/datawithmaxmininteger.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithmaxmininteger.csv
rename to integration/spark/src/test/resources/datawithmaxmininteger.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithminbigint.csv b/integration/spark/src/test/resources/datawithminbigint.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithminbigint.csv
rename to integration/spark/src/test/resources/datawithminbigint.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithmininteger.csv b/integration/spark/src/test/resources/datawithmininteger.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithmininteger.csv
rename to integration/spark/src/test/resources/datawithmininteger.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithnullmeasure.csv b/integration/spark/src/test/resources/datawithnullmeasure.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithnullmeasure.csv
rename to integration/spark/src/test/resources/datawithnullmeasure.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithnullmsrs.csv b/integration/spark/src/test/resources/datawithnullmsrs.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithnullmsrs.csv
rename to integration/spark/src/test/resources/datawithnullmsrs.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithoutheader.csv b/integration/spark/src/test/resources/datawithoutheader.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithoutheader.csv
rename to integration/spark/src/test/resources/datawithoutheader.csv
diff --git a/integration/spark-common-test/src/test/resources/datawithspecialcharacter.csv b/integration/spark/src/test/resources/datawithspecialcharacter.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datawithspecialcharacter.csv
rename to integration/spark/src/test/resources/datawithspecialcharacter.csv
diff --git a/integration/spark-common-test/src/test/resources/datedatafile.csv b/integration/spark/src/test/resources/datedatafile.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/datedatafile.csv
rename to integration/spark/src/test/resources/datedatafile.csv
diff --git a/integration/spark-common-test/src/test/resources/dblocation/test.csv b/integration/spark/src/test/resources/dblocation/test.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dblocation/test.csv
rename to integration/spark/src/test/resources/dblocation/test.csv
diff --git a/integration/spark-common-test/src/test/resources/decimalBoundaryDataCarbon.csv b/integration/spark/src/test/resources/decimalBoundaryDataCarbon.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/decimalBoundaryDataCarbon.csv
rename to integration/spark/src/test/resources/decimalBoundaryDataCarbon.csv
diff --git a/integration/spark-common-test/src/test/resources/decimalBoundaryDataHive.csv b/integration/spark/src/test/resources/decimalBoundaryDataHive.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/decimalBoundaryDataHive.csv
rename to integration/spark/src/test/resources/decimalBoundaryDataHive.csv
diff --git a/integration/spark-common-test/src/test/resources/decimalData.csv b/integration/spark/src/test/resources/decimalData.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/decimalData.csv
rename to integration/spark/src/test/resources/decimalData.csv
diff --git a/integration/spark-common-test/src/test/resources/decimalDataWithHeader.csv b/integration/spark/src/test/resources/decimalDataWithHeader.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/decimalDataWithHeader.csv
rename to integration/spark/src/test/resources/decimalDataWithHeader.csv
diff --git a/integration/spark-common-test/src/test/resources/decimalDataWithoutHeader.csv b/integration/spark/src/test/resources/decimalDataWithoutHeader.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/decimalDataWithoutHeader.csv
rename to integration/spark/src/test/resources/decimalDataWithoutHeader.csv
diff --git a/integration/spark-common-test/src/test/resources/decimal_int_range.csv b/integration/spark/src/test/resources/decimal_int_range.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/decimal_int_range.csv
rename to integration/spark/src/test/resources/decimal_int_range.csv
diff --git a/integration/spark-common-test/src/test/resources/deviceInformationId.csv b/integration/spark/src/test/resources/deviceInformationId.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/deviceInformationId.csv
rename to integration/spark/src/test/resources/deviceInformationId.csv
diff --git a/integration/spark-common-test/src/test/resources/deviceInformationId2.csv b/integration/spark/src/test/resources/deviceInformationId2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/deviceInformationId2.csv
rename to integration/spark/src/test/resources/deviceInformationId2.csv
diff --git a/integration/spark-common-test/src/test/resources/dimSample.csv b/integration/spark/src/test/resources/dimSample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dimSample.csv
rename to integration/spark/src/test/resources/dimSample.csv
diff --git a/integration/spark-common-test/src/test/resources/dimTableSample.csv b/integration/spark/src/test/resources/dimTableSample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/dimTableSample.csv
rename to integration/spark/src/test/resources/dimTableSample.csv
diff --git a/integration/spark-common-test/src/test/resources/double.csv b/integration/spark/src/test/resources/double.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double.csv
rename to integration/spark/src/test/resources/double.csv
diff --git a/integration/spark-common-test/src/test/resources/double/data_notitle_AdaptiveFloating_byte.csv b/integration/spark/src/test/resources/double/data_notitle_AdaptiveFloating_byte.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double/data_notitle_AdaptiveFloating_byte.csv
rename to integration/spark/src/test/resources/double/data_notitle_AdaptiveFloating_byte.csv
diff --git a/integration/spark-common-test/src/test/resources/double/data_notitle_AdaptiveFloating_int.csv b/integration/spark/src/test/resources/double/data_notitle_AdaptiveFloating_int.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double/data_notitle_AdaptiveFloating_int.csv
rename to integration/spark/src/test/resources/double/data_notitle_AdaptiveFloating_int.csv
diff --git a/integration/spark-common-test/src/test/resources/double/data_notitle_AdaptiveFloating_short.csv b/integration/spark/src/test/resources/double/data_notitle_AdaptiveFloating_short.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double/data_notitle_AdaptiveFloating_short.csv
rename to integration/spark/src/test/resources/double/data_notitle_AdaptiveFloating_short.csv
diff --git a/integration/spark-common-test/src/test/resources/double/data_notitle_AdaptiveFloating_short_int.csv b/integration/spark/src/test/resources/double/data_notitle_AdaptiveFloating_short_int.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double/data_notitle_AdaptiveFloating_short_int.csv
rename to integration/spark/src/test/resources/double/data_notitle_AdaptiveFloating_short_int.csv
diff --git a/integration/spark-common-test/src/test/resources/double/data_notitle_byte.csv b/integration/spark/src/test/resources/double/data_notitle_byte.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double/data_notitle_byte.csv
rename to integration/spark/src/test/resources/double/data_notitle_byte.csv
diff --git a/integration/spark-common-test/src/test/resources/double/data_notitle_int.csv b/integration/spark/src/test/resources/double/data_notitle_int.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double/data_notitle_int.csv
rename to integration/spark/src/test/resources/double/data_notitle_int.csv
diff --git a/integration/spark-common-test/src/test/resources/double/data_notitle_long.csv b/integration/spark/src/test/resources/double/data_notitle_long.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double/data_notitle_long.csv
rename to integration/spark/src/test/resources/double/data_notitle_long.csv
diff --git a/integration/spark-common-test/src/test/resources/double/data_notitle_short.csv b/integration/spark/src/test/resources/double/data_notitle_short.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double/data_notitle_short.csv
rename to integration/spark/src/test/resources/double/data_notitle_short.csv
diff --git a/integration/spark-common-test/src/test/resources/double/data_notitle_short_int.csv b/integration/spark/src/test/resources/double/data_notitle_short_int.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/double/data_notitle_short_int.csv
rename to integration/spark/src/test/resources/double/data_notitle_short_int.csv
diff --git a/integration/spark-common-test/src/test/resources/emp.csv b/integration/spark/src/test/resources/emp.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/emp.csv
rename to integration/spark/src/test/resources/emp.csv
diff --git a/integration/spark-common-test/src/test/resources/emptyDimensionData.csv b/integration/spark/src/test/resources/emptyDimensionData.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/emptyDimensionData.csv
rename to integration/spark/src/test/resources/emptyDimensionData.csv
diff --git a/integration/spark-common-test/src/test/resources/emptyDimensionDataHive.csv b/integration/spark/src/test/resources/emptyDimensionDataHive.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/emptyDimensionDataHive.csv
rename to integration/spark/src/test/resources/emptyDimensionDataHive.csv
diff --git a/integration/spark-common-test/src/test/resources/emptylines.csv b/integration/spark/src/test/resources/emptylines.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/emptylines.csv
rename to integration/spark/src/test/resources/emptylines.csv
diff --git a/integration/spark-common-test/src/test/resources/emptyrow/csvwithonlyspacechar.csv b/integration/spark/src/test/resources/emptyrow/csvwithonlyspacechar.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/emptyrow/csvwithonlyspacechar.csv
rename to integration/spark/src/test/resources/emptyrow/csvwithonlyspacechar.csv
diff --git a/integration/spark-common-test/src/test/resources/emptyrow/emptyRows.csv b/integration/spark/src/test/resources/emptyrow/emptyRows.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/emptyrow/emptyRows.csv
rename to integration/spark/src/test/resources/emptyrow/emptyRows.csv
diff --git a/integration/spark-common-test/src/test/resources/encoding_types.csv b/integration/spark/src/test/resources/encoding_types.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/encoding_types.csv
rename to integration/spark/src/test/resources/encoding_types.csv
diff --git a/integration/spark-common-test/src/test/resources/filter/betweenFilter.csv b/integration/spark/src/test/resources/filter/betweenFilter.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/filter/betweenFilter.csv
rename to integration/spark/src/test/resources/filter/betweenFilter.csv
diff --git a/integration/spark-common-test/src/test/resources/filter/datagrtlrt.csv b/integration/spark/src/test/resources/filter/datagrtlrt.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/filter/datagrtlrt.csv
rename to integration/spark/src/test/resources/filter/datagrtlrt.csv
diff --git a/integration/spark-common-test/src/test/resources/filter/datawithnull.csv b/integration/spark/src/test/resources/filter/datawithnull.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/filter/datawithnull.csv
rename to integration/spark/src/test/resources/filter/datawithnull.csv
diff --git a/integration/spark-common-test/src/test/resources/filter/datawithoutnull.csv b/integration/spark/src/test/resources/filter/datawithoutnull.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/filter/datawithoutnull.csv
rename to integration/spark/src/test/resources/filter/datawithoutnull.csv
diff --git a/integration/spark-common-test/src/test/resources/filter/emp2.csv b/integration/spark/src/test/resources/filter/emp2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/filter/emp2.csv
rename to integration/spark/src/test/resources/filter/emp2.csv
diff --git a/integration/spark-common-test/src/test/resources/filter/emp2allnull.csv b/integration/spark/src/test/resources/filter/emp2allnull.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/filter/emp2allnull.csv
rename to integration/spark/src/test/resources/filter/emp2allnull.csv
diff --git a/integration/spark-common-test/src/test/resources/filter/emp2nonull.csv b/integration/spark/src/test/resources/filter/emp2nonull.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/filter/emp2nonull.csv
rename to integration/spark/src/test/resources/filter/emp2nonull.csv
diff --git a/integration/spark-common-test/src/test/resources/filter/notEqualToFilter.csv b/integration/spark/src/test/resources/filter/notEqualToFilter.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/filter/notEqualToFilter.csv
rename to integration/spark/src/test/resources/filter/notEqualToFilter.csv
diff --git a/integration/spark-common-test/src/test/resources/filter/notNullFilter.csv b/integration/spark/src/test/resources/filter/notNullFilter.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/filter/notNullFilter.csv
rename to integration/spark/src/test/resources/filter/notNullFilter.csv
diff --git a/integration/spark-common-test/src/test/resources/floatSample.csv b/integration/spark/src/test/resources/floatSample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/floatSample.csv
rename to integration/spark/src/test/resources/floatSample.csv
diff --git a/integration/spark-common-test/src/test/resources/geodata.csv b/integration/spark/src/test/resources/geodata.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/geodata.csv
rename to integration/spark/src/test/resources/geodata.csv
diff --git a/integration/spark-common-test/src/test/resources/globalsort/sample1.csv b/integration/spark/src/test/resources/globalsort/sample1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/globalsort/sample1.csv
rename to integration/spark/src/test/resources/globalsort/sample1.csv
diff --git a/integration/spark-common-test/src/test/resources/globalsort/sample2.csv b/integration/spark/src/test/resources/globalsort/sample2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/globalsort/sample2.csv
rename to integration/spark/src/test/resources/globalsort/sample2.csv
diff --git a/integration/spark-common-test/src/test/resources/globalsort/sample3.csv b/integration/spark/src/test/resources/globalsort/sample3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/globalsort/sample3.csv
rename to integration/spark/src/test/resources/globalsort/sample3.csv
diff --git a/integration/spark-common-test/src/test/resources/hiverangenodictionarycompare.csv b/integration/spark/src/test/resources/hiverangenodictionarycompare.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/hiverangenodictionarycompare.csv
rename to integration/spark/src/test/resources/hiverangenodictionarycompare.csv
diff --git a/integration/spark-common-test/src/test/resources/invalidMeasures.csv b/integration/spark/src/test/resources/invalidMeasures.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/invalidMeasures.csv
rename to integration/spark/src/test/resources/invalidMeasures.csv
diff --git a/integration/spark-datasource/src/test/resources/j2.csv b/integration/spark/src/test/resources/j2.csv
similarity index 100%
rename from integration/spark-datasource/src/test/resources/j2.csv
rename to integration/spark/src/test/resources/j2.csv
diff --git a/integration/spark-common-test/src/test/resources/join/data1.csv b/integration/spark/src/test/resources/join/data1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/join/data1.csv
rename to integration/spark/src/test/resources/join/data1.csv
diff --git a/integration/spark-common-test/src/test/resources/join/data2.csv b/integration/spark/src/test/resources/join/data2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/join/data2.csv
rename to integration/spark/src/test/resources/join/data2.csv
diff --git a/integration/spark-common-test/src/test/resources/join/emp.csv b/integration/spark/src/test/resources/join/emp.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/join/emp.csv
rename to integration/spark/src/test/resources/join/emp.csv
diff --git a/integration/spark-common-test/src/test/resources/join/employee.csv b/integration/spark/src/test/resources/join/employee.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/join/employee.csv
rename to integration/spark/src/test/resources/join/employee.csv
diff --git a/integration/spark-common-test/src/test/resources/join/mgr.csv b/integration/spark/src/test/resources/join/mgr.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/join/mgr.csv
rename to integration/spark/src/test/resources/join/mgr.csv
diff --git a/integration/spark-common-test/src/test/resources/join/mobile.csv b/integration/spark/src/test/resources/join/mobile.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/join/mobile.csv
rename to integration/spark/src/test/resources/join/mobile.csv
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/PrimitiveTypeWithNull.json b/integration/spark/src/test/resources/jsonFiles/data/PrimitiveTypeWithNull.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/PrimitiveTypeWithNull.json
rename to integration/spark/src/test/resources/jsonFiles/data/PrimitiveTypeWithNull.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/StructOfAllTypes.json b/integration/spark/src/test/resources/jsonFiles/data/StructOfAllTypes.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/StructOfAllTypes.json
rename to integration/spark/src/test/resources/jsonFiles/data/StructOfAllTypes.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/allPrimitiveType.json b/integration/spark/src/test/resources/jsonFiles/data/allPrimitiveType.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/allPrimitiveType.json
rename to integration/spark/src/test/resources/jsonFiles/data/allPrimitiveType.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/allPrimitiveTypeBadRecord.json b/integration/spark/src/test/resources/jsonFiles/data/allPrimitiveTypeBadRecord.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/allPrimitiveTypeBadRecord.json
rename to integration/spark/src/test/resources/jsonFiles/data/allPrimitiveTypeBadRecord.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/arrayOfStructOfStruct.json b/integration/spark/src/test/resources/jsonFiles/data/arrayOfStructOfStruct.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/arrayOfStructOfStruct.json
rename to integration/spark/src/test/resources/jsonFiles/data/arrayOfStructOfStruct.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/arrayOfarrayOfarrayOfStruct.json b/integration/spark/src/test/resources/jsonFiles/data/arrayOfarrayOfarrayOfStruct.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/arrayOfarrayOfarrayOfStruct.json
rename to integration/spark/src/test/resources/jsonFiles/data/arrayOfarrayOfarrayOfStruct.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/MultipleRowSingleLineJson.json b/integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/MultipleRowSingleLineJson.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/MultipleRowSingleLineJson.json
rename to integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/MultipleRowSingleLineJson.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/SingleRowSingleLineJson.json b/integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/SingleRowSingleLineJson.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/SingleRowSingleLineJson.json
rename to integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/SingleRowSingleLineJson.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/MultipleRowMultipleLineJsonWithRecordIdentifier.json b/integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/MultipleRowMultipleLineJsonWithRecordIdentifier.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/MultipleRowMultipleLineJsonWithRecordIdentifier.json
rename to integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/MultipleRowMultipleLineJsonWithRecordIdentifier.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowMultipleLineJsonWithRecordIdentifier.json b/integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowMultipleLineJsonWithRecordIdentifier.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowMultipleLineJsonWithRecordIdentifier.json
rename to integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowMultipleLineJsonWithRecordIdentifier.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowSingleLineJsonWithRecordIdentifier.json b/integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowSingleLineJsonWithRecordIdentifier.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowSingleLineJsonWithRecordIdentifier.json
rename to integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/JsonReaderTest/withRecordIdentifier/SingleRowSingleLineJsonWithRecordIdentifier.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeMultipleRows.json b/integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeMultipleRows.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeMultipleRows.json
rename to integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeMultipleRows.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeSingleArray.json b/integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeSingleArray.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeSingleArray.json
rename to integration/spark/src/test/resources/jsonFiles/data/similarSchemaFiles/allPrimitiveTypeSingleArray.json
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/schema/StructOfAllTypes.avsc b/integration/spark/src/test/resources/jsonFiles/schema/StructOfAllTypes.avsc
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/schema/StructOfAllTypes.avsc
rename to integration/spark/src/test/resources/jsonFiles/schema/StructOfAllTypes.avsc
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/schema/arrayOfStructOfStruct.avsc b/integration/spark/src/test/resources/jsonFiles/schema/arrayOfStructOfStruct.avsc
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/schema/arrayOfStructOfStruct.avsc
rename to integration/spark/src/test/resources/jsonFiles/schema/arrayOfStructOfStruct.avsc
diff --git a/integration/spark-common-test/src/test/resources/jsonFiles/schema/arrayOfarrayOfarrayOfStruct.avsc b/integration/spark/src/test/resources/jsonFiles/schema/arrayOfarrayOfarrayOfStruct.avsc
similarity index 100%
rename from integration/spark-common-test/src/test/resources/jsonFiles/schema/arrayOfarrayOfarrayOfStruct.avsc
rename to integration/spark/src/test/resources/jsonFiles/schema/arrayOfarrayOfarrayOfStruct.avsc
diff --git a/integration/spark-common-test/src/test/resources/lessthandatacolumndata.csv b/integration/spark/src/test/resources/lessthandatacolumndata.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/lessthandatacolumndata.csv
rename to integration/spark/src/test/resources/lessthandatacolumndata.csv
diff --git a/integration/spark-common-test/src/test/resources/loadMultiFiles/.invisibilityfile b/integration/spark/src/test/resources/loadMultiFiles/.invisibilityfile
similarity index 100%
rename from integration/spark-common-test/src/test/resources/loadMultiFiles/.invisibilityfile
rename to integration/spark/src/test/resources/loadMultiFiles/.invisibilityfile
diff --git a/integration/spark-common-test/src/test/resources/loadMultiFiles/_SUCCESS b/integration/spark/src/test/resources/loadMultiFiles/_SUCCESS
similarity index 100%
rename from integration/spark-common-test/src/test/resources/loadMultiFiles/_SUCCESS
rename to integration/spark/src/test/resources/loadMultiFiles/_SUCCESS
diff --git a/integration/spark-common-test/src/test/resources/loadMultiFiles/data.csv b/integration/spark/src/test/resources/loadMultiFiles/data.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/loadMultiFiles/data.csv
rename to integration/spark/src/test/resources/loadMultiFiles/data.csv
diff --git a/integration/spark-common-test/src/test/resources/loadMultiFiles/emptyfile.csv b/integration/spark/src/test/resources/loadMultiFiles/emptyfile.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/loadMultiFiles/emptyfile.csv
rename to integration/spark/src/test/resources/loadMultiFiles/emptyfile.csv
diff --git a/integration/spark-common-test/src/test/resources/loadMultiFiles/nestedfolder1/data.csv b/integration/spark/src/test/resources/loadMultiFiles/nestedfolder1/data.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/loadMultiFiles/nestedfolder1/data.csv
rename to integration/spark/src/test/resources/loadMultiFiles/nestedfolder1/data.csv
diff --git a/integration/spark-common-test/src/test/resources/loadMultiFiles/nestedfolder1/data1.csv b/integration/spark/src/test/resources/loadMultiFiles/nestedfolder1/data1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/loadMultiFiles/nestedfolder1/data1.csv
rename to integration/spark/src/test/resources/loadMultiFiles/nestedfolder1/data1.csv
diff --git a/integration/spark-common-test/src/test/resources/loadMultiFiles/nestedfolder1/nestedfolder2/data.csv b/integration/spark/src/test/resources/loadMultiFiles/nestedfolder1/nestedfolder2/data.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/loadMultiFiles/nestedfolder1/nestedfolder2/data.csv
rename to integration/spark/src/test/resources/loadMultiFiles/nestedfolder1/nestedfolder2/data.csv
diff --git a/integration/spark-common-test/src/test/resources/loadMultiFiles/non-csv b/integration/spark/src/test/resources/loadMultiFiles/non-csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/loadMultiFiles/non-csv
rename to integration/spark/src/test/resources/loadMultiFiles/non-csv
diff --git a/integration/spark-common-test/src/test/resources/localdictionary.csv b/integration/spark/src/test/resources/localdictionary.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/localdictionary.csv
rename to integration/spark/src/test/resources/localdictionary.csv
diff --git a/integration/spark-common-test/src/test/resources/locationInfoActiveCountry.csv b/integration/spark/src/test/resources/locationInfoActiveCountry.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/locationInfoActiveCountry.csv
rename to integration/spark/src/test/resources/locationInfoActiveCountry.csv
diff --git a/integration/spark-common-test/src/test/resources/mac.csv b/integration/spark/src/test/resources/mac.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/mac.csv
rename to integration/spark/src/test/resources/mac.csv
diff --git a/integration/spark-common-test/src/test/resources/measureinsertintotest.csv b/integration/spark/src/test/resources/measureinsertintotest.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/measureinsertintotest.csv
rename to integration/spark/src/test/resources/measureinsertintotest.csv
diff --git a/integration/spark-common-test/src/test/resources/mobileimei.csv b/integration/spark/src/test/resources/mobileimei.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/mobileimei.csv
rename to integration/spark/src/test/resources/mobileimei.csv
diff --git a/integration/spark-common-test/src/test/resources/mv_sampledata.csv b/integration/spark/src/test/resources/mv_sampledata.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/mv_sampledata.csv
rename to integration/spark/src/test/resources/mv_sampledata.csv
diff --git a/integration/spark-common-test/src/test/resources/newsample.csv b/integration/spark/src/test/resources/newsample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/newsample.csv
rename to integration/spark/src/test/resources/newsample.csv
diff --git a/integration/spark-common-test/src/test/resources/noneCsvFormat.cs b/integration/spark/src/test/resources/noneCsvFormat.cs
similarity index 100%
rename from integration/spark-common-test/src/test/resources/noneCsvFormat.cs
rename to integration/spark/src/test/resources/noneCsvFormat.cs
diff --git a/integration/spark-common-test/src/test/resources/nontransactional.csv b/integration/spark/src/test/resources/nontransactional.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/nontransactional.csv
rename to integration/spark/src/test/resources/nontransactional.csv
diff --git a/integration/spark-common-test/src/test/resources/nontransactional1.csv b/integration/spark/src/test/resources/nontransactional1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/nontransactional1.csv
rename to integration/spark/src/test/resources/nontransactional1.csv
diff --git a/integration/spark-common-test/src/test/resources/nullSample.csv b/integration/spark/src/test/resources/nullSample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/nullSample.csv
rename to integration/spark/src/test/resources/nullSample.csv
diff --git a/integration/spark-common-test/src/test/resources/nullandnonparsableValue.csv b/integration/spark/src/test/resources/nullandnonparsableValue.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/nullandnonparsableValue.csv
rename to integration/spark/src/test/resources/nullandnonparsableValue.csv
diff --git a/integration/spark-common-test/src/test/resources/nullmeasurevalue.csv b/integration/spark/src/test/resources/nullmeasurevalue.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/nullmeasurevalue.csv
rename to integration/spark/src/test/resources/nullmeasurevalue.csv
diff --git a/integration/spark-common-test/src/test/resources/nullvalueserialization.csv b/integration/spark/src/test/resources/nullvalueserialization.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/nullvalueserialization.csv
rename to integration/spark/src/test/resources/nullvalueserialization.csv
diff --git a/integration/spark-common-test/src/test/resources/numeric_column_invalid_values.csv b/integration/spark/src/test/resources/numeric_column_invalid_values.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/numeric_column_invalid_values.csv
rename to integration/spark/src/test/resources/numeric_column_invalid_values.csv
diff --git a/integration/spark-common-test/src/test/resources/oscon_10.csv b/integration/spark/src/test/resources/oscon_10.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/oscon_10.csv
rename to integration/spark/src/test/resources/oscon_10.csv
diff --git a/integration/spark-common-test/src/test/resources/outofrange.csv b/integration/spark/src/test/resources/outofrange.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/outofrange.csv
rename to integration/spark/src/test/resources/outofrange.csv
diff --git a/integration/spark-common-test/src/test/resources/overwriteTable1_noRecord.csv b/integration/spark/src/test/resources/overwriteTable1_noRecord.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/overwriteTable1_noRecord.csv
rename to integration/spark/src/test/resources/overwriteTable1_noRecord.csv
diff --git a/integration/spark-common-test/src/test/resources/overwriteTable1_someRecord.csv b/integration/spark/src/test/resources/overwriteTable1_someRecord.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/overwriteTable1_someRecord.csv
rename to integration/spark/src/test/resources/overwriteTable1_someRecord.csv
diff --git a/integration/spark-common-test/src/test/resources/overwriteTable2_noRecord.csv b/integration/spark/src/test/resources/overwriteTable2_noRecord.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/overwriteTable2_noRecord.csv
rename to integration/spark/src/test/resources/overwriteTable2_noRecord.csv
diff --git a/integration/spark-common-test/src/test/resources/overwriteTable2_someRecord.csv b/integration/spark/src/test/resources/overwriteTable2_someRecord.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/overwriteTable2_someRecord.csv
rename to integration/spark/src/test/resources/overwriteTable2_someRecord.csv
diff --git a/integration/spark-common-test/src/test/resources/partData.csv b/integration/spark/src/test/resources/partData.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/partData.csv
rename to integration/spark/src/test/resources/partData.csv
diff --git a/integration/spark-common-test/src/test/resources/partition_data.csv b/integration/spark/src/test/resources/partition_data.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/partition_data.csv
rename to integration/spark/src/test/resources/partition_data.csv
diff --git a/integration/spark-common-test/src/test/resources/partition_data_example.csv b/integration/spark/src/test/resources/partition_data_example.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/partition_data_example.csv
rename to integration/spark/src/test/resources/partition_data_example.csv
diff --git a/integration/spark-common-test/src/test/resources/predefdic/allpredefdictionary.csv b/integration/spark/src/test/resources/predefdic/allpredefdictionary.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/predefdic/allpredefdictionary.csv
rename to integration/spark/src/test/resources/predefdic/allpredefdictionary.csv
diff --git a/integration/spark-common-test/src/test/resources/predefdic/data3.csv b/integration/spark/src/test/resources/predefdic/data3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/predefdic/data3.csv
rename to integration/spark/src/test/resources/predefdic/data3.csv
diff --git a/integration/spark-common-test/src/test/resources/predefdic/dicfilepath.csv b/integration/spark/src/test/resources/predefdic/dicfilepath.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/predefdic/dicfilepath.csv
rename to integration/spark/src/test/resources/predefdic/dicfilepath.csv
diff --git a/integration/spark-common-test/src/test/resources/products.csv b/integration/spark/src/test/resources/products.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/products.csv
rename to integration/spark/src/test/resources/products.csv
diff --git a/integration/spark-common-test/src/test/resources/range_column/dataskew.csv b/integration/spark/src/test/resources/range_column/dataskew.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/range_column/dataskew.csv
rename to integration/spark/src/test/resources/range_column/dataskew.csv
diff --git a/integration/spark-common-test/src/test/resources/rangedata.csv b/integration/spark/src/test/resources/rangedata.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/rangedata.csv
rename to integration/spark/src/test/resources/rangedata.csv
diff --git a/integration/spark-common-test/src/test/resources/rangedatasample.csv b/integration/spark/src/test/resources/rangedatasample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/rangedatasample.csv
rename to integration/spark/src/test/resources/rangedatasample.csv
diff --git a/integration/spark-common-test/src/test/resources/rangenodictionarycompare.csv b/integration/spark/src/test/resources/rangenodictionarycompare.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/rangenodictionarycompare.csv
rename to integration/spark/src/test/resources/rangenodictionarycompare.csv
diff --git a/integration/spark-common-test/src/test/resources/restructure/data1.csv b/integration/spark/src/test/resources/restructure/data1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/restructure/data1.csv
rename to integration/spark/src/test/resources/restructure/data1.csv
diff --git a/integration/spark-common-test/src/test/resources/restructure/data2.csv b/integration/spark/src/test/resources/restructure/data2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/restructure/data2.csv
rename to integration/spark/src/test/resources/restructure/data2.csv
diff --git a/integration/spark-common-test/src/test/resources/restructure/data3.csv b/integration/spark/src/test/resources/restructure/data3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/restructure/data3.csv
rename to integration/spark/src/test/resources/restructure/data3.csv
diff --git a/integration/spark-common-test/src/test/resources/restructure/data4.csv b/integration/spark/src/test/resources/restructure/data4.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/restructure/data4.csv
rename to integration/spark/src/test/resources/restructure/data4.csv
diff --git a/integration/spark-common-test/src/test/resources/restructure/data5.csv b/integration/spark/src/test/resources/restructure/data5.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/restructure/data5.csv
rename to integration/spark/src/test/resources/restructure/data5.csv
diff --git a/integration/spark-common-test/src/test/resources/restructure/data6.csv b/integration/spark/src/test/resources/restructure/data6.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/restructure/data6.csv
rename to integration/spark/src/test/resources/restructure/data6.csv
diff --git a/integration/spark-common-test/src/test/resources/restructure/data7.csv b/integration/spark/src/test/resources/restructure/data7.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/restructure/data7.csv
rename to integration/spark/src/test/resources/restructure/data7.csv
diff --git a/integration/spark-common-test/src/test/resources/restructure/data_2000.csv b/integration/spark/src/test/resources/restructure/data_2000.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/restructure/data_2000.csv
rename to integration/spark/src/test/resources/restructure/data_2000.csv
diff --git a/integration/spark-common-test/src/test/resources/sales_data.csv b/integration/spark/src/test/resources/sales_data.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/sales_data.csv
rename to integration/spark/src/test/resources/sales_data.csv
diff --git a/integration/spark-common-test/src/test/resources/sample b/integration/spark/src/test/resources/sample
similarity index 100%
rename from integration/spark-common-test/src/test/resources/sample
rename to integration/spark/src/test/resources/sample
diff --git a/integration/spark-common-test/src/test/resources/sample.csv b/integration/spark/src/test/resources/sample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/sample.csv
rename to integration/spark/src/test/resources/sample.csv
diff --git a/integration/spark-common-test/src/test/resources/sample.csv.bz2 b/integration/spark/src/test/resources/sample.csv.bz2
similarity index 100%
rename from integration/spark-common-test/src/test/resources/sample.csv.bz2
rename to integration/spark/src/test/resources/sample.csv.bz2
diff --git a/integration/spark-common-test/src/test/resources/sample.csv.gz b/integration/spark/src/test/resources/sample.csv.gz
similarity index 100%
rename from integration/spark-common-test/src/test/resources/sample.csv.gz
rename to integration/spark/src/test/resources/sample.csv.gz
diff --git a/integration/spark-common-test/src/test/resources/sampleComplex.csv b/integration/spark/src/test/resources/sampleComplex.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/sampleComplex.csv
rename to integration/spark/src/test/resources/sampleComplex.csv
diff --git a/integration/spark-common-test/src/test/resources/sample_withDelimiter017.csv b/integration/spark/src/test/resources/sample_withDelimiter017.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/sample_withDelimiter017.csv
rename to integration/spark/src/test/resources/sample_withDelimiter017.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/IUD/sample_1.csv b/integration/spark/src/test/resources/secindex/IUD/sample_1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/IUD/sample_1.csv
rename to integration/spark/src/test/resources/secindex/IUD/sample_1.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/IUD/sample_2.csv b/integration/spark/src/test/resources/secindex/IUD/sample_2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/IUD/sample_2.csv
rename to integration/spark/src/test/resources/secindex/IUD/sample_2.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/data_10000.csv b/integration/spark/src/test/resources/secindex/data_10000.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/data_10000.csv
rename to integration/spark/src/test/resources/secindex/data_10000.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/datafile_100.csv b/integration/spark/src/test/resources/secindex/datafile_100.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/datafile_100.csv
rename to integration/spark/src/test/resources/secindex/datafile_100.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/dest.csv b/integration/spark/src/test/resources/secindex/dest.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/dest.csv
rename to integration/spark/src/test/resources/secindex/dest.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/dest1.csv b/integration/spark/src/test/resources/secindex/dest1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/dest1.csv
rename to integration/spark/src/test/resources/secindex/dest1.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/dest2.csv b/integration/spark/src/test/resources/secindex/dest2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/dest2.csv
rename to integration/spark/src/test/resources/secindex/dest2.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/dest3.csv b/integration/spark/src/test/resources/secindex/dest3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/dest3.csv
rename to integration/spark/src/test/resources/secindex/dest3.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/firstunique.csv b/integration/spark/src/test/resources/secindex/firstunique.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/firstunique.csv
rename to integration/spark/src/test/resources/secindex/firstunique.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/index.csv b/integration/spark/src/test/resources/secindex/index.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/index.csv
rename to integration/spark/src/test/resources/secindex/index.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/secondaryIndexLikeTest.csv b/integration/spark/src/test/resources/secindex/secondaryIndexLikeTest.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/secondaryIndexLikeTest.csv
rename to integration/spark/src/test/resources/secindex/secondaryIndexLikeTest.csv
diff --git a/integration/spark-common-test/src/test/resources/secindex/secondunique.csv b/integration/spark/src/test/resources/secindex/secondunique.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/secindex/secondunique.csv
rename to integration/spark/src/test/resources/secindex/secondunique.csv
diff --git a/integration/spark-common-test/src/test/resources/IUD/source3.csv b/integration/spark/src/test/resources/secindex/source3.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/IUD/source3.csv
rename to integration/spark/src/test/resources/secindex/source3.csv
diff --git a/integration/spark-common-test/src/test/resources/seq_20Records.csv b/integration/spark/src/test/resources/seq_20Records.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/seq_20Records.csv
rename to integration/spark/src/test/resources/seq_20Records.csv
diff --git a/integration/spark-common-test/src/test/resources/shortintboundary.csv b/integration/spark/src/test/resources/shortintboundary.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/shortintboundary.csv
rename to integration/spark/src/test/resources/shortintboundary.csv
diff --git a/integration/spark-common-test/src/test/resources/shortolap.csv b/integration/spark/src/test/resources/shortolap.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/shortolap.csv
rename to integration/spark/src/test/resources/shortolap.csv
diff --git a/integration/spark-common-test/src/test/resources/sort_columns/alldatatype1.csv b/integration/spark/src/test/resources/sort_columns/alldatatype1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/sort_columns/alldatatype1.csv
rename to integration/spark/src/test/resources/sort_columns/alldatatype1.csv
diff --git a/integration/spark-common-test/src/test/resources/sort_columns/alldatatype2.csv b/integration/spark/src/test/resources/sort_columns/alldatatype2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/sort_columns/alldatatype2.csv
rename to integration/spark/src/test/resources/sort_columns/alldatatype2.csv
diff --git a/integration/spark-common-test/src/test/resources/source.csv b/integration/spark/src/test/resources/source.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/source.csv
rename to integration/spark/src/test/resources/source.csv
diff --git a/integration/spark-common-test/src/test/resources/source_without_header.csv b/integration/spark/src/test/resources/source_without_header.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/source_without_header.csv
rename to integration/spark/src/test/resources/source_without_header.csv
diff --git a/integration/spark-common-test/src/test/resources/streamSample.csv b/integration/spark/src/test/resources/streamSample.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/streamSample.csv
rename to integration/spark/src/test/resources/streamSample.csv
diff --git a/integration/spark-common-test/src/test/resources/streamSample_with_long_string.csv b/integration/spark/src/test/resources/streamSample_with_long_string.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/streamSample_with_long_string.csv
rename to integration/spark/src/test/resources/streamSample_with_long_string.csv
diff --git a/integration/spark-common-test/src/test/resources/struct_all.csv b/integration/spark/src/test/resources/struct_all.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/struct_all.csv
rename to integration/spark/src/test/resources/struct_all.csv
diff --git a/integration/spark-datasource/src/test/resources/structofarray.csv b/integration/spark/src/test/resources/structofarray.csv
similarity index 99%
rename from integration/spark-datasource/src/test/resources/structofarray.csv
rename to integration/spark/src/test/resources/structofarray.csv
index fec3320..bf1bfd2 100644
--- a/integration/spark-datasource/src/test/resources/structofarray.csv
+++ b/integration/spark/src/test/resources/structofarray.csv
@@ -1,21 +1,21 @@
-Cust00000000000000000000,2015,1,20,M,SSC,Y,123456789$2015-01-01  00:00:00$100&3000$100.123&3000.234$United Kingdom&England$2015-01-01  00:00:00&2014-01-01  00:00:00,42,104,160,325046028.8,859616748.6
-Cust00000000000000000001,2015,1,30,F,Degree,N,123456790$2015-01-02  00:00:00$101&3000$101.123&3001.234$United States&MO$2015-01-02  00:00:00&2014-01-02  00:00:00,141,181,54,378476092.1,818599132.6
-Cust00000000000000000002,2015,1,40,M,graduation,D,123456791$2015-01-03  00:00:00$102&3000$102.123&3002.234$United States&OR$2015-01-03  00:00:00&2014-01-03  00:00:00,138,43,175,408335001.4,906020942.6
-Cust00000000000000000003,2015,1,50,F,PG,Y,123456792$2015-01-04  00:00:00$103&3000$103.123&3003.234$Australia&Victoria$2015-01-04  00:00:00&2014-01-04  00:00:00,96,63,184,493146274.5,556184083.3
-Cust00000000000000000004,2015,1,60,M,MS,N,123456793$2015-01-05  00:00:00$104&3000$104.123&3004.234$United States&AL$2015-01-05  00:00:00&2014-01-05  00:00:00,115,172,165,457941392.3,641744932.5
-Cust00000000000000000005,2015,1,70,F,Doctor,D,123456794$2015-01-06  00:00:00$105&3000$105.123&3005.234$United States&NJ$2015-01-06  00:00:00&2014-01-06  00:00:00,178,192,178,112452170.2,502438883.3
-Cust00000000000000000006,2015,1,80,M,Layer,Y,123456795$2015-01-07  00:00:00$106&3000$106.123&3006.234$United States&IL$2015-01-07  00:00:00&2014-01-07  00:00:00,172,194,49,943273831.2,37711205.33
-Cust00000000000000000007,2015,1,90,F,Cop,N,123456796$2015-01-08  00:00:00$107&3000$107.123&3007.234$United States&TN$2015-01-08  00:00:00&2014-01-08  00:00:00,163,23,180,991766321.3,452456856.7
-Cust00000000000000000008,2015,1,95,M,Bank,D,123456797$2015-01-09  00:00:00$108&3000$108.123&3008.234$Israel&Tel Aviv$2015-01-09  00:00:00&2014-01-09  00:00:00,113,18,176,747561503.5,388896200.6
-Cust00000000000000000009,2015,1,45,F,Group1,Y,123456798$2015-01-10  00:00:00$109&3000$109.123&3009.234$France&Ile-de-France$2015-01-10  00:00:00&2014-01-10  00:00:00,50,99,10,667010292.4,910085933.7
-Cust00000000000000000010,2015,1,20,M,Group2,N,123456799$2015-01-11  00:00:00$110&3000$110.123&3010.234$United States&NY$2015-01-11  00:00:00&2014-01-11  00:00:00,87,38,27,490913423.8,732302478
-Cust00000000000000000011,2015,1,30,F,Group3,D,123456800$2015-01-12  00:00:00$111&3000$111.123&3011.234$Netherlands&Noord-Brabant$2015-01-12  00:00:00&2014-01-12  00:00:00,83,113,114,143467881.4,856281203.2
-Cust00000000000000000012,2015,1,40,M,Group4,Y,123456801$2015-01-13  00:00:00$112&3000$112.123&3012.234$United States&TX$2015-01-13  00:00:00&2014-01-13  00:00:00,141,159,82,574817864.5,855050321.4
-Cust00000000000000000013,2015,1,50,F,Group5,N,123456802$2015-01-14  00:00:00$113&3000$113.123&3013.234$United States&ID$2015-01-14  00:00:00&2014-01-14  00:00:00,148,188,155,421169023.6,72662265.24
-Cust00000000000000000014,2015,1,60,M,Group6,D,123456803$2015-01-15  00:00:00$114&3000$114.123&3014.234$United States&NJ$2015-01-15  00:00:00&2014-01-15  00:00:00,56,194,21,859080548.6,678050965.3
-Cust00000000000000000015,2015,1,70,F,SSC,Y,123456804$2015-01-16  00:00:00$115&3000$115.123&3015.234$Ireland&Meath$2015-01-16  00:00:00&2014-01-16  00:00:00,154,142,76,250204030.1,766100816.4
-Cust00000000000000000016,2015,1,80,M,Degree,N,123456805$2015-01-17  00:00:00$116&3000$116.123&3016.234$Canada&Ontario$2015-01-17  00:00:00&2014-01-17  00:00:00,44,106,66,123232522.7,98330280.09
-Cust00000000000000000017,2015,1,90,F,graduation,D,123456806$2015-01-18  00:00:00$117&3000$117.123&3017.234$India&Andhra Pradesh$2015-01-18  00:00:00&2014-01-18  00:00:00,133,49,48,739339891.6,20633802.45
-Cust00000000000000000018,2015,1,95,M,PG,Y,123456807$2015-01-19  00:00:00$118&3000$118.123&3018.234$United Kingdom&England$2015-01-19  00:00:00&2014-01-19  00:00:00,5,109,147,441325651.7,6536309.25
-Cust00000000000000000019,2015,1,45,F,MS,N,123456808$2015-01-20  00:00:00$119&3000$119.123&3019.234$United States&UT$2015-01-20  00:00:00&2014-01-20  00:00:00,38,172,172,25330134.99,657416760.8
-Cust00000000000000000020,2015,1,20,M,Doctor,D,123456809$2015-01-21  00:00:00$120&3000$120.123&3020.234$United Kingdom&England$2015-01-21  00:00:00&2014-01-21  00:00:00,59,48,5,473181158.1,648379863.2
+Cust00000000000000000000,2015,1,20,M,SSC,Y,123456789$2015-01-01  00:00:00$100&3000$100.123&3000.234$United Kingdom&England$2015-01-01  00:00:00&2014-01-01  00:00:00,42,104,160,325046028.8,859616748.6
+Cust00000000000000000001,2015,1,30,F,Degree,N,123456790$2015-01-02  00:00:00$101&3000$101.123&3001.234$United States&MO$2015-01-02  00:00:00&2014-01-02  00:00:00,141,181,54,378476092.1,818599132.6
+Cust00000000000000000002,2015,1,40,M,graduation,D,123456791$2015-01-03  00:00:00$102&3000$102.123&3002.234$United States&OR$2015-01-03  00:00:00&2014-01-03  00:00:00,138,43,175,408335001.4,906020942.6
+Cust00000000000000000003,2015,1,50,F,PG,Y,123456792$2015-01-04  00:00:00$103&3000$103.123&3003.234$Australia&Victoria$2015-01-04  00:00:00&2014-01-04  00:00:00,96,63,184,493146274.5,556184083.3
+Cust00000000000000000004,2015,1,60,M,MS,N,123456793$2015-01-05  00:00:00$104&3000$104.123&3004.234$United States&AL$2015-01-05  00:00:00&2014-01-05  00:00:00,115,172,165,457941392.3,641744932.5
+Cust00000000000000000005,2015,1,70,F,Doctor,D,123456794$2015-01-06  00:00:00$105&3000$105.123&3005.234$United States&NJ$2015-01-06  00:00:00&2014-01-06  00:00:00,178,192,178,112452170.2,502438883.3
+Cust00000000000000000006,2015,1,80,M,Layer,Y,123456795$2015-01-07  00:00:00$106&3000$106.123&3006.234$United States&IL$2015-01-07  00:00:00&2014-01-07  00:00:00,172,194,49,943273831.2,37711205.33
+Cust00000000000000000007,2015,1,90,F,Cop,N,123456796$2015-01-08  00:00:00$107&3000$107.123&3007.234$United States&TN$2015-01-08  00:00:00&2014-01-08  00:00:00,163,23,180,991766321.3,452456856.7
+Cust00000000000000000008,2015,1,95,M,Bank,D,123456797$2015-01-09  00:00:00$108&3000$108.123&3008.234$Israel&Tel Aviv$2015-01-09  00:00:00&2014-01-09  00:00:00,113,18,176,747561503.5,388896200.6
+Cust00000000000000000009,2015,1,45,F,Group1,Y,123456798$2015-01-10  00:00:00$109&3000$109.123&3009.234$France&Ile-de-France$2015-01-10  00:00:00&2014-01-10  00:00:00,50,99,10,667010292.4,910085933.7
+Cust00000000000000000010,2015,1,20,M,Group2,N,123456799$2015-01-11  00:00:00$110&3000$110.123&3010.234$United States&NY$2015-01-11  00:00:00&2014-01-11  00:00:00,87,38,27,490913423.8,732302478
+Cust00000000000000000011,2015,1,30,F,Group3,D,123456800$2015-01-12  00:00:00$111&3000$111.123&3011.234$Netherlands&Noord-Brabant$2015-01-12  00:00:00&2014-01-12  00:00:00,83,113,114,143467881.4,856281203.2
+Cust00000000000000000012,2015,1,40,M,Group4,Y,123456801$2015-01-13  00:00:00$112&3000$112.123&3012.234$United States&TX$2015-01-13  00:00:00&2014-01-13  00:00:00,141,159,82,574817864.5,855050321.4
+Cust00000000000000000013,2015,1,50,F,Group5,N,123456802$2015-01-14  00:00:00$113&3000$113.123&3013.234$United States&ID$2015-01-14  00:00:00&2014-01-14  00:00:00,148,188,155,421169023.6,72662265.24
+Cust00000000000000000014,2015,1,60,M,Group6,D,123456803$2015-01-15  00:00:00$114&3000$114.123&3014.234$United States&NJ$2015-01-15  00:00:00&2014-01-15  00:00:00,56,194,21,859080548.6,678050965.3
+Cust00000000000000000015,2015,1,70,F,SSC,Y,123456804$2015-01-16  00:00:00$115&3000$115.123&3015.234$Ireland&Meath$2015-01-16  00:00:00&2014-01-16  00:00:00,154,142,76,250204030.1,766100816.4
+Cust00000000000000000016,2015,1,80,M,Degree,N,123456805$2015-01-17  00:00:00$116&3000$116.123&3016.234$Canada&Ontario$2015-01-17  00:00:00&2014-01-17  00:00:00,44,106,66,123232522.7,98330280.09
+Cust00000000000000000017,2015,1,90,F,graduation,D,123456806$2015-01-18  00:00:00$117&3000$117.123&3017.234$India&Andhra Pradesh$2015-01-18  00:00:00&2014-01-18  00:00:00,133,49,48,739339891.6,20633802.45
+Cust00000000000000000018,2015,1,95,M,PG,Y,123456807$2015-01-19  00:00:00$118&3000$118.123&3018.234$United Kingdom&England$2015-01-19  00:00:00&2014-01-19  00:00:00,5,109,147,441325651.7,6536309.25
+Cust00000000000000000019,2015,1,45,F,MS,N,123456808$2015-01-20  00:00:00$119&3000$119.123&3019.234$United States&UT$2015-01-20  00:00:00&2014-01-20  00:00:00,38,172,172,25330134.99,657416760.8
+Cust00000000000000000020,2015,1,20,M,Doctor,D,123456809$2015-01-21  00:00:00$120&3000$120.123&3020.234$United Kingdom&England$2015-01-21  00:00:00&2014-01-21  00:00:00,59,48,5,473181158.1,648379863.2
diff --git a/integration/spark-common-test/src/test/resources/structusingstruct.csv b/integration/spark/src/test/resources/structusingstruct.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/structusingstruct.csv
rename to integration/spark/src/test/resources/structusingstruct.csv
diff --git a/integration/spark-common-test/src/test/resources/temp/data1.csv b/integration/spark/src/test/resources/temp/data1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/temp/data1.csv
rename to integration/spark/src/test/resources/temp/data1.csv
diff --git a/integration/spark-common-test/src/test/resources/test.json b/integration/spark/src/test/resources/test.json
similarity index 100%
rename from integration/spark-common-test/src/test/resources/test.json
rename to integration/spark/src/test/resources/test.json
diff --git a/integration/spark-common-test/src/test/resources/testBigInt_boundary_value.csv b/integration/spark/src/test/resources/testBigInt_boundary_value.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/testBigInt_boundary_value.csv
rename to integration/spark/src/test/resources/testBigInt_boundary_value.csv
diff --git a/integration/spark-common-test/src/test/resources/testShortAndIntDataType.csv b/integration/spark/src/test/resources/testShortAndIntDataType.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/testShortAndIntDataType.csv
rename to integration/spark/src/test/resources/testShortAndIntDataType.csv
diff --git a/integration/spark-datasource/src/test/resources/test_json.json b/integration/spark/src/test/resources/test_json.json
similarity index 100%
rename from integration/spark-datasource/src/test/resources/test_json.json
rename to integration/spark/src/test/resources/test_json.json
diff --git a/integration/spark-common-test/src/test/resources/timeStampFormatData1.csv b/integration/spark/src/test/resources/timeStampFormatData1.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/timeStampFormatData1.csv
rename to integration/spark/src/test/resources/timeStampFormatData1.csv
diff --git a/integration/spark-common-test/src/test/resources/timeStampFormatData2.csv b/integration/spark/src/test/resources/timeStampFormatData2.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/timeStampFormatData2.csv
rename to integration/spark/src/test/resources/timeStampFormatData2.csv
diff --git a/integration/spark-common-test/src/test/resources/timeseriestest.csv b/integration/spark/src/test/resources/timeseriestest.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/timeseriestest.csv
rename to integration/spark/src/test/resources/timeseriestest.csv
diff --git a/integration/spark-common-test/src/test/resources/timestamp.csv b/integration/spark/src/test/resources/timestamp.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/timestamp.csv
rename to integration/spark/src/test/resources/timestamp.csv
diff --git a/integration/spark-common-test/src/test/resources/timestampdata.csv b/integration/spark/src/test/resources/timestampdata.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/timestampdata.csv
rename to integration/spark/src/test/resources/timestampdata.csv
diff --git a/integration/spark-common-test/src/test/resources/timestampdatafile.csv b/integration/spark/src/test/resources/timestampdatafile.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/timestampdatafile.csv
rename to integration/spark/src/test/resources/timestampdatafile.csv
diff --git a/integration/spark-common-test/src/test/resources/tpch/customers.csv b/integration/spark/src/test/resources/tpch/customers.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/tpch/customers.csv
rename to integration/spark/src/test/resources/tpch/customers.csv
diff --git a/integration/spark-common-test/src/test/resources/tpch/lineitem.csv b/integration/spark/src/test/resources/tpch/lineitem.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/tpch/lineitem.csv
rename to integration/spark/src/test/resources/tpch/lineitem.csv
diff --git a/integration/spark-common-test/src/test/resources/tpch/nation.csv b/integration/spark/src/test/resources/tpch/nation.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/tpch/nation.csv
rename to integration/spark/src/test/resources/tpch/nation.csv
diff --git a/integration/spark-common-test/src/test/resources/tpch/orders.csv b/integration/spark/src/test/resources/tpch/orders.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/tpch/orders.csv
rename to integration/spark/src/test/resources/tpch/orders.csv
diff --git a/integration/spark-common-test/src/test/resources/tpch/region.csv b/integration/spark/src/test/resources/tpch/region.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/tpch/region.csv
rename to integration/spark/src/test/resources/tpch/region.csv
diff --git a/integration/spark-common-test/src/test/resources/tpch/supplier.csv b/integration/spark/src/test/resources/tpch/supplier.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/tpch/supplier.csv
rename to integration/spark/src/test/resources/tpch/supplier.csv
diff --git a/integration/spark-common-test/src/test/resources/unicodechar.csv b/integration/spark/src/test/resources/unicodechar.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/unicodechar.csv
rename to integration/spark/src/test/resources/unicodechar.csv
diff --git a/integration/spark-common-test/src/test/resources/uniq.csv b/integration/spark/src/test/resources/uniq.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/uniq.csv
rename to integration/spark/src/test/resources/uniq.csv
diff --git a/integration/spark-common-test/src/test/resources/uniqwithoutheader.csv b/integration/spark/src/test/resources/uniqwithoutheader.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/uniqwithoutheader.csv
rename to integration/spark/src/test/resources/uniqwithoutheader.csv
diff --git a/integration/spark-datasource/src/test/resources/vardhandaterestruct.csv b/integration/spark/src/test/resources/vardhandaterestruct.csv
similarity index 100%
rename from integration/spark-datasource/src/test/resources/vardhandaterestruct.csv
rename to integration/spark/src/test/resources/vardhandaterestruct.csv
diff --git a/integration/spark-common-test/src/test/resources/verticalDelimitedData.csv b/integration/spark/src/test/resources/verticalDelimitedData.csv
similarity index 100%
rename from integration/spark-common-test/src/test/resources/verticalDelimitedData.csv
rename to integration/spark/src/test/resources/verticalDelimitedData.csv
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFunctionSuite.scala b/integration/spark/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFunctionSuite.scala
similarity index 98%
rename from integration/spark2/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFunctionSuite.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFunctionSuite.scala
index 726ed42..9622d77 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFunctionSuite.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFunctionSuite.scala
@@ -5,17 +5,15 @@ import java.util.{Random, UUID}
 
 import org.apache.commons.io.FileUtils
 import org.apache.spark.sql.{CarbonEnv, SaveMode}
-import org.apache.spark.sql.test.Spark2TestQueryExecutor
+import org.apache.spark.sql.test.SparkTestQueryExecutor
 import org.apache.spark.sql.test.util.QueryTest
-import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Ignore}
+import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
 
 import org.apache.carbondata.core.constants.{CarbonCommonConstants, CarbonV3DataFormatConstants}
 import org.apache.carbondata.core.datamap.status.DataMapStatusManager
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.core.util.path.CarbonTablePath
-import org.apache.carbondata.datamap.bloom.BloomCoarseGrainDataMapTestUtil.deleteFile
-import org.apache.carbondata.datamap.bloom.BloomCoarseGrainDataMapTestUtil.createFile
-import org.apache.carbondata.datamap.bloom.BloomCoarseGrainDataMapTestUtil.checkBasicQuery
+import org.apache.carbondata.datamap.bloom.BloomCoarseGrainDataMapTestUtil.{checkBasicQuery, createFile, deleteFile}
 
 class BloomCoarseGrainDataMapFunctionSuite  extends QueryTest with BeforeAndAfterAll with BeforeAndAfterEach {
   val bigFile = s"$resourcesPath/bloom_datamap_function_test_big.csv"
@@ -24,7 +22,6 @@ class BloomCoarseGrainDataMapFunctionSuite  extends QueryTest with BeforeAndAfte
   val dataMapName = "bloom_dm"
 
   override protected def beforeAll(): Unit = {
-    sqlContext.sparkContext.setLogLevel("info")
     deleteFile(bigFile)
     new File(CarbonProperties.getInstance().getSystemFolderLocation).delete()
     createFile(bigFile, line = 2000)
@@ -775,7 +772,7 @@ class BloomCoarseGrainDataMapFunctionSuite  extends QueryTest with BeforeAndAfte
     sql(s"INSERT INTO $bloomDMSampleTable SELECT 'c1v2', 2, 'c3v2'")
 
    // two segments both have datamap files
-    val carbonTable = CarbonEnv.getCarbonTable(Option("default"), bloomDMSampleTable)(Spark2TestQueryExecutor.spark)
+    val carbonTable = CarbonEnv.getCarbonTable(Option("default"), bloomDMSampleTable)(SparkTestQueryExecutor.spark)
     import scala.collection.JavaConverters._
     (0 to 1).foreach { segId =>
       val datamapPath = CarbonTablePath.getDataMapStorePath(carbonTable.getTablePath, segId.toString, dataMapName)
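With spark2 and spark-common-test folded into integration/spark, the suite above swaps the Spark2-specific test executor for the unified SparkTestQueryExecutor and collapses the three single-name imports into one line. A minimal sketch of the resulting lookup, using the names shown in the hunk (the table name is hypothetical):

import org.apache.spark.sql.CarbonEnv
import org.apache.spark.sql.test.SparkTestQueryExecutor

val bloomDMSampleTable = "carbon_bloom"  // hypothetical table name for illustration
// Resolve the table metadata through the SparkSession owned by the unified executor.
val carbonTable = CarbonEnv.getCarbonTable(
  Option("default"), bloomDMSampleTable)(SparkTestQueryExecutor.spark)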
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapSuite.scala b/integration/spark/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapSuite.scala
similarity index 100%
rename from integration/spark2/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapSuite.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapSuite.scala
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapTestUtil.scala b/integration/spark/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapTestUtil.scala
similarity index 100%
rename from integration/spark2/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapTestUtil.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapTestUtil.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMapSuite.scala b/integration/spark/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMapSuite.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMapSuite.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMapSuite.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala b/integration/spark/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/geo/GeoTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/geo/GeoTest.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/geo/GeoTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/geo/GeoTest.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestBigInt.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestBigInt.scala
similarity index 98%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestBigInt.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestBigInt.scala
index 27be08b..b0d482a 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestBigInt.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestBigInt.scala
@@ -92,10 +92,11 @@ class TestBigInt extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("drop table if exists carbonTable")
     sql("drop table if exists hiveTable")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
   }
 }
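This afterAll change is the template repeated across the suites renamed below: teardown restores the timestamp format to the shipped default instead of hard-coding "dd-MM-yyyy", and it runs before the DROP TABLE statements so a failing drop cannot skip the reset. The pattern as a standalone sketch, using only names from the hunk:

import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties

// Restore the global timestamp format to the shipped default first; only
// then run table cleanup, so an exception in a drop statement cannot
// leave the property stuck on a suite-local value.
CarbonProperties.getInstance()
  .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
    CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)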
 
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
similarity index 97%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
index e7758a1..b4bdd57 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/bigdecimal/TestDimensionWithDecimalDataType.scala
@@ -55,9 +55,10 @@ class TestDimensionWithDecimalDataType extends QueryTest with BeforeAndAfterAll
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("drop table if exists carbonTable")
     sql("drop table if exists hiveTable")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
   }
 }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/binary/TestBinaryDataType.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/binary/TestBinaryDataType.scala
similarity index 99%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/binary/TestBinaryDataType.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/binary/TestBinaryDataType.scala
index c54d3cb..905552c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/binary/TestBinaryDataType.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/binary/TestBinaryDataType.scala
@@ -1662,6 +1662,7 @@ class TestBinaryDataType extends QueryTest with BeforeAndAfterAll {
     }
 
     override def afterAll: Unit = {
+        sqlContext.sparkSession.conf.unset("hive.exec.dynamic.partition.mode")
         sql("DROP TABLE IF EXISTS binaryTable")
         sql("DROP TABLE IF EXISTS hiveTable")
         sql("DROP TABLE IF EXISTS hive_table")
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
similarity index 98%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
index 3855ce2..ca24a07 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
@@ -23,11 +23,18 @@ import scala.collection.mutable
 
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 
-trait TestAdaptiveComplexType extends QueryTest {
+trait TestAdaptiveComplexType extends QueryTest with BeforeAndAfterAll {
+
+  override def afterAll(): Unit = {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+  }
 
   test("test INT with struct and array, Encoding INT-->BYTE") {
     sql("Drop table if exists adaptive")
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
similarity index 98%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
index f731c4b..1274497 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
@@ -44,10 +44,12 @@ class TestAdaptiveEncodingForNullValues
   }
 
   override def afterAll(): Unit = {
-    sql("DROP TABLE IF EXISTS adaptive")
     CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
       .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
         "true")
+    sql("DROP TABLE IF EXISTS adaptive")
   }
 
   test("test INT with struct and array, Encoding INT-->BYTE") {
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingSafeColumnPageForComplexDataType.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingSafeColumnPageForComplexDataType.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingSafeColumnPageForComplexDataType.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingSafeColumnPageForComplexDataType.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeColumnPageForComplexDataType.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeColumnPageForComplexDataType.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeColumnPageForComplexDataType.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeColumnPageForComplexDataType.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeHeapColumnPageForComplexDataType.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeHeapColumnPageForComplexDataType.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeHeapColumnPageForComplexDataType.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeHeapColumnPageForComplexDataType.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAllComplexDataType.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAllComplexDataType.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAllComplexDataType.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAllComplexDataType.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCompactionComplexType.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCompactionComplexType.scala
similarity index 99%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCompactionComplexType.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCompactionComplexType.scala
index ce2dd75..e83956e 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCompactionComplexType.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCompactionComplexType.scala
@@ -40,9 +40,11 @@ class TestCompactionComplexType extends QueryTest with BeforeAndAfterAll {
   }
 
   override protected def afterAll(): Unit = {
-    sql("DROP TABLE IF EXISTS compactComplex")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.COMPACTION_SEGMENT_LEVEL_THRESHOLD, compactionThreshold)
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+    sql("DROP TABLE IF EXISTS compactComplex")
   }
 
   test("test INT with struct and array, Encoding INT-->BYTE") {
@@ -1107,7 +1109,7 @@ class TestCompactionComplexType extends QueryTest with BeforeAndAfterAll {
       "DEBIT_COUNT,CREDIT_COUNT, DEPOSIT,HQ_DEPOSIT','COMPLEX_DELIMITER_LEVEL_1'='$', " +
       "'COMPLEX_DELIMITER_LEVEL_2'='&')")
     sql("ALTER TABLE compactComplex COMPACT 'major'")
-    checkAnswer(sql("Select count(*) from compactComplex"), Row(30))
+    checkAnswer(sql("Select count(*) from compactComplex"), Row(63))
   }
 
   test("Test Compaction for complex types with table restructured") {
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexDataType.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexDataType.scala
similarity index 99%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexDataType.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexDataType.scala
index d0c4851..34b71eb 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexDataType.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexDataType.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.{AnalysisException, Row}
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
+import org.apache.carbondata.common.constants.LoggerAction
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 
@@ -37,9 +37,6 @@ import org.apache.carbondata.core.util.CarbonProperties
 
 class TestComplexDataType extends QueryTest with BeforeAndAfterAll {
 
-  val badRecordAction = CarbonProperties.getInstance()
-    .getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION)
-
   val hugeBinary = RandomStringUtils.randomAlphabetic(33000)
 
   override def beforeAll(): Unit = {
@@ -50,17 +47,16 @@ class TestComplexDataType extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll(): Unit = {
-    sql("DROP TABLE IF EXISTS table1")
-    sql("DROP TABLE IF EXISTS test")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "false")
-    CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
         CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, badRecordAction)
-    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+      .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, LoggerAction.FORCE.name())
       .removeProperty(CarbonCommonConstants.COMPLEX_DELIMITERS_LEVEL_1)
+    sql("DROP TABLE IF EXISTS table1")
+    sql("DROP TABLE IF EXISTS test")
   }
 
   test("test Projection PushDown for Struct - Integer type") {
@@ -138,8 +134,6 @@ class TestComplexDataType extends QueryTest with BeforeAndAfterAll {
     sql("insert into table1 values(array(''))")
     checkAnswer(sql("select detail[0] from table1"), Seq(Row("")))
     sql("drop table if exists table1")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, badRecordAction)
   }
 
   test("test Projection PushDown for Struct - Array type when Array is Empty") {
@@ -151,8 +145,6 @@ class TestComplexDataType extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql("select person.detail[0] from table1"), Seq(Row("")))
     checkAnswer(sql("select person.age from table1"), Seq(Row(1)))
     sql("drop table if exists table1")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, badRecordAction)
   }
 
   test("test Projection PushDown for Struct - Double type") {
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
index c8850bf..a2e9be3 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
@@ -287,6 +287,8 @@ class TestComplexTypeQuery extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, timestampFormat)
     sql("drop table if exists complexcarbontable")
     sql("drop table if exists complexhivetable")
     sql("drop table if exists structusingstructCarbon")
@@ -295,7 +297,5 @@ class TestComplexTypeQuery extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists structusingarrayhive")
     sql("drop table if exists complex_filter")
     sql("drop table if exists carbon_table")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, timestampFormat)
   }
 }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeWithBigArray.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeWithBigArray.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeWithBigArray.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeWithBigArray.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCreateTableWithDouble.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCreateTableWithDouble.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCreateTableWithDouble.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestCreateTableWithDouble.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/MultiFilesDataLoagdingTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/MultiFilesDataLoagdingTestCase.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/MultiFilesDataLoagdingTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/MultiFilesDataLoagdingTestCase.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithAutoLoadMerge.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithAutoLoadMerge.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithAutoLoadMerge.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithAutoLoadMerge.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithBlankLine.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithBlankLine.scala
similarity index 99%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithBlankLine.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithBlankLine.scala
index ecd8e0f..54fcaab 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithBlankLine.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithBlankLine.scala
@@ -51,7 +51,6 @@ class TestLoadDataWithBlankLine extends QueryTest with BeforeAndAfterAll {
   test("test carbon table data loading when the first line is blank") {
     sql(s"LOAD DATA LOCAL INPATH '${resourcesPath}/dataWithNullFirstLine.csv' INTO TABLE " +
       "carbontable2 OPTIONS('DELIMITER'= ',','FILEHEADER'='empno,empname,designation,doj,workgroupcategory,workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate,attendance,utilization,salary')")
-
     checkAnswer(sql("select count(*) from carbontable2"),
       Seq(Row(11)))
   }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
similarity index 99%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
index 8236635..b982149 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
@@ -167,7 +167,7 @@ class CustomizeCompressor extends Compressor {
 class TestLoadDataWithCompression extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
   private val tableName = "load_test_with_compressor"
   private var executorService: ExecutorService = _
-  private val csvDataDir = s"$integrationPath/spark2/target/csv_load_compression"
+  private val csvDataDir = s"$integrationPath/spark/target/csv_load_compression"
   private val compressors = Array("snappy","zstd","gzip")
 
   override protected def beforeAll(): Unit = {
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithEmptyArrayColumns.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithEmptyArrayColumns.scala
similarity index 97%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithEmptyArrayColumns.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithEmptyArrayColumns.scala
index 290006d..73ffd86 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithEmptyArrayColumns.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithEmptyArrayColumns.scala
@@ -56,8 +56,10 @@ class TestLoadDataWithEmptyArrayColumns extends QueryTest with BeforeAndAfterAll
   }
 
   override def afterAll {
-    sql("drop table nest13")
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+    sql("drop table nest13")
+
   }
 }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithJunkChars.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithJunkChars.scala
similarity index 96%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithJunkChars.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithJunkChars.scala
index 8d26346..b754664 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithJunkChars.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithJunkChars.scala
@@ -28,7 +28,7 @@ class TestLoadDataWithJunkChars extends QueryTest with BeforeAndAfterAll {
   val junkchars = "ǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰ"
 
   def buildTestData() = {
-    filePath = s"$integrationPath/spark-common-test/target/junkcharsdata.csv"
+    filePath = s"$integrationPath/spark/target/junkcharsdata.csv"
     val file = new File(filePath)
     val writer = new BufferedWriter(new FileWriter(file))
     writer.write("c1,c2\n")
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinBigInt.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinBigInt.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinBigInt.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinBigInt.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithNullMeasures.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithNullMeasures.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithNullMeasures.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithNullMeasures.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithUnsafeMemory.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithUnsafeMemory.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithUnsafeMemory.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithUnsafeMemory.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithYarnLocalDirs.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithYarnLocalDirs.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithYarnLocalDirs.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithYarnLocalDirs.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestNoInvertedIndexLoadAndQuery.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestNoInvertedIndexLoadAndQuery.scala
similarity index 98%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestNoInvertedIndexLoadAndQuery.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestNoInvertedIndexLoadAndQuery.scala
index 5f867d6..b320510 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestNoInvertedIndexLoadAndQuery.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestNoInvertedIndexLoadAndQuery.scala
@@ -299,6 +299,9 @@ class TestNoInvertedIndexLoadAndQuery extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("drop table if exists index1")
     sql("drop table if exists index2")
     sql("drop table if exists indexFormat")
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestCSVHavingOnlySpaceChar.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestCSVHavingOnlySpaceChar.scala
similarity index 97%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestCSVHavingOnlySpaceChar.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestCSVHavingOnlySpaceChar.scala
index 90e493a..808466c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestCSVHavingOnlySpaceChar.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestCSVHavingOnlySpaceChar.scala
@@ -52,8 +52,9 @@ class TestCSVHavingOnlySpaceChar extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
-    sql("drop table emptyRowCarbonTable")
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+    sql("drop table emptyRowCarbonTable")
   }
 }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestEmptyRows.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestEmptyRows.scala
similarity index 97%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestEmptyRows.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestEmptyRows.scala
index 1bd83ff..00b2c72 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestEmptyRows.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestEmptyRows.scala
@@ -81,9 +81,10 @@ class TestEmptyRows extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("drop table emptyRowCarbonTable")
     sql("drop table emptyRowHiveTable")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
   }
 }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestSkipEmptyLines.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestSkipEmptyLines.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestSkipEmptyLines.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/emptyrow/TestSkipEmptyLines.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/ArrayDataTypeTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/ArrayDataTypeTestCase.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/ArrayDataTypeTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/ArrayDataTypeTestCase.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/DoubleDataTypeTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/DoubleDataTypeTestCase.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/DoubleDataTypeTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/DoubleDataTypeTestCase.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/FloatDataTypeTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/FloatDataTypeTestCase.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/FloatDataTypeTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/FloatDataTypeTestCase.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/MapDataTypeTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/MapDataTypeTestCase.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/MapDataTypeTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/MapDataTypeTestCase.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/TestAdaptiveEncodingForPrimitiveTypes.scala b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/TestAdaptiveEncodingForPrimitiveTypes.scala
similarity index 99%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/TestAdaptiveEncodingForPrimitiveTypes.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/TestAdaptiveEncodingForPrimitiveTypes.scala
index ba02c1f..1de3762 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/TestAdaptiveEncodingForPrimitiveTypes.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/primitiveTypes/TestAdaptiveEncodingForPrimitiveTypes.scala
@@ -351,7 +351,7 @@ class TestAdaptiveEncodingForPrimitiveTypes extends QueryTest with BeforeAndAfte
   test("test filter queries on adaptive encoded column with complex column in the schema") {
     sql("drop table if exists complexTable")
     sql("CREATE TABLE complexTable( id LONG,name STRING,salary FLOAT,file struct<school:array<string>, age:int>) STORED AS carbondata TBLPROPERTIES('sort_columns'='id,name')")
-    sql(s"LOAD DATA INPATH '$rootPath/examples/spark2/src/main/resources/streamSample.csv' INTO TABLE complexTable OPTIONS('HEADER'='TRUE')")
+    sql(s"LOAD DATA INPATH '$rootPath/examples/spark/src/main/resources/streamSample.csv' INTO TABLE complexTable OPTIONS('HEADER'='TRUE')")
     checkAnswer(sql("select id,name, salary from complexTable"),
       Seq(Row(100000001, "batch_1", 0.1),
         Row(100000002, "batch_2", 0.2),
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/TestCarbonCli.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/TestCarbonCli.scala
similarity index 95%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/TestCarbonCli.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/TestCarbonCli.scala
index e27b029..3a2dc98 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/TestCarbonCli.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/TestCarbonCli.scala
@@ -17,10 +17,10 @@
 package org.apache.carbondata.spark.testsuite
 
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.common.util.Spark2QueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-class TestCarbonCli extends Spark2QueryTest with BeforeAndAfterAll{
+class TestCarbonCli extends QueryTest with BeforeAndAfterAll {
 
   override protected def beforeAll(): Unit = {
     sql("drop table if exists OneRowTable")
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
similarity index 98%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
index a234cf1..e8abeda 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
@@ -19,28 +19,26 @@ package org.apache.carbondata.spark.testsuite.addsegment
 import java.io.File
 import java.nio.file.{Files, Paths}
 
+import scala.io.Source
+
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.test.util.QueryTest
 import org.apache.spark.sql.util.SparkSQLUtil
 import org.apache.spark.sql.{AnalysisException, CarbonEnv, Row}
-import org.apache.spark.sql.{CarbonEnv, Row}
+import org.junit.Assert
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.common.Strings
+import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
 import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, CarbonFileFilter}
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.datastore.row.CarbonRow
-import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.core.metadata.datatype.DataTypes
+import org.apache.carbondata.core.util.{CarbonProperties, ThreadLocalSessionInfo}
 import org.apache.carbondata.core.util.path.CarbonTablePath
 import org.apache.carbondata.hadoop.readsupport.impl.CarbonRowReadSupport
-import org.apache.carbondata.sdk.file.{Field, Schema}
-import org.apache.carbondata.sdk.file.{CarbonReader, CarbonWriter}
-import org.junit.Assert
-import scala.io.Source
-
-import org.apache.carbondata.common.Strings
-import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
-import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, CarbonFileFilter}
-import org.apache.carbondata.core.metadata.datatype.DataTypes
+import org.apache.carbondata.sdk.file.{CarbonReader, CarbonWriter, Field, Schema}
 
 class AddSegmentTestCase extends QueryTest with BeforeAndAfterAll {
 
@@ -411,8 +409,8 @@ class AddSegmentTestCase extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql("select count(*) from addsegment1"), Seq(Row(30)))
 
     sql(s"alter table addsegment1 add segment options('path'='$newPath', 'format'='PARQUET')").show()
-    checkExistence(sql(s"show segments for table addsegment1"), true, "spark-common/target/warehouse/addsegtest")
-    checkExistence(sql(s"show history segments for table addsegment1"), true, "spark-common/target/warehouse/addsegtest")
+    checkExistence(sql(s"show segments for table addsegment1"), true, "spark/target/warehouse/addsegtest")
+    checkExistence(sql(s"show history segments for table addsegment1"), true, "spark/target/warehouse/addsegtest")
     FileFactory.deleteAllFilesOfDir(new File(newPath))
   }
 
@@ -520,8 +518,8 @@ class AddSegmentTestCase extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql("select * from carbon_table"), sql("select * from parquet_table"))
 
     // test show segment
-    checkExistence(sql(s"show segments for table carbon_table"), true, "spark-common/target/warehouse/parquet_table")
-    checkExistence(sql(s"show history segments for table carbon_table"), true, "spark-common/target/warehouse/parquet_table")
+    checkExistence(sql(s"show segments for table carbon_table"), true, "spark/target/warehouse/parquet_table")
+    checkExistence(sql(s"show history segments for table carbon_table"), true, "spark/target/warehouse/parquet_table")
 
     sql("drop table if exists parquet_table")
     sql("drop table if exists carbon_table")
@@ -845,6 +843,11 @@ class AddSegmentTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll = {
+    defaultConfig()
+    sqlContext.sparkSession.conf.unset("carbon.input.segments.default.addsegment1")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     dropTable
   }
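The teardown for AddSegmentTestCase now also resets session state: defaultConfig() reverts the test session settings, and carbon.input.segments.default.addsegment1, the per-table property that pins queries to an explicit list of segment ids, is unset so later suites read every segment again. A sketch of how that property behaves inside the QueryTest context (segment ids illustrative):

// Pin queries on table default.addsegment1 to segments 0 and 1 only.
sqlContext.sparkSession.conf.set("carbon.input.segments.default.addsegment1", "0,1")
sql("select count(*) from addsegment1").show()
// Remove the pin so subsequent suites see all segments.
sqlContext.sparkSession.conf.unset("carbon.input.segments.default.addsegment1")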
 
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
similarity index 96%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
index d2bc970..c08d35a 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
@@ -100,6 +100,9 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("DROP TABLE IF EXISTS alldatatypestableAGG")
     sql("DROP TABLE IF EXISTS alldatatypesAGG_hive")
   }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AverageQueryTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AverageQueryTestCase.scala
similarity index 98%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AverageQueryTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AverageQueryTestCase.scala
index ffc356c..5c3f80c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AverageQueryTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/aggquery/AverageQueryTestCase.scala
@@ -106,10 +106,11 @@ class AverageQueryTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("drop table carbonTable")
     sql("drop table hiveTable")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
   }
 
 }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
similarity index 99%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
index 68c0b84..442e751 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
@@ -52,8 +52,6 @@ class AllDataTypesTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll {
     clean
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
   }
 
   //Test-22
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
similarity index 90%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
index 281e353..7dafaf7 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
@@ -17,10 +17,10 @@
 package org.apache.carbondata.spark.testsuite.allqueries
 
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.Spark2QueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-class InsertIntoCarbonTableSpark2TestCase extends Spark2QueryTest with BeforeAndAfterAll {
+class InsertIntoCarbonTableSpark2TestCase extends QueryTest with BeforeAndAfterAll {
   override def beforeAll: Unit = {
     sql("drop table if exists OneRowTable")
   }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
similarity index 98%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
index 49081ef..899b38b 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
@@ -35,7 +35,7 @@ class MeasureOnlyTableTestCases extends QueryTest with BeforeAndAfterAll {
 
   val rootPath = new File(this.getClass.getResource("/").getPath
     + "../../../..").getCanonicalPath
-  val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
+  val path = s"$rootPath/examples/spark/src/main/resources/data.csv"
   override def beforeAll {
     clean
     sql(s"""
@@ -50,7 +50,7 @@ class MeasureOnlyTableTestCases extends QueryTest with BeforeAndAfterAll {
                | STORED AS carbondata
              """.stripMargin)
 
-          val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
+          val path = s"$rootPath/examples/spark/src/main/resources/data.csv"
 
           sql(
             s"""
@@ -75,8 +75,6 @@ class MeasureOnlyTableTestCases extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll {
     clean
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
   }
 
   test("SELECT sum(intField) FROM carbon_table where intField > 10") {
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestTableNameHasDbName.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestTableNameHasDbName.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestTableNameHasDbName.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestTableNameHasDbName.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableAddColumns.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableAddColumns.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableAddColumns.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableAddColumns.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableCompactionLevelThreshold.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableCompactionLevelThreshold.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableCompactionLevelThreshold.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableCompactionLevelThreshold.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableSortColumnsProperty.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableSortColumnsProperty.scala
similarity index 98%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableSortColumnsProperty.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableSortColumnsProperty.scala
index ba9d213..7858fe8 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableSortColumnsProperty.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableSortColumnsProperty.scala
@@ -22,6 +22,7 @@ import java.io.{ByteArrayOutputStream, PrintStream}
 import org.apache.spark.sql.{CarbonEnv, Row}
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
+
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.core.util.path.CarbonTablePath
@@ -41,6 +42,7 @@ class TestAlterTableSortColumnsProperty extends QueryTest with BeforeAndAfterAll
   }
 
   override def afterAll(): Unit = {
+    defaultConfig()
     dropTable()
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
       CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
@@ -390,6 +392,8 @@ class TestAlterTableSortColumnsProperty extends QueryTest with BeforeAndAfterAll
     }
     sql(s"set carbon.input.segments.default.$tableName=*").collect()
     sql(s"set carbon.input.segments.default.$baseTableName=*").collect()
+    sqlContext.sparkSession.conf.unset(s"carbon.input.segments.default.$tableName")
+    sqlContext.sparkSession.conf.unset(s"carbon.input.segments.default.$baseTableName")
 
     // query
     checkAnswer(sql(s"select count(*) from $tableName"), sql(s"select count(*) from $baseTableName"))
@@ -469,6 +473,8 @@ class TestAlterTableSortColumnsProperty extends QueryTest with BeforeAndAfterAll
     }
     sql(s"set carbon.input.segments.default.$tableName=*").collect()
     sql(s"set carbon.input.segments.default.$baseTableName=*").collect()
+    sqlContext.sparkSession.conf.unset(s"carbon.input.segments.default.$tableName")
+    sqlContext.sparkSession.conf.unset(s"carbon.input.segments.default.$baseTableName")
 
     // no_sort compaction flow for column drift
     sql(s"alter table $tableName set tblproperties('sort_scope'='no_sort', 'sort_columns'='charField, intField')")
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
similarity index 99%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
index fdf43e7..7285101 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
@@ -35,6 +35,7 @@ class BadRecordActionTest extends QueryTest {
   initCarbonProperties
 
   private def initCarbonProperties = {
+    defaultConfig()
     CarbonProperties.getInstance().addProperty(
       CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, LoggerAction.FORCE.name())
     badRecordFilePath.mkdirs()
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
similarity index 96%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
index 6292802..46f6cf9 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
@@ -36,9 +36,6 @@ import org.apache.carbondata.common.constants.LoggerAction
 class BadRecordEmptyDataTest extends QueryTest with BeforeAndAfterAll {
   var hiveContext: HiveContext = _
 
-  val bad_records_action = CarbonProperties.getInstance()
-    .getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION)
-
   override def beforeAll {
     try {
       sql("drop table IF EXISTS emptyColumnValues")
@@ -133,6 +130,7 @@ class BadRecordEmptyDataTest extends QueryTest with BeforeAndAfterAll {
   }
 
   test("select count(*) from empty_timestamp_false") {
+    sql("select count(*) from empty_timestamp_false").show(100, false)
     checkAnswer(
       sql("select count(*) from empty_timestamp_false"),
       Seq(Row(7)
@@ -171,17 +169,14 @@ class BadRecordEmptyDataTest extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("drop table IF EXISTS emptyColumnValues")
     sql("drop table IF EXISTS emptyColumnValues_false")
     sql("drop table IF EXISTS empty_timestamp")
     sql("drop table IF EXISTS empty_timestamp_false")
     sql("drop table IF EXISTS dataloadOptionTests")
     sql("drop table IF EXISTS bigtab")
-
-    CarbonProperties.getInstance().addProperty(
-      CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
-      bad_records_action)
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
   }
 }
\ No newline at end of file
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
similarity index 99%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
index 650d7f1..9ccfd84 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
@@ -37,6 +37,7 @@ import org.apache.carbondata.core.datastore.impl.FileFactory
 class BadRecordLoggerTest extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
+    defaultConfig()
     try {
       sql("drop table IF EXISTS sales")
       sql("drop table IF EXISTS serializable_values")
@@ -341,6 +342,9 @@ class BadRecordLoggerTest extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("drop table IF EXISTS sales")
     sql("drop table IF EXISTS sales_test")
     sql("drop table IF EXISTS serializable_values")
@@ -353,7 +357,5 @@ class BadRecordLoggerTest extends QueryTest with BeforeAndAfterAll {
     sql("drop table IF EXISTS empty_timestamp_false")
     sql("drop table IF EXISTS dataloadOptionTests")
     sql("drop table IF EXISTS loadIssue")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
   }
 }
\ No newline at end of file
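
Both bad-record suites now call defaultConfig() before touching any
properties. Assuming defaultConfig() is the test-util helper that restores
Carbon's default test configuration, the intended shape is roughly (sketch;
imports shown for completeness):

    import org.apache.carbondata.common.constants.LoggerAction
    import org.apache.carbondata.core.constants.CarbonCommonConstants
    import org.apache.carbondata.core.util.CarbonProperties

    override def beforeAll {
      // Start from the harness defaults rather than whatever the previous
      // suite left behind, then apply this suite's own settings.
      defaultConfig()
      CarbonProperties.getInstance().addProperty(
        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, LoggerAction.FORCE.name())
    }
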
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestAvgForBigInt.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestAvgForBigInt.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestAvgForBigInt.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestAvgForBigInt.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
similarity index 97%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
index fd2afa7..2f3eb9c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
@@ -205,18 +205,19 @@ class TestBigDecimal extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+      .addProperty(CarbonCommonConstants.SORT_SIZE,
+        CarbonCommonConstants.SORT_SIZE_DEFAULT_VAL)
+      .addProperty(CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT,
+        CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT_DEFAULT_VALUE)
     sql("drop table if exists carbonTable")
     sql("drop table if exists hiveTable")
     sql("drop table if exists hiveBigDecimal")
     sql("drop table if exists carbonBigDecimal_2")
     sql("DROP TABLE IF EXISTS decimal_int_test")
     sql("drop table if exists carbonBigDecimal_3")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_SIZE,
-      CarbonCommonConstants.SORT_SIZE_DEFAULT_VAL)
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT,
-      CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT_DEFAULT_VALUE)
   }
 }
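
One detail the TestBigDecimal hunk leans on: addProperty returns the
CarbonProperties singleton, so the three separate reset calls collapse into
a single chained expression:

    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
      .addProperty(CarbonCommonConstants.SORT_SIZE,
        CarbonCommonConstants.SORT_SIZE_DEFAULT_VAL)
      .addProperty(CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT,
        CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT_DEFAULT_VALUE)
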
 
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
similarity index 97%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
index 8fcac34..1929951 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
@@ -108,8 +108,6 @@ class TestNullAndEmptyFields extends QueryTest with BeforeAndAfterAll {
   override def afterAll {
     sql("drop table if exists carbonTable")
     sql("drop table if exists hiveTable")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
   }
 }
 
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFieldsUnsafe.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFieldsUnsafe.scala
similarity index 94%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFieldsUnsafe.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFieldsUnsafe.scala
index abf2a7e..86e4a7a 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFieldsUnsafe.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFieldsUnsafe.scala
@@ -105,12 +105,11 @@ class TestNullAndEmptyFieldsUnsafe extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
+        CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_DEFAULT)
     sql("drop table if exists carbonTable")
     sql("drop table if exists hiveTable")
-    CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
-        .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
-          CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_DEFAULT)
   }
 }
 
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
similarity index 100%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
similarity index 95%
rename from integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
index 34cab19..84ac989 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/CarbonCustomBlockDistributionTest.scala
@@ -96,8 +96,10 @@ class CarbonCustomBlockDistributionTest extends QueryTest with BeforeAndAfterAll
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_CUSTOM_BLOCK_DISTRIBUTION,
+        CarbonCommonConstants.CARBON_CUSTOM_BLOCK_DISTRIBUTION_DEFAULT)
     // delete the temp data file
-    CarbonProperties.getInstance().addProperty("carbon.custom.distribution","false")
     try {
       val file = FileFactory.getCarbonFile(outputPath)
       if (file.exists()) {
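
The same hunk also trades a raw string key for the named constant: the old
reset wrote "carbon.custom.distribution" = "false" by hand, while the new
one restores the documented default through CarbonCommonConstants:

    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_CUSTOM_BLOCK_DISTRIBUTION,
        CarbonCommonConstants.CARBON_CUSTOM_BLOCK_DISTRIBUTION_DEFAULT)
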
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBaseTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBaseTest.scala
similarity index 97%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBaseTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBaseTest.scala
index a5b0292..08d6327 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBaseTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBaseTest.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.carbondata.spark.testsuite.booleantype
 
-import org.apache.spark.sql.Row
+import org.apache.spark.sql.{CarbonEnv, Row}
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
 
@@ -168,7 +168,7 @@ class BooleanDataTypesBaseTest extends QueryTest with BeforeAndAfterEach with Be
     checkAnswer(sql("select a1 from carbon_table"), Seq(Row(false), Row(true)))
     sql("set spark.sql.codegen.wholestage=true")
     checkAnswer(sql("select a1 from carbon_table"), Seq(Row(false), Row(true)))
-    sql("reset")
     sql("drop table if exists carbon_table")
+    CarbonEnv.getInstance(sqlContext.sparkSession).carbonSessionInfo.getSessionParams.clear()
   }
 }
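
Note the substitution at the end of this hunk: sql("reset") clears every
runtime conf in the session, while the replacement clears only Carbon's own
session parameters (presumably the narrower cleanup intended here). As a
fragment, exactly as used above:

    import org.apache.spark.sql.CarbonEnv

    // Drop Carbon session params only; Spark SQL confs stay untouched.
    CarbonEnv.getInstance(sqlContext.sparkSession)
      .carbonSessionInfo.getSessionParams.clear()
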
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBigFileTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBigFileTest.scala
similarity index 99%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBigFileTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBigFileTest.scala
index c3603a3..d9c71b6 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBigFileTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBigFileTest.scala
@@ -45,8 +45,8 @@ class BooleanDataTypesBigFileTest extends QueryTest with BeforeAndAfterEach with
     assert(BooleanFile.deleteFile(pathOfOnlyBoolean))
   }
 
-  val pathOfManyDataType = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanBigFile.csv"
-  val pathOfOnlyBoolean = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanBigFileOnlyBoolean.csv"
+  val pathOfManyDataType = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanBigFile.csv"
+  val pathOfOnlyBoolean = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanBigFileOnlyBoolean.csv"
   val trueNum = 10000
 
   override def beforeAll(): Unit = {
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesFilterTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesFilterTest.scala
similarity index 96%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesFilterTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesFilterTest.scala
index db35dd0..c26ca63 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesFilterTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesFilterTest.scala
@@ -19,18 +19,19 @@ package org.apache.carbondata.spark.testsuite.booleantype
 import java.io.File
 
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.test.Spark2TestQueryExecutor
+import org.apache.spark.sql.test.SparkTestQueryExecutor
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
 
 class BooleanDataTypesFilterTest extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
 
   override def beforeAll(): Unit = {
+    defaultConfig()
     sql("drop table if exists carbon_table")
     sql("drop table if exists boolean_table")
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanOnlyBoolean.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanOnlyBoolean.csv"
 
     sql("CREATE TABLE if not exists carbon_table(booleanField BOOLEAN) STORED AS carbondata")
     sql(
@@ -40,7 +41,7 @@ class BooleanDataTypesFilterTest extends QueryTest with BeforeAndAfterEach with
          | OPTIONS('FILEHEADER' = 'booleanField')
        """.stripMargin)
 
-    val booleanLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBoolean.csv"
+    val booleanLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBoolean.csv"
     sql(
       s"""
          | CREATE TABLE boolean_table(
@@ -79,7 +80,7 @@ class BooleanDataTypesFilterTest extends QueryTest with BeforeAndAfterEach with
     checkAnswer(sql("select count(*) from carbon_table where booleanField = true"),
       Row(4))
 
-    if (Spark2TestQueryExecutor.spark.version.startsWith("2.1")) {
+    if (SparkTestQueryExecutor.spark.version.startsWith("2.1")) {
       checkAnswer(sql("select count(*) from carbon_table where booleanField = 'true'"),
         Row(0))
 
@@ -346,7 +347,7 @@ class BooleanDataTypesFilterTest extends QueryTest with BeforeAndAfterEach with
     sql("drop table if exists boolean_table2")
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val booleanLocation2 = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val booleanLocation2 = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
     sql(
       s"""
          | CREATE TABLE boolean_table2(
@@ -388,7 +389,7 @@ class BooleanDataTypesFilterTest extends QueryTest with BeforeAndAfterEach with
     sql("drop table if exists boolean_table2")
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val booleanLocation2 = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val booleanLocation2 = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
     sql(
       s"""
          | CREATE TABLE boolean_table2(
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesInsertTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesInsertTest.scala
similarity index 97%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesInsertTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesInsertTest.scala
index 16cb223..a06b867 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesInsertTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesInsertTest.scala
@@ -202,7 +202,7 @@ class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
     sql(
       s"""
@@ -294,7 +294,7 @@ class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
     sql(
       s"""
@@ -364,7 +364,7 @@ class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
     sql(
       s"""
@@ -421,7 +421,7 @@ class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with
 
       val rootPath = new File(this.getClass.getResource("/").getPath
         + "../../../..").getCanonicalPath
-      val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+      val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
       sql(
         s"""
@@ -477,7 +477,7 @@ class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
     sql(
       s"""
@@ -554,7 +554,7 @@ class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
     sql(
       s"""
@@ -764,7 +764,7 @@ class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
     sql(
       s"""
@@ -854,7 +854,7 @@ class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
     sql(
       s"""
@@ -928,7 +928,7 @@ class BooleanDataTypesInsertTest extends QueryTest with BeforeAndAfterEach with
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
     sql(
       s"""
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesLoadTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesLoadTest.scala
similarity index 92%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesLoadTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesLoadTest.scala
index 5f8f52c..e1fab7b 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesLoadTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesLoadTest.scala
@@ -21,7 +21,7 @@ import java.io.File
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.test.Spark2TestQueryExecutor
+import org.apache.spark.sql.test.SparkTestQueryExecutor
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
 
@@ -78,6 +78,9 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   override def afterAll(): Unit = {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("drop table if exists carbon_table")
     sql("drop table if exists boolean_table")
     sql("drop table if exists boolean_table2")
@@ -87,7 +90,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading table: support boolean data type format") {
-    val fileLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanOnlyBoolean.csv"
+    val fileLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanOnlyBoolean.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '$fileLocation'
@@ -103,7 +106,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading table: support boolean data type format, different format") {
-    val fileLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanDifferentFormat.csv"
+    val fileLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanDifferentFormat.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '$fileLocation'
@@ -120,7 +123,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading table: support boolean and other data type") {
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBoolean.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBoolean.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -137,7 +140,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading table: data columns is less than table defined columns") {
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBoolean.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBoolean.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -165,7 +168,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
          | STORED AS carbondata
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -204,7 +207,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
          | TBLPROPERTIES('sort_columns'='')
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanWithFileHeader.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanWithFileHeader.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -242,7 +245,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
          | TBLPROPERTIES('TABLE_BLOCKSIZE'='512','NO_INVERTED_INDEX'='charField', 'SORT_SCOPE'='GLOBAL_SORT')
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBoolean.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBoolean.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -275,7 +278,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
     checkAnswer(sql("select count(*) from boolean_table where booleanField = false or booleanField = true"),
       Row(10))
 
-    if (Spark2TestQueryExecutor.spark.version.startsWith("2.1")) {
+    if (SparkTestQueryExecutor.spark.version.startsWith("2.1")) {
       checkAnswer(sql("select count(*) from boolean_table where booleanField = 'true'"),
         Row(0))
 
@@ -332,7 +335,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
          | TBLPROPERTIES('TABLE_BLOCKSIZE'='512','NO_INVERTED_INDEX'='charField', 'SORT_SCOPE'='GLOBAL_SORT')
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanWithFileHeader.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanWithFileHeader.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -365,7 +368,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
     checkAnswer(sql("select count(*) from boolean_table where booleanField = false or booleanField = true"),
       Row(10))
 
-    if (Spark2TestQueryExecutor.spark.version.startsWith("2.1")) {
+    if (SparkTestQueryExecutor.spark.version.startsWith("2.1")) {
       checkAnswer(sql("select count(*) from boolean_table where booleanField = 'true'"),
         Row(0))
 
@@ -400,7 +403,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading table: bad_records_action is FORCE") {
-    val fileLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanDifferentFormat.csv"
+    val fileLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanDifferentFormat.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '$fileLocation'
@@ -419,7 +422,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading table: bad_records_action is FORCE, support boolean and other data type") {
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanBadRecords.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanBadRecords.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -436,7 +439,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading table: bad_records_action is IGNORE, support boolean and other data type") {
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanBadRecords.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanBadRecords.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -451,7 +454,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading table: bad_records_action is REDIRECT, support boolean and other data type") {
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanBadRecords.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanBadRecords.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -466,7 +469,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading table: bad_records_action is FAIL") {
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanBadRecords.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanBadRecords.csv"
     val exception_insert: Exception = intercept[Exception] {
       sql(
         s"""
@@ -527,7 +530,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
 
     sql(
       s"""
@@ -582,7 +585,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading overwrite: support boolean data type format, different format") {
-    val fileLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanDifferentFormat.csv"
+    val fileLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanDifferentFormat.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '$fileLocation'
@@ -607,7 +610,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
   }
 
   test("Loading overwrite: support boolean and other data type") {
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBoolean.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBoolean.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -650,7 +653,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
          | TBLPROPERTIES('sort_columns'='')
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -710,7 +713,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
          | TBLPROPERTIES('sort_columns'='')
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBoolean.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBoolean.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -748,7 +751,7 @@ class BooleanDataTypesLoadTest extends QueryTest with BeforeAndAfterEach with Be
          | )
          | STORED AS carbondata
        """.stripMargin)
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanBadRecords.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanBadRecords.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesParameterTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesParameterTest.scala
similarity index 98%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesParameterTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesParameterTest.scala
index 6759f79..d70bfef 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesParameterTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesParameterTest.scala
@@ -141,7 +141,7 @@ class BooleanDataTypesParameterTest extends QueryTest with BeforeAndAfterEach wi
          | TBLPROPERTIES('sort_columns'='')
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBoolean.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBoolean.csv"
     for (i <- 0 until 4) {
       sql(
         s"""
@@ -186,7 +186,7 @@ class BooleanDataTypesParameterTest extends QueryTest with BeforeAndAfterEach wi
          | TBLPROPERTIES('sort_columns'='')
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
     for (i <- 0 until 4) {
       sql(
         s"""
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesSortTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesSortTest.scala
similarity index 95%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesSortTest.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesSortTest.scala
index 65fb4cd..cf37417 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesSortTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesSortTest.scala
@@ -67,7 +67,7 @@ class BooleanDataTypesSortTest extends QueryTest with BeforeAndAfterEach with Be
          | TBLPROPERTIES('sort_columns'='booleanField')
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
@@ -121,7 +121,7 @@ class BooleanDataTypesSortTest extends QueryTest with BeforeAndAfterEach with Be
          | TBLPROPERTIES('sort_columns'='shortField,booleanField,booleanField2')
        """.stripMargin)
 
-    val storeLocation = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
+    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
     sql(
       s"""
          | LOAD DATA LOCAL INPATH '${storeLocation}'
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/compress/TestBooleanCompressSuite.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/compress/TestBooleanCompressSuite.scala
similarity index 89%
rename from integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/compress/TestBooleanCompressSuite.scala
rename to integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/compress/TestBooleanCompressSuite.scala
index c559e93..8cd7731 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/compress/TestBooleanCompressSuite.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/compress/TestBooleanCompressSuite.scala
@@ -26,11 +26,7 @@ import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.util.QueryTest
 
-import org.apache.carbondata.core.util.CarbonProperties
-
 class TestBooleanCompressSuite extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
-  val rootPath = new File(this.getClass.getResource("/").getPath
-    + "../..").getCanonicalPath
 
   override def beforeEach(): Unit = {
     sql("drop table if exists boolean_table")
@@ -41,13 +37,11 @@ class TestBooleanCompressSuite extends QueryTest with BeforeAndAfterEach with Be
     assert(deleteFile(randomBoolean))
   }
 
-  val randomBoolean = s"$rootPath/src/test/resources/bool/supportRandomBooleanBigFile.csv"
+  val randomBoolean = s"$resourcesPath/bool/supportRandomBooleanBigFile.csv"
   val trueNum = 10000000
... 6976 lines suppressed ...
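
One last simplification visible before the cut: TestBooleanCompressSuite no
longer derives a rootPath from the class resource plus "../.." and a
hard-coded module directory; it uses the resourcesPath that QueryTest
already exposes (sketch; assumes resourcesPath resolves to the module's
src/test/resources):

    // Before: rootPath computed from getClass.getResource("/") + "../..",
    // then "$rootPath/src/test/resources/...".  After:
    val randomBoolean = s"$resourcesPath/bool/supportRandomBooleanBigFile.csv"
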


[carbondata] 01/02: [Refactor] Refactor modules

Posted by ja...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

jackylk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git

commit 85d333c9f20bee03cdcc81d3f9e95836a476ddbf
Author: QiangCai <qi...@qq.com>
AuthorDate: Mon Feb 24 17:19:38 2020 +0800

    [Refactor] Refactor modules
    
    Why is this PR needed?
    There are too many spark-related modules.
    
    What changes were proposed in this PR?
    1. Move carbondata/store to carbondata/sdk
    2. Move examples/spark2 to examples/spark
    3. Move secondary_index to index/secondary-index
    4. Move datamap/mv to mv
    5. Move datamap to index
    
    Does this PR introduce any user interface change?
    No
    
    Is any new test case added?
    No
---
 examples/{spark2 => spark}/pom.xml                 |   4 +--
 .../examples/sdk/CarbonReaderExample.java          |   0
 .../carbondata/examples/sdk/SDKS3Example.java      |   0
 .../carbondata/examples/sdk/SDKS3ReadExample.java  |   0
 .../examples/sdk/SDKS3SchemaReadExample.java       |   0
 .../examples/sql/JavaCarbonSessionExample.java     |   2 +-
 .../src/main/resources/Test_Data1.csv              |   0
 .../src/main/resources/complexdata.csv             |   0
 .../{spark2 => spark}/src/main/resources/data.csv  |   0
 .../{spark2 => spark}/src/main/resources/data1.csv |   0
 .../src/main/resources/dataSample.csv              |   0
 .../src/main/resources/dimSample.csv               |   0
 .../src/main/resources/factSample.csv              |   0
 .../src/main/resources/log4j.properties            |   0
 .../src/main/resources/sample.csv                  |   0
 .../src/main/resources/streamSample.csv            |   0
 .../benchmark/ConcurrentQueryBenchmark.scala       |   2 +-
 .../org/apache/carbondata/benchmark/Query.scala    |   0
 .../carbondata/benchmark/SCDType2Benchmark.scala   |   2 +-
 .../benchmark/SimpleQueryBenchmark.scala           |   2 +-
 .../carbondata/examples/AlluxioExample.scala       |   0
 .../carbondata/examples/AlterTableExample.scala    |   0
 .../examples/CarbonDataFrameExample.scala          |   0
 .../carbondata/examples/CarbonSessionExample.scala |   6 ++--
 .../examples/CarbonSortColumnsExample.scala        |   2 +-
 .../examples/CaseClassDataFrameAPIExample.scala    |   0
 .../examples/CustomCompactionExample.scala         |   2 +-
 .../examples/DataFrameComplexTypeExample.scala     |   0
 .../examples/DataManagementExample.scala           |   2 +-
 .../examples/DataUpdateDeleteExample.scala         |   0
 .../carbondata/examples/DirectSQLExample.scala     |   2 +-
 .../carbondata/examples/ExternalTableExample.scala |   2 +-
 .../carbondata/examples/HadoopFileExample.scala    |   2 +-
 .../apache/carbondata/examples/HiveExample.scala   |  22 ++++++-------
 .../carbondata/examples/LuceneDataMapExample.scala |   0
 .../org/apache/carbondata/examples/MVExample.scala |   0
 .../carbondata/examples/QuerySegmentExample.scala  |   2 +-
 .../apache/carbondata/examples/S3CsvExample.scala  |   4 +--
 .../org/apache/carbondata/examples/S3Example.scala |   2 +-
 .../carbondata/examples/S3UsingSDkExample.scala    |   0
 .../carbondata/examples/SparkSessionExample.scala  |   2 +-
 .../examples/SparkStreamingExample.scala           |   4 +--
 .../examples/StandardPartitionExample.scala        |   2 +-
 .../carbondata/examples/StreamSQLExample.scala     |   0
 .../examples/StreamingUsingBatchLoadExample.scala  |   4 +--
 .../examples/StreamingWithRowParserExample.scala   |   2 +-
 .../examples/StructuredStreamingExample.scala      |   2 +-
 .../TableLevelCompactionOptionExample.scala        |   2 +-
 .../carbondata/examples/util/ExampleUtils.scala    |   8 ++---
 .../apache/carbondata/examplesCI/RunExamples.scala |   2 +-
 {datamap => index}/bloom/pom.xml                   |   2 +-
 .../datamap/bloom/AbstractBloomDataMapWriter.java  |   0
 .../datamap/bloom/BloomCacheKeyValue.java          |   0
 .../datamap/bloom/BloomCoarseGrainDataMap.java     |   0
 .../bloom/BloomCoarseGrainDataMapFactory.java      |   0
 .../datamap/bloom/BloomDataMapBuilder.java         |   0
 .../datamap/bloom/BloomDataMapCache.java           |   0
 .../datamap/bloom/BloomDataMapDistributable.java   |   0
 .../datamap/bloom/BloomDataMapModel.java           |   0
 .../datamap/bloom/BloomDataMapWriter.java          |   0
 .../datamap/bloom/BloomIndexFileStore.java         |   0
 .../carbondata/datamap/bloom/DataConvertUtil.java  |   0
 .../hadoop/util/bloom/CarbonBloomFilter.java       |   0
 {datamap => index}/examples/pom.xml                |   6 ++--
 .../datamap/examples/BlockletMinMax.java           |   0
 .../datamap/examples/MinMaxDataWriter.java         |   0
 .../datamap/examples/MinMaxIndexBlockDetails.java  |   0
 .../datamap/examples/MinMaxIndexDataMap.java       |   0
 .../examples/MinMaxIndexDataMapFactory.java        |   0
 .../datamap/examples/MinMaxDataMapSuite.scala      |   0
 {datamap => index}/lucene/pom.xml                  |   2 +-
 .../datamap/lucene/LuceneDataMapBuilder.java       |   0
 .../datamap/lucene/LuceneDataMapDistributable.java |   0
 .../datamap/lucene/LuceneDataMapFactoryBase.java   |   0
 .../datamap/lucene/LuceneDataMapWriter.java        |   0
 .../datamap/lucene/LuceneFineGrainDataMap.java     |   0
 .../lucene/LuceneFineGrainDataMapFactory.java      |   0
 {secondary_index => index/secondary-index}/pom.xml |  12 ++++----
 .../CarbonDataFileMergeTestCaseOnSI.scala          |   0
 .../CarbonIndexFileMergeTestCaseWithSI.scala       |   0
 .../testsuite/secondaryindex/DropTableTest.scala   |   0
 .../InsertIntoCarbonTableTestCase.scala            |   0
 ...tAlterTableColumnRenameWithSecondaryIndex.scala |   0
 .../TestBroadCastSIFilterPushJoinWithUDF.scala     |   0
 .../TestCTASWithSecondaryIndex.scala               |   0
 .../secondaryindex/TestCacheOperationsForSI.scala  |   0
 .../testsuite/secondaryindex/TestCarbonJoin.scala  |   0
 .../TestCreateIndexForCleanAndDeleteSegment.scala  |   0
 .../secondaryindex/TestCreateIndexTable.scala      |   0
 .../TestCreateIndexWithLoadAndCompaction.scala     |   6 ++--
 .../TestLikeQueryWithSecondaryIndex.scala          |   0
 .../TestNIQueryWithSecondaryIndex.scala            |   0
 .../TestRegisterIndexCarbonTable.scala             |   0
 .../secondaryindex/TestSIWithAddSegment.scala      |   0
 .../secondaryindex/TestSIWithSecondryIndex.scala   |   0
 .../TestSecondaryIndexForORFilterPushDown.scala    |   0
 .../TestSecondaryIndexWithAggQueries.scala         |   0
 .../secondaryindex/TestSecondaryIndexWithIUD.scala |   0
 ...IndexWithIndexOnFirstColumnAndSortColumns.scala |   0
 .../TestSecondaryIndexWithLocalDictionary.scala    |   0
 .../TestSecondaryIndexWithUnsafeColumnPage.scala   |   0
 .../apache/spark/util/TestCarbonSegmentUtil.scala  |  34 ++++++++++-----------
 {datamap/mv => mv}/core/pom.xml                    |   6 ++--
 .../carbondata/mv/extension/MVAnalyzerRule.scala   |   0
 .../mv/extension/MVDataMapProvider.scala           |   0
 .../carbondata/mv/extension/MVExtension.scala      |   0
 .../mv/extension/MVExtensionSqlParser.scala        |   0
 .../apache/carbondata/mv/extension/MVHelper.scala  |   0
 .../apache/carbondata/mv/extension/MVParser.scala  |   0
 .../apache/carbondata/mv/extension/MVUtil.scala    |   0
 .../command/CreateMaterializedViewCommand.scala    |   0
 .../command/DropMaterializedViewCommand.scala      |   0
 .../command/RefreshMaterializedViewCommand.scala   |   0
 .../command/ShowMaterializedViewCommand.scala      |   0
 .../carbondata/mv/rewrite/DefaultMatchMaker.scala  |   0
 .../org/apache/carbondata/mv/rewrite/MVUdf.scala   |   0
 .../apache/carbondata/mv/rewrite/MatchMaker.scala  |   0
 .../apache/carbondata/mv/rewrite/Navigator.scala   |   0
 .../carbondata/mv/rewrite/QueryRewrite.scala       |   0
 .../mv/rewrite/SummaryDatasetCatalog.scala         |   0
 .../org/apache/carbondata/mv/rewrite/Utils.scala   |   0
 .../apache/carbondata/mv/session/MVSession.scala   |   0
 .../mv/session/internal/SessionState.scala         |   0
 .../carbondata/mv/timeseries/Granularity.java      |   0
 .../mv/timeseries/TimeSeriesFunction.scala         |   0
 .../carbondata/mv/timeseries/TimeSeriesUtil.scala  |   0
 .../carbondata/mv/extension/MVOptimizer.scala      |   0
 .../carbondata/mv/extension/MVOptimizer.scala      |   0
 .../mv/plans/ExtractJoinConditionsSuite.scala      |   0
 .../apache/carbondata/mv/plans/IsSPJGHSuite.scala  |   0
 .../mv/plans/LogicalToModularPlanSuite.scala       |   0
 .../carbondata/mv/plans/ModularToSQLSuite.scala    |   1 -
 .../carbondata/mv/plans/SignatureSuite.scala       |   1 -
 .../mv/plans/Tpcds_1_4_BenchmarkSuite.scala        |   0
 .../carbondata/mv/rewrite/MVCoalesceTestCase.scala |   0
 .../mv/rewrite/MVCountAndCaseTestCase.scala        |   0
 .../carbondata/mv/rewrite/MVCreateTestCase.scala   |   4 +--
 .../mv/rewrite/MVExceptionTestCase.scala           |   0
 .../mv/rewrite/MVFilterAndJoinTest.scala           |   1 +
 .../mv/rewrite/MVIncrementalLoadingTestcase.scala  |  16 +++++++---
 .../carbondata/mv/rewrite/MVInvalidTestCase.scala  |   0
 .../mv/rewrite/MVMultiJoinTestCase.scala           |   0
 .../carbondata/mv/rewrite/MVRewriteTestCase.scala  |   0
 .../carbondata/mv/rewrite/MVSampleTestCase.scala   |   4 +--
 .../carbondata/mv/rewrite/MVTPCDSTestCase.scala    |   4 +--
 .../carbondata/mv/rewrite/MVTpchTestCase.scala     |   4 +--
 .../mv/rewrite/SelectAllColumnsSuite.scala         |   0
 .../rewrite/SelectSelectExactChildrenSuite.scala   |   0
 .../mv/rewrite/TestAllOperationsOnMV.scala         |  14 ++++++---
 .../mv/rewrite/TestPartitionWithMV.scala           |   1 +
 .../carbondata/mv/rewrite/TestSQLSuite.scala       |   3 +-
 .../carbondata/mv/rewrite/Tpcds_1_4_Suite.scala    |   1 -
 .../mv/rewrite/matching/TestSQLBatch.scala         |   0
 .../mv/rewrite/matching/TestTPCDS_1_4_Batch.scala  |   0
 .../carbondata/mv/testutil/ModularPlanTest.scala   |   0
 .../carbondata/mv/testutil/TestSQLBatch.scala      |   0
 .../carbondata/mv/testutil/TestSQLBatch2.scala     |   0
 .../mv/testutil/Tpcds_1_4_QueryBatch.scala         |   0
 .../carbondata/mv/testutil/Tpcds_1_4_Tables.scala  |   0
 .../TestMVTimeSeriesCreateDataMapCommand.scala     |   0
 .../timeseries/TestMVTimeSeriesLoadAndQuery.scala  |   0
 .../timeseries/TestMVTimeSeriesQueryRollUp.scala   |   0
 {datamap/mv => mv}/plan/pom.xml                    |   6 ++--
 .../org/apache/carbondata/mv/dsl/package.scala     |   0
 .../mv/expressions/modular/subquery.scala          |   0
 .../mv/plans/modular/AggregatePushDown.scala       |   0
 .../apache/carbondata/mv/plans/modular/Flags.scala |   0
 .../carbondata/mv/plans/modular/Harmonizer.scala   |   0
 .../mv/plans/modular/ModularPatterns.scala         |   0
 .../carbondata/mv/plans/modular/ModularPlan.scala  |   0
 .../modular/ModularPlanSignatureGenerator.scala    |   0
 .../mv/plans/modular/ModularRelation.scala         |   0
 .../carbondata/mv/plans/modular/Modularizer.scala  |   0
 .../mv/plans/modular/basicOperators.scala          |   0
 .../carbondata/mv/plans/modular/queryGraph.scala   |   0
 .../org/apache/carbondata/mv/plans/package.scala   |   0
 .../mv/plans/util/BirdcageOptimizer.scala          |   0
 .../mv/plans/util/Logical2ModularExtractions.scala |   0
 .../plans/util/LogicalPlanSignatureGenerator.scala |   0
 .../apache/carbondata/mv/plans/util/Printers.scala |   0
 .../apache/carbondata/mv/plans/util/SQLBuild.scala |   0
 .../carbondata/mv/plans/util/SQLBuildDSL.scala     |   0
 .../carbondata/mv/plans/util/SQLBuilder.scala      |   0
 .../carbondata/mv/plans/util/Signature.scala       |   0
 .../carbondata/mv/plans/util/TableCluster.scala    |   0
 {store => sdk}/CSDK/CMakeLists.txt                 |   0
 {store => sdk}/CSDK/src/CarbonProperties.cpp       |   0
 {store => sdk}/CSDK/src/CarbonProperties.h         |   0
 {store => sdk}/CSDK/src/CarbonReader.cpp           |   0
 {store => sdk}/CSDK/src/CarbonReader.h             |   0
 {store => sdk}/CSDK/src/CarbonRow.cpp              |   0
 {store => sdk}/CSDK/src/CarbonRow.h                |   0
 {store => sdk}/CSDK/src/CarbonSchemaReader.cpp     |   0
 {store => sdk}/CSDK/src/CarbonSchemaReader.h       |   0
 {store => sdk}/CSDK/src/CarbonWriter.cpp           |   0
 {store => sdk}/CSDK/src/CarbonWriter.h             |   0
 {store => sdk}/CSDK/src/Configuration.cpp          |   0
 {store => sdk}/CSDK/src/Configuration.h            |   0
 {store => sdk}/CSDK/src/Schema.cpp                 |   0
 {store => sdk}/CSDK/src/Schema.h                   |   0
 {store => sdk}/CSDK/test/main.cpp                  |   2 +-
 {store => sdk}/sdk/pom.xml                         |   4 +--
 .../carbondata/sdk/file/ArrowCarbonReader.java     |   0
 .../carbondata/sdk/file/AvroCarbonWriter.java      |   0
 .../carbondata/sdk/file/CSVCarbonWriter.java       |   0
 .../apache/carbondata/sdk/file/CarbonReader.java   |   0
 .../carbondata/sdk/file/CarbonReaderBuilder.java   |   0
 .../carbondata/sdk/file/CarbonSchemaReader.java    |   0
 .../apache/carbondata/sdk/file/CarbonWriter.java   |   0
 .../carbondata/sdk/file/CarbonWriterBuilder.java   |   0
 .../java/org/apache/carbondata/sdk/file/Field.java |   0
 .../carbondata/sdk/file/JsonCarbonWriter.java      |   0
 .../org/apache/carbondata/sdk/file/RowUtil.java    |   0
 .../org/apache/carbondata/sdk/file/Schema.java     |   0
 .../org/apache/carbondata/sdk/file/TestUtil.java   |   0
 .../carbondata/sdk/file/arrow/ArrowConverter.java  |   0
 .../sdk/file/arrow/ArrowFieldWriter.java           |   0
 .../carbondata/sdk/file/arrow/ArrowUtils.java      |   0
 .../carbondata/sdk/file/arrow/ArrowWriter.java     |   0
 .../apache/carbondata/sdk/file/utils/SDKUtil.java  |   0
 .../carbondata/store/CarbonRowReadSupport.java     |   0
 .../org/apache/carbondata/store/CarbonStore.java   |   0
 .../apache/carbondata/store/LocalCarbonStore.java  |   0
 .../carbondata/store/MetaCachedCarbonStore.java    |   0
 .../sdk/src/main/resources/log4j.properties        |   0
 .../carbondata/sdk/file/ArrowCarbonReaderTest.java |   0
 .../carbondata/sdk/file/AvroCarbonWriterTest.java  |   0
 .../carbondata/sdk/file/CSVCarbonWriterTest.java   |   0
 .../carbondata/sdk/file/CarbonReaderTest.java      |   0
 .../sdk/file/CarbonSchemaReaderTest.java           |   0
 .../sdk/file/ConcurrentAvroSdkWriterTest.java      |   0
 .../sdk/file/ConcurrentSdkReaderTest.java          |   0
 .../sdk/file/ConcurrentSdkWriterTest.java          |   0
 .../org/apache/carbondata/sdk/file/ImageTest.java  |   0
 .../org/apache/carbondata/sdk/file/MinMaxTest.java |   0
 .../sdk/file/MultithreadSDKBlockletReaderTest.java |   0
 .../carbondata/store/LocalCarbonStoreTest.java     |   0
 .../org/apache/carbondata/util/BinaryUtil.java     |   0
 .../src/test/resources/image/carbondatalogo.jpg    | Bin
 .../image/flowers/10686568196_b1915544a8.jpg       | Bin
 .../image/flowers/10686568196_b1915544a8.txt       |   0
 .../image/flowers/10712722853_5632165b04.jpg       | Bin
 .../image/flowers/10712722853_5632165b04.txt       |   0
 .../flowers/subfolder/10841136265_af473efc60.jpg   | Bin
 .../flowers/subfolder/10841136265_af473efc60.txt   |   0
 .../src/test/resources/image/voc/2007_000027.jpg   | Bin
 .../src/test/resources/image/voc/2007_000027.xml   |   0
 .../src/test/resources/image/voc}/2007_000032.jpg  | Bin
 .../src/test/resources/image/voc/2007_000032.xml   |   0
 .../src/test/resources/image/voc}/2007_000033.jpg  | Bin
 .../src/test/resources/image/voc/2007_000033.xml   |   0
 .../src/test/resources/image/voc/2007_000039.jpg   | Bin
 .../src/test/resources/image/voc/2007_000039.xml   |   0
 .../src/test/resources/image/voc/2009_001444.jpg   | Bin
 .../src/test/resources/image/voc/2009_001444.xml   |   0
 .../image/vocForSegmentationClass}/2007_000032.jpg | Bin
 .../image/vocForSegmentationClass/2007_000032.png  | Bin
 .../image/vocForSegmentationClass}/2007_000033.jpg | Bin
 .../image/vocForSegmentationClass/2007_000033.png  | Bin
 .../image/vocForSegmentationClass/2007_000042.jpg  | Bin
 .../image/vocForSegmentationClass/2007_000042.png  | Bin
 261 files changed, 117 insertions(+), 105 deletions(-)

diff --git a/examples/spark2/pom.xml b/examples/spark/pom.xml
similarity index 98%
rename from examples/spark2/pom.xml
rename to examples/spark/pom.xml
index b08ea7a..0be57aa 100644
--- a/examples/spark2/pom.xml
+++ b/examples/spark/pom.xml
@@ -41,7 +41,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
+      <artifactId>carbondata-spark</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
@@ -51,7 +51,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-store-sdk</artifactId>
+      <artifactId>carbondata-sdk</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
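
For downstream builds that consume these artifacts, the rename means updating dependency
coordinates from carbondata-spark2 to carbondata-spark and from carbondata-store-sdk to
carbondata-sdk. A hypothetical sbt sketch of the coordinate change only (the version value
is a placeholder; CarbonData itself builds with Maven):

    // build.sbt sketch; carbondataVersion is a hypothetical placeholder
    val carbondataVersion = "x.y.z" // assumption: whichever release carries this refactor

    libraryDependencies ++= Seq(
      "org.apache.carbondata" % "carbondata-spark" % carbondataVersion, // was carbondata-spark2
      "org.apache.carbondata" % "carbondata-sdk"   % carbondataVersion  // was carbondata-store-sdk
    )
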
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java b/examples/spark/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
similarity index 100%
rename from examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
rename to examples/spark/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java b/examples/spark/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
similarity index 100%
rename from examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
rename to examples/spark/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java b/examples/spark/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
similarity index 100%
rename from examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
rename to examples/spark/src/main/java/org/apache/carbondata/examples/sdk/SDKS3ReadExample.java
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3SchemaReadExample.java b/examples/spark/src/main/java/org/apache/carbondata/examples/sdk/SDKS3SchemaReadExample.java
similarity index 100%
rename from examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3SchemaReadExample.java
rename to examples/spark/src/main/java/org/apache/carbondata/examples/sdk/SDKS3SchemaReadExample.java
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java b/examples/spark/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
similarity index 97%
rename from examples/spark2/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
rename to examples/spark/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
index 6bee158..458263f 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
+++ b/examples/spark/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
@@ -59,7 +59,7 @@ public class JavaCarbonSessionExample {
     String rootPath =
         new File(JavaCarbonSessionExample.class.getResource("/").getPath() + "../../../..")
             .getCanonicalPath();
-    String path = rootPath + "/examples/spark2/src/main/resources/data.csv";
+    String path = rootPath + "/examples/spark/src/main/resources/data.csv";
     carbon.sql("LOAD DATA LOCAL INPATH " + "\'" + path + "\' " + "INTO TABLE source "
         + "OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')");
 
diff --git a/examples/spark2/src/main/resources/Test_Data1.csv b/examples/spark/src/main/resources/Test_Data1.csv
similarity index 100%
rename from examples/spark2/src/main/resources/Test_Data1.csv
rename to examples/spark/src/main/resources/Test_Data1.csv
diff --git a/examples/spark2/src/main/resources/complexdata.csv b/examples/spark/src/main/resources/complexdata.csv
similarity index 100%
rename from examples/spark2/src/main/resources/complexdata.csv
rename to examples/spark/src/main/resources/complexdata.csv
diff --git a/examples/spark2/src/main/resources/data.csv b/examples/spark/src/main/resources/data.csv
similarity index 100%
rename from examples/spark2/src/main/resources/data.csv
rename to examples/spark/src/main/resources/data.csv
diff --git a/examples/spark2/src/main/resources/data1.csv b/examples/spark/src/main/resources/data1.csv
similarity index 100%
rename from examples/spark2/src/main/resources/data1.csv
rename to examples/spark/src/main/resources/data1.csv
diff --git a/examples/spark2/src/main/resources/dataSample.csv b/examples/spark/src/main/resources/dataSample.csv
similarity index 100%
rename from examples/spark2/src/main/resources/dataSample.csv
rename to examples/spark/src/main/resources/dataSample.csv
diff --git a/examples/spark2/src/main/resources/dimSample.csv b/examples/spark/src/main/resources/dimSample.csv
similarity index 100%
rename from examples/spark2/src/main/resources/dimSample.csv
rename to examples/spark/src/main/resources/dimSample.csv
diff --git a/examples/spark2/src/main/resources/factSample.csv b/examples/spark/src/main/resources/factSample.csv
similarity index 100%
rename from examples/spark2/src/main/resources/factSample.csv
rename to examples/spark/src/main/resources/factSample.csv
diff --git a/examples/spark2/src/main/resources/log4j.properties b/examples/spark/src/main/resources/log4j.properties
similarity index 100%
rename from examples/spark2/src/main/resources/log4j.properties
rename to examples/spark/src/main/resources/log4j.properties
diff --git a/examples/spark2/src/main/resources/sample.csv b/examples/spark/src/main/resources/sample.csv
similarity index 100%
rename from examples/spark2/src/main/resources/sample.csv
rename to examples/spark/src/main/resources/sample.csv
diff --git a/examples/spark2/src/main/resources/streamSample.csv b/examples/spark/src/main/resources/streamSample.csv
similarity index 100%
rename from examples/spark2/src/main/resources/streamSample.csv
rename to examples/spark/src/main/resources/streamSample.csv
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/ConcurrentQueryBenchmark.scala b/examples/spark/src/main/scala/org/apache/carbondata/benchmark/ConcurrentQueryBenchmark.scala
similarity index 99%
rename from examples/spark2/src/main/scala/org/apache/carbondata/benchmark/ConcurrentQueryBenchmark.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/benchmark/ConcurrentQueryBenchmark.scala
index 17012c4..d1ca452 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/ConcurrentQueryBenchmark.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/benchmark/ConcurrentQueryBenchmark.scala
@@ -470,7 +470,7 @@ object ConcurrentQueryBenchmark {
       runInLocal = if (arr(5).equalsIgnoreCase("true")) {
         val rootPath = new File(this.getClass.getResource("/").getPath
           + "../../../..").getCanonicalPath
-        storeLocation = s"$rootPath/examples/spark2/target/store"
+        storeLocation = s"$rootPath/examples/spark/target/store"
         true
       } else if (arr(5).equalsIgnoreCase("false")) {
         false
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/Query.scala b/examples/spark/src/main/scala/org/apache/carbondata/benchmark/Query.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/benchmark/Query.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/benchmark/Query.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SCDType2Benchmark.scala b/examples/spark/src/main/scala/org/apache/carbondata/benchmark/SCDType2Benchmark.scala
similarity index 99%
rename from examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SCDType2Benchmark.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/benchmark/SCDType2Benchmark.scala
index a2e1b6e..823c071 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SCDType2Benchmark.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/benchmark/SCDType2Benchmark.scala
@@ -138,7 +138,7 @@ object SCDType2Benchmark {
       .builder()
       .master("local[8]")
       .enableHiveSupport()
-      .config("spark.sql.warehouse.dir", s"$rootPath/examples/spark2/target/warehouse")
+      .config("spark.sql.warehouse.dir", s"$rootPath/examples/spark/target/warehouse")
       .getOrCreateCarbonSession()
     spark.sparkContext.setLogLevel("error")
 
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala b/examples/spark/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
similarity index 99%
rename from examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
index 4e69c5a..694acde 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
@@ -312,7 +312,7 @@ object SimpleQueryBenchmark {
     import org.apache.spark.sql.CarbonSession._
     val rootPath = new File(this.getClass.getResource("/").getPath
         + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/examples/spark2/target/store"
+    val storeLocation = s"$rootPath/examples/spark/target/store"
     val master = Option(System.getProperty("spark.master"))
       .orElse(sys.env.get("MASTER"))
       .orElse(Option("local[8]"))
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlterTableExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/AlterTableExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/AlterTableExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/AlterTableExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
similarity index 96%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
index e3411aa..4f9a9bd 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
@@ -31,10 +31,10 @@ object CarbonSessionExample {
   def main(args: Array[String]) {
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
-    System.setProperty("path.target", s"$rootPath/examples/spark2/target")
+    System.setProperty("path.target", s"$rootPath/examples/spark/target")
     // print profiler log to a separated file: target/profiler.log
     PropertyConfigurator.configure(
-      s"$rootPath/examples/spark2/src/main/resources/log4j.properties")
+      s"$rootPath/examples/spark/src/main/resources/log4j.properties")
 
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "false")
@@ -76,7 +76,7 @@ object CarbonSessionExample {
          | $formatSyntax
        """.stripMargin)
 
-    val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
+    val path = s"$rootPath/examples/spark/src/main/resources/data.csv"
 
     // scalastyle:off
     spark.sql(
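
For context, the examples now resolve resources and build output under examples/spark
instead of examples/spark2. A minimal self-contained sketch of the load pattern above,
assuming a table named source already exists and using a plain SparkSession with
CarbonExtensions rather than the project's ExampleUtils helper:

    import java.io.File

    import org.apache.spark.sql.SparkSession

    object LoadDataSketch {
      def main(args: Array[String]): Unit = {
        // Resolve the repository root relative to the compiled classes, as the examples do
        val rootPath = new File(this.getClass.getResource("/").getPath
                                + "../../../..").getCanonicalPath
        // Resource paths moved from examples/spark2 to examples/spark in this refactor
        val path = s"$rootPath/examples/spark/src/main/resources/data.csv"
        val spark = SparkSession.builder()
          .master("local[2]")
          .appName("LoadDataSketch")
          .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
          .getOrCreate()
        spark.sql(s"LOAD DATA LOCAL INPATH '$path' INTO TABLE source " +
          "OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')")
        spark.close()
      }
    }
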
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
index a521bdc..8ccaa0f 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
@@ -85,7 +85,7 @@ object CarbonSortColumnsExample {
          | TBLPROPERTIES('SORT_COLUMNS'='intField, stringField, charField')
        """.stripMargin)
 
-    val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
+    val path = s"$rootPath/examples/spark/src/main/resources/data.csv"
 
     // scalastyle:off
     spark.sql(
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
similarity index 97%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
index cfa2766..e7a6a10 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
@@ -61,7 +61,7 @@ object CustomCompactionExample {
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val path = s"$rootPath/examples/spark2/src/main/resources/dataSample.csv"
+    val path = s"$rootPath/examples/spark/src/main/resources/dataSample.csv"
 
     // load 4 segments
     // scalastyle:off
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameComplexTypeExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameComplexTypeExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameComplexTypeExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameComplexTypeExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
index 814a553..d267d71 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
@@ -53,7 +53,7 @@ object DataManagementExample {
 
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
-    val path = s"$rootPath/examples/spark2/src/main/resources/dataSample.csv"
+    val path = s"$rootPath/examples/spark/src/main/resources/dataSample.csv"
 
     // load data 5 times, each load of data is called a segment in CarbonData
     // scalastyle:off
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
index 1b0145a..14b249c 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
@@ -44,7 +44,7 @@ object DirectSQLExample {
 
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val path = s"$rootPath/examples/spark2/target/carbonFile/"
+    val path = s"$rootPath/examples/spark/target/carbonFile/"
 
     import carbonSession._
     // 1. generate data file
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
similarity index 97%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
index 2fb62b3..1b1f77c 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
@@ -64,7 +64,7 @@ object ExternalTableExample {
 
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
-    val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
+    val path = s"$rootPath/examples/spark/src/main/resources/data.csv"
 
     // load 4 times, each load has 10 rows data
     // scalastyle:off
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
similarity index 97%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
index 5b2332c..bddc8ae 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
@@ -33,7 +33,7 @@ object HadoopFileExample {
     val spark = ExampleUtils.createSparkSession("HadoopFileExample")
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
-    val storeLocation: String = rootPath + "/examples/spark2/target/store/default"
+    val storeLocation: String = rootPath + "/examples/spark/target/store/default"
     exampleBody(spark, storeLocation)
     spark.close()
   }
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
similarity index 94%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
index 7f0a23b..0e827e6 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
@@ -34,10 +34,10 @@ object HiveExample {
   private val driverName: String = "org.apache.hive.jdbc.HiveDriver"
   val rootPath = new File(this.getClass.getResource("/").getPath
                           + "../../../..").getCanonicalPath
-  val targetLoc = s"$rootPath/examples/spark2/target"
-  System.setProperty("derby.system.home", s"$targetLoc")
-  val metaStoreLoc = s"$targetLoc/metastore_db"
-  val logger = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+  val projectLoc = s"$rootPath/examples/spark"
+  System.setProperty("derby.system.home", s"$projectLoc")
+  val metaStoreLoc = s"$projectLoc/metastore_db"
+  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
 
   def main(args: Array[String]) {
     val sparkSession = ExampleUtils.createSparkSession("HiveExample")
@@ -58,7 +58,7 @@ object HiveExample {
        """.stripMargin)
 
     var inputPath = FileFactory
-      .getUpdatedFilePath(s"$rootPath/examples/spark2/src/main/resources/sample.csv")
+      .getUpdatedFilePath(s"$rootPath/examples/spark/src/main/resources/sample.csv")
 
     sparkSession.sql(
       s"""
@@ -83,7 +83,7 @@ object HiveExample {
            |STORED AS carbondata""".stripMargin)
 
     inputPath = FileFactory
-      .getUpdatedFilePath(s"$rootPath/examples/spark2/src/main/resources/Test_Data1.csv")
+      .getUpdatedFilePath(s"$rootPath/examples/spark/src/main/resources/Test_Data1.csv")
 
     sparkSession
       .sql(
@@ -127,17 +127,17 @@ object HiveExample {
     }
 
     // make HDFS writable
-    val path = new Path(targetLoc)
+    val path = new Path(projectLoc)
     val fileSys = path.getFileSystem(FileFactory.getConfiguration)
     fileSys.setPermission(path, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
 
     val hiveEmbeddedServer2 = new HiveEmbeddedServer2()
-    hiveEmbeddedServer2.start(targetLoc)
+    hiveEmbeddedServer2.start(projectLoc)
     val port = hiveEmbeddedServer2.getFreePort
     val connection = DriverManager.getConnection(s"jdbc:hive2://localhost:$port/default", "", "")
     val statement: Statement = connection.createStatement
 
-    logger.info(s"============HIVE CLI IS STARTED ON PORT $port ==============")
+    LOGGER.info(s"============HIVE CLI IS STARTED ON PORT $port ==============")
 
     val resultSet: ResultSet = statement.executeQuery("SELECT * FROM HIVE_CARBON_EXAMPLE")
 
@@ -173,7 +173,7 @@ object HiveExample {
     println(s"******Total Number Of Rows Fetched ****** $rowsFetched")
     assert(rowsFetched == 4)
 
-    logger.info("Fetching the Individual Columns ")
+    LOGGER.info("Fetching the Individual Columns ")
 
     // fetching the separate columns
     var individualColRowsFetched = 0
@@ -204,7 +204,7 @@ object HiveExample {
             s"$individualColRowsFetched")
     assert(individualColRowsFetched == 4)
 
-    logger.info("Fetching the Out Of Order Columns ")
+    LOGGER.info("Fetching the Out Of Order Columns ")
 
     val resultOutOfOrderCol = statement
       .executeQuery("SELECT SALARY,ID,NAME FROM HIVE_CARBON_EXAMPLE")
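
The JDBC access pattern this example exercises is unchanged by the rename; a minimal
sketch, assuming a HiveServer2 instance is already listening (the port value here is an
assumption, where HiveExample obtains it from HiveEmbeddedServer2.getFreePort):

    import java.sql.{DriverManager, ResultSet, Statement}

    object HiveJdbcSketch {
      def main(args: Array[String]): Unit = {
        // Same driver class the example registers
        Class.forName("org.apache.hive.jdbc.HiveDriver")
        val port = 10000 // assumption: default HiveServer2 port; the example uses a free port
        val connection = DriverManager.getConnection(
          s"jdbc:hive2://localhost:$port/default", "", "")
        val statement: Statement = connection.createStatement
        val resultSet: ResultSet = statement.executeQuery("SELECT * FROM HIVE_CARBON_EXAMPLE")
        while (resultSet.next()) {
          // Three columns, matching the example's out-of-order query over SALARY, ID, NAME
          println(s"${resultSet.getString(1)}, ${resultSet.getString(2)}, ${resultSet.getString(3)}")
        }
        connection.close()
      }
    }
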
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/MVExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/MVExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/MVExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/MVExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
index fdcd029..3a8e630 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
@@ -63,7 +63,7 @@ object QuerySegmentExample {
 
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
-    val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
+    val path = s"$rootPath/examples/spark/src/main/resources/data.csv"
 
     // load 4 segments, each load has 10 rows data
     // scalastyle:off
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
similarity index 96%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
index 64a836e..f9a5b90 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
@@ -52,7 +52,7 @@ object S3CsvExample {
       .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
       .getOrCreate()
 
-    spark.sparkContext.setLogLevel("INFO")
+    spark.sparkContext.setLogLevel("ERROR")
 
     spark.sql(
       s"""
@@ -69,7 +69,7 @@ object S3CsvExample {
          | floatField FLOAT
          | )
          | STORED AS carbondata
-         | LOCATION '$rootPath/examples/spark2/target/store'
+         | LOCATION '$rootPath/examples/spark/target/store'
        """.stripMargin)
 
     spark.sql(
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/S3Example.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/S3Example.scala
index 1ae1dec..aa2a39e 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/S3Example.scala
@@ -38,7 +38,7 @@ object S3Example {
   def main(args: Array[String]) {
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val path = s"$rootPath/examples/spark2/src/main/resources/data1.csv"
+    val path = s"$rootPath/examples/spark/src/main/resources/data1.csv"
     val logger: Logger = LoggerFactory.getLogger(this.getClass)
 
     if (args.length < 3 || args.length > 5) {
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
index 9651471..cee83bc 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
@@ -35,7 +35,7 @@ object SparkSessionExample {
                           + "../../../..").getCanonicalPath
   def main(args: Array[String]): Unit = {
     val sparkSession = ExampleUtils.createSparkSession("SparkSessionExample")
-    val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
+    val path = s"$rootPath/examples/spark/src/main/resources/data.csv"
     sparkSession.sql("DROP TABLE IF EXISTS csv_table")
     sparkSession.sql(
       s"""
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
index 29b05cd..67bcdae 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
@@ -46,7 +46,7 @@ object SparkStreamingExample {
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
     val checkpointPath =
-      s"$rootPath/examples/spark2/target/spark_streaming_cp_" +
+      s"$rootPath/examples/spark/target/spark_streaming_cp_" +
       System.currentTimeMillis().toString()
     val streamTableName = s"dstream_stream_table"
 
@@ -73,7 +73,7 @@ object SparkStreamingExample {
            | """.stripMargin)
       val carbonTable = CarbonEnv.getCarbonTable(Some("default"), streamTableName)(spark)
       // batch load
-      val path = s"$rootPath/examples/spark2/src/main/resources/streamSample.csv"
+      val path = s"$rootPath/examples/spark/src/main/resources/streamSample.csv"
       spark.sql(
         s"""
            | LOAD DATA LOCAL INPATH '$path'
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
index c6c1e0a..c9c27f8 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
@@ -42,7 +42,7 @@ object StandardPartitionExample {
       .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
-    val testData = s"$rootPath/integration/spark-common-test/src/test/resources/" +
+    val testData = s"$rootPath/integration/spark/src/test/resources/" +
                    s"partition_data_example.csv"
     /**
      * 1. Partition basic usages
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamSQLExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/StreamSQLExample.scala
similarity index 100%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamSQLExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/StreamSQLExample.scala
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
similarity index 97%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
index 9d5346b..6a42d79 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
@@ -42,7 +42,7 @@ object StreamingUsingBatchLoadExample {
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
     val checkpointPath =
-      s"$rootPath/examples/spark2/target/spark_streaming_cp_" +
+      s"$rootPath/examples/spark/target/spark_streaming_cp_" +
       System.currentTimeMillis().toString()
     val streamTableName = s"dstream_batch_table"
 
@@ -72,7 +72,7 @@ object StreamingUsingBatchLoadExample {
 
       val carbonTable = CarbonEnv.getCarbonTable(Some("default"), streamTableName)(spark)
       // batch load
-      val path = s"$rootPath/examples/spark2/src/main/resources/streamSample.csv"
+      val path = s"$rootPath/examples/spark/src/main/resources/streamSample.csv"
       spark.sql(
         s"""
            | LOAD DATA LOCAL INPATH '$path'
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
index cd206b6..ae877d1 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
@@ -78,7 +78,7 @@ object StreamingWithRowParserExample {
 
       val carbonTable = CarbonEnv.getCarbonTable(Some("default"), streamTableName)(spark)
       // batch load
-      val path = s"$rootPath/examples/spark2/src/main/resources/streamSample.csv"
+      val path = s"$rootPath/examples/spark/src/main/resources/streamSample.csv"
       spark.sql(
         s"""
            | LOAD DATA LOCAL INPATH '$path'
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
index 0f7dc44..c6b032f 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
@@ -75,7 +75,7 @@ object StructuredStreamingExample {
 
       val carbonTable = CarbonEnv.getCarbonTable(Some("default"), streamTableName)(spark)
       // batch load
-      val path = s"$rootPath/examples/spark2/src/main/resources/streamSample.csv"
+      val path = s"$rootPath/examples/spark/src/main/resources/streamSample.csv"
       spark.sql(
         s"""
            | LOAD DATA LOCAL INPATH '$path'
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
similarity index 98%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
index f2fbbfb..18f7d7e 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
@@ -66,7 +66,7 @@ object TableLevelCompactionOptionExample {
 
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
-    val path = s"$rootPath/examples/spark2/src/main/resources/dataSample.csv"
+    val path = s"$rootPath/examples/spark/src/main/resources/dataSample.csv"
 
     // load 6 segments
     // scalastyle:off
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
similarity index 95%
rename from examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
rename to examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
index 070d709..191dd82 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
+++ b/examples/spark/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
@@ -35,13 +35,13 @@ object ExampleUtils {
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
 
-    val warehouse = s"$rootPath/examples/spark2/target/warehouse"
-    val metaStoreDB = s"$rootPath/examples/spark2/target"
+    val warehouse = s"$rootPath/examples/spark/target/warehouse"
+    val metaStoreDB = s"$rootPath/examples/spark/target"
 
     val storeLocation = if (null != storePath) {
       storePath
     } else {
-      s"$rootPath/examples/spark2/target/store"
+      s"$rootPath/examples/spark/target/store"
     }
 
     CarbonProperties.getInstance()
@@ -74,7 +74,7 @@ object ExampleUtils {
   def createSparkSession(appName: String, workThreadNum: Int = 1): SparkSession = {
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
-    val warehouse = s"$rootPath/examples/spark2/target/warehouse"
+    val warehouse = s"$rootPath/examples/spark/target/warehouse"
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd HH:mm:ss")
       .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
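
Most of the Scala examples obtain their session through this helper; a minimal usage
sketch under the renamed module (the example name is hypothetical):

    import org.apache.carbondata.examples.util.ExampleUtils

    object MySketchExample {
      def main(args: Array[String]): Unit = {
        // After the rename, createSparkSession resolves its warehouse under examples/spark/target
        val spark = ExampleUtils.createSparkSession("MySketchExample")
        spark.sql("SHOW TABLES").show()
        spark.close()
      }
    }
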
diff --git a/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala b/examples/spark/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
similarity index 98%
rename from examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
rename to examples/spark/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
index 95822db..7d737dd 100644
--- a/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
+++ b/examples/spark/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
@@ -41,7 +41,7 @@ class RunExamples extends QueryTest with BeforeAndAfterAll {
   override def beforeAll: Unit = {
     val rootPath = new File(this.getClass.getResource("/").getPath
       + "../../../..").getCanonicalPath
-    val targetLoc = s"$rootPath/examples/spark2/target"
+    val targetLoc = s"$rootPath/examples/spark/target"
 
     System.setProperty("derby.system.home", s"$targetLoc")
     CarbonProperties.getInstance().addProperty(
diff --git a/datamap/bloom/pom.xml b/index/bloom/pom.xml
similarity index 97%
rename from datamap/bloom/pom.xml
rename to index/bloom/pom.xml
index 6ec4d0e..0b391a3 100644
--- a/datamap/bloom/pom.xml
+++ b/index/bloom/pom.xml
@@ -11,7 +11,7 @@
   </parent>
 
   <artifactId>carbondata-bloom</artifactId>
-  <name>Apache CarbonData :: Bloom Index DataMap</name>
+  <name>Apache CarbonData :: Bloom Index</name>
 
   <properties>
     <dev.path>${basedir}/../../dev</dev.path>
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/AbstractBloomDataMapWriter.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCacheKeyValue.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCacheKeyValue.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCacheKeyValue.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCacheKeyValue.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapDistributable.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapDistributable.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapDistributable.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapDistributable.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapModel.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapModel.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapModel.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapModel.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomIndexFileStore.java
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/DataConvertUtil.java b/index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/DataConvertUtil.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/DataConvertUtil.java
rename to index/bloom/src/main/java/org/apache/carbondata/datamap/bloom/DataConvertUtil.java
diff --git a/datamap/bloom/src/main/java/org/apache/hadoop/util/bloom/CarbonBloomFilter.java b/index/bloom/src/main/java/org/apache/hadoop/util/bloom/CarbonBloomFilter.java
similarity index 100%
rename from datamap/bloom/src/main/java/org/apache/hadoop/util/bloom/CarbonBloomFilter.java
rename to index/bloom/src/main/java/org/apache/hadoop/util/bloom/CarbonBloomFilter.java
diff --git a/datamap/examples/pom.xml b/index/examples/pom.xml
similarity index 93%
rename from datamap/examples/pom.xml
rename to index/examples/pom.xml
index 7e32888..3fa440b 100644
--- a/datamap/examples/pom.xml
+++ b/index/examples/pom.xml
@@ -28,8 +28,8 @@
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>carbondata-datamap-examples</artifactId>
-  <name>Apache CarbonData :: DataMap Examples</name>
+  <artifactId>carbondata-index-examples</artifactId>
+  <name>Apache CarbonData :: Index Examples</name>
 
   <properties>
     <dev.path>${basedir}/../../dev</dev.path>
@@ -38,7 +38,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
+      <artifactId>carbondata-spark</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/BlockletMinMax.java b/index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/BlockletMinMax.java
similarity index 100%
rename from datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/BlockletMinMax.java
rename to index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/BlockletMinMax.java
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java b/index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
similarity index 100%
rename from datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
rename to index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxDataWriter.java
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexBlockDetails.java b/index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexBlockDetails.java
similarity index 100%
rename from datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexBlockDetails.java
rename to index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexBlockDetails.java
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java b/index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
similarity index 100%
rename from datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
rename to index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java b/index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
similarity index 100%
rename from datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
rename to index/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMapFactory.java
diff --git a/datamap/examples/src/minmaxdatamap/test/scala/org/apache/carbondata/datamap/examples/MinMaxDataMapSuite.scala b/index/examples/src/minmaxdatamap/test/scala/org/apache/carbondata/datamap/examples/MinMaxDataMapSuite.scala
similarity index 100%
rename from datamap/examples/src/minmaxdatamap/test/scala/org/apache/carbondata/datamap/examples/MinMaxDataMapSuite.scala
rename to index/examples/src/minmaxdatamap/test/scala/org/apache/carbondata/datamap/examples/MinMaxDataMapSuite.scala
diff --git a/datamap/lucene/pom.xml b/index/lucene/pom.xml
similarity index 98%
rename from datamap/lucene/pom.xml
rename to index/lucene/pom.xml
index cc7deb8..bb5f8d5 100644
--- a/datamap/lucene/pom.xml
+++ b/index/lucene/pom.xml
@@ -11,7 +11,7 @@
   </parent>
 
   <artifactId>carbondata-lucene</artifactId>
-  <name>Apache CarbonData :: Lucene Index DataMap</name>
+  <name>Apache CarbonData :: Lucene Index</name>
 
   <properties>
     <dev.path>${basedir}/../../dev</dev.path>
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java b/index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
similarity index 100%
rename from datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
rename to index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapDistributable.java b/index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapDistributable.java
similarity index 100%
rename from datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapDistributable.java
rename to index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapDistributable.java
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java b/index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
similarity index 100%
rename from datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
rename to index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java b/index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
similarity index 100%
rename from datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
rename to index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java b/index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
similarity index 100%
rename from datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
rename to index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java b/index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java
similarity index 100%
rename from datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java
rename to index/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java
diff --git a/secondary_index/pom.xml b/index/secondary-index/pom.xml
similarity index 96%
rename from secondary_index/pom.xml
rename to index/secondary-index/pom.xml
index 749d6a6..2160b2b 100644
--- a/secondary_index/pom.xml
+++ b/index/secondary-index/pom.xml
@@ -23,14 +23,14 @@
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
-    <relativePath>../pom.xml</relativePath>
+    <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>carbondata-secondary_index</artifactId>
-  <name>Apache CarbonData :: SecondaryIndex</name>
+  <artifactId>carbondata-secondary-index</artifactId>
+  <name>Apache CarbonData :: Secondary Index</name>
 
   <properties>
-    <dev.path>${basedir}/../dev</dev.path>
+    <dev.path>${basedir}/../../dev</dev.path>
     <jacoco.append>true</jacoco.append>
   </properties>
 
@@ -45,7 +45,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
+      <artifactId>carbondata-spark</artifactId>
       <version>${project.version}</version>
       <exclusions>
         <exclusion>
@@ -69,7 +69,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-store-sdk</artifactId>
+      <artifactId>carbondata-sdk</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
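With the module moves, downstream builds pick up renamed coordinates: carbondata-spark2 becomes carbondata-spark and carbondata-store-sdk becomes carbondata-sdk, as the POM hunk above shows. A hypothetical sbt equivalent of the updated Maven dependencies (coordinates taken from the POM; the sbt usage itself is illustrative, not part of this patch):

    // Renamed CarbonData artifacts after the re-factory (sbt syntax, illustrative only)
    libraryDependencies ++= Seq(
      "org.apache.carbondata" % "carbondata-spark" % "2.0.0-SNAPSHOT",
      "org.apache.carbondata" % "carbondata-sdk"   % "2.0.0-SNAPSHOT" % Test
    )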
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/mergeindex/CarbonIndexFileMergeTestCaseWithSI.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergeindex/CarbonIndexFileMergeTestCaseWithSI.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/mergeindex/CarbonIndexFileMergeTestCaseWithSI.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergeindex/CarbonIndexFileMergeTestCaseWithSI.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/InsertIntoCarbonTableTestCase.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/InsertIntoCarbonTableTestCase.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/InsertIntoCarbonTableTestCase.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/InsertIntoCarbonTableTestCase.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestAlterTableColumnRenameWithSecondaryIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestAlterTableColumnRenameWithSecondaryIndex.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestAlterTableColumnRenameWithSecondaryIndex.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestAlterTableColumnRenameWithSecondaryIndex.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestBroadCastSIFilterPushJoinWithUDF.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestBroadCastSIFilterPushJoinWithUDF.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestBroadCastSIFilterPushJoinWithUDF.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestBroadCastSIFilterPushJoinWithUDF.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCTASWithSecondaryIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCTASWithSecondaryIndex.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCTASWithSecondaryIndex.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCTASWithSecondaryIndex.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCacheOperationsForSI.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCacheOperationsForSI.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCacheOperationsForSI.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCacheOperationsForSI.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonJoin.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonJoin.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonJoin.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonJoin.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexForCleanAndDeleteSegment.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexForCleanAndDeleteSegment.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexForCleanAndDeleteSegment.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexForCleanAndDeleteSegment.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
similarity index 98%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
index f8a6f0f..b0ae754 100644
--- a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
@@ -23,7 +23,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatus, SegmentStatusManager}
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.spark.sql.hive.CarbonRelation
-import org.apache.spark.sql.test.Spark2TestQueryExecutor
+import org.apache.spark.sql.test.SparkTestQueryExecutor
 import org.apache.spark.sql.test.util.QueryTest
 
 import org.apache.carbondata.core.util.path.CarbonTablePath
@@ -206,8 +206,8 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
       sql("alter table si_compaction_test compact 'minor'")
 
       // get index table from relation
-      val indexCarbonTable = CarbonEnv.getInstance(Spark2TestQueryExecutor.spark).carbonMetaStore
-        .lookupRelation(Option("default"), "alter_i1")(Spark2TestQueryExecutor.spark)
+      val indexCarbonTable = CarbonEnv.getInstance(SparkTestQueryExecutor.spark).carbonMetaStore
+        .lookupRelation(Option("default"), "alter_i1")(SparkTestQueryExecutor.spark)
         .asInstanceOf[CarbonRelation].carbonTable
       // read load metadata details
       val loadDetails: Array[LoadMetadataDetails] = SegmentStatusManager
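With carbondata-spark2 folded into carbondata-spark, the test utility Spark2TestQueryExecutor becomes SparkTestQueryExecutor throughout the suites. A minimal sketch of the lookup pattern the hunk above relies on, assuming SparkTestQueryExecutor.spark still exposes the shared SparkSession:

    import org.apache.spark.sql.{CarbonEnv, SparkSession}
    import org.apache.spark.sql.hive.CarbonRelation

    // Resolve a Carbon table through the shared test session, as in the
    // updated test: metastore lookup, then unwrap the CarbonRelation.
    def lookupCarbonTable(spark: SparkSession, db: String, table: String) =
      CarbonEnv.getInstance(spark).carbonMetaStore
        .lookupRelation(Option(db), table)(spark)
        .asInstanceOf[CarbonRelation].carbonTable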
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestLikeQueryWithSecondaryIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestLikeQueryWithSecondaryIndex.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestLikeQueryWithSecondaryIndex.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestLikeQueryWithSecondaryIndex.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestNIQueryWithSecondaryIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestNIQueryWithSecondaryIndex.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestNIQueryWithSecondaryIndex.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestNIQueryWithSecondaryIndex.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestRegisterIndexCarbonTable.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestRegisterIndexCarbonTable.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestRegisterIndexCarbonTable.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestRegisterIndexCarbonTable.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithAddSegment.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithAddSegment.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithAddSegment.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithAddSegment.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondryIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondryIndex.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondryIndex.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondryIndex.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexForORFilterPushDown.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexForORFilterPushDown.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexForORFilterPushDown.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexForORFilterPushDown.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithAggQueries.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithAggQueries.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithAggQueries.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithAggQueries.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIUD.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIUD.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIUD.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIUD.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIndexOnFirstColumnAndSortColumns.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIndexOnFirstColumnAndSortColumns.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIndexOnFirstColumnAndSortColumns.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithIndexOnFirstColumnAndSortColumns.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithLocalDictionary.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithLocalDictionary.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithLocalDictionary.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithLocalDictionary.scala
diff --git a/secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithUnsafeColumnPage.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithUnsafeColumnPage.scala
similarity index 100%
rename from secondary_index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithUnsafeColumnPage.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSecondaryIndexWithUnsafeColumnPage.scala
diff --git a/secondary_index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala b/index/secondary-index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
similarity index 92%
rename from secondary_index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
rename to index/secondary-index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
index 1541581..5e81256 100644
--- a/secondary_index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
+++ b/index/secondary-index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.execution.strategy.CarbonDataSourceScan
 import org.apache.spark.sql.secondaryindex.joins.BroadCastSIFilterPushJoin
 import org.apache.spark.sql.secondaryindex.util.SecondaryIndexUtil
-import org.apache.spark.sql.test.{Spark2TestQueryExecutor, TestQueryExecutor}
+import org.apache.spark.sql.test.{SparkTestQueryExecutor, TestQueryExecutor}
 import org.apache.spark.sql.test.util.QueryTest
 
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatusManager}
@@ -60,7 +60,7 @@ class TestCarbonSegmentUtil extends QueryTest {
   def test_getFilteredSegmentsUsingDataFrame() {
     createTable(tableName)
     val expected = BroadCastSIFilterPushJoin
-      .getFilteredSegments(s"select * from $tableName", Spark2TestQueryExecutor.spark)
+      .getFilteredSegments(s"select * from $tableName", SparkTestQueryExecutor.spark)
     assert(expected.length == 4)
     dropTables(tableName)
   }
@@ -73,7 +73,7 @@ class TestCarbonSegmentUtil extends QueryTest {
     val exception = intercept[UnsupportedOperationException] {
       BroadCastSIFilterPushJoin
         .getFilteredSegments("select * from test_table t1 join test_table1 t2 on t1.c1=t2.c1",
-          Spark2TestQueryExecutor.spark)
+          SparkTestQueryExecutor.spark)
     }
     exception.getMessage.contains("Get Filter Segments API supports if and only if only " +
                                   "one carbon main table is present in query.")
@@ -88,7 +88,7 @@ class TestCarbonSegmentUtil extends QueryTest {
     val exception = intercept[UnsupportedOperationException] {
       BroadCastSIFilterPushJoin
         .getFilteredSegments(s"select * from $tableName",
-          Spark2TestQueryExecutor.spark)
+          SparkTestQueryExecutor.spark)
     }
     exception.getMessage.contains("Get Filter Segments API supports if and only if " +
                                   "only one carbon main table is present in query.")
@@ -99,7 +99,7 @@ class TestCarbonSegmentUtil extends QueryTest {
   def test_identifySegmentsToBeMerged_Major() {
     createTable(tableName)
     val expected = SecondaryIndexUtil
-      .identifySegmentsToBeMerged(Spark2TestQueryExecutor.spark,
+      .identifySegmentsToBeMerged(SparkTestQueryExecutor.spark,
         tableName,
         databaseName)
     assert(expected.size() == 4)
@@ -115,7 +115,7 @@ class TestCarbonSegmentUtil extends QueryTest {
     sql(s"delete from table $tableName where SEGMENT.ID in (1)")
     sql(s"show segments for table $tableName").show(false)
     val expected = SecondaryIndexUtil
-      .identifySegmentsToBeMerged(Spark2TestQueryExecutor.spark,
+      .identifySegmentsToBeMerged(SparkTestQueryExecutor.spark,
         tableName,
         databaseName)
     assert(expected.size() == 0)
@@ -127,12 +127,12 @@ class TestCarbonSegmentUtil extends QueryTest {
   def test_identifySegmentsToBeMergedCustom() {
     createTable(tableName)
     val carbonTable = CarbonEnv
-      .getCarbonTable(Option(databaseName), tableName)(Spark2TestQueryExecutor.spark)
+      .getCarbonTable(Option(databaseName), tableName)(SparkTestQueryExecutor.spark)
     val customSegments = new util.ArrayList[String]()
     customSegments.add("1")
     customSegments.add("2")
     val expected = SecondaryIndexUtil
-      .identifySegmentsToBeMergedCustom(Spark2TestQueryExecutor.spark,
+      .identifySegmentsToBeMergedCustom(SparkTestQueryExecutor.spark,
         tableName,
         databaseName,
         customSegments
@@ -146,7 +146,7 @@ class TestCarbonSegmentUtil extends QueryTest {
   def test_getMergedLoadName() {
     createTable(tableName)
     val carbonTable = CarbonEnv
-      .getCarbonTable(Option(databaseName), tableName)(Spark2TestQueryExecutor.spark)
+      .getCarbonTable(Option(databaseName), tableName)(SparkTestQueryExecutor.spark)
     val loadMetadataDetails = SegmentStatusManager.readLoadMetadata(carbonTable.getMetadataPath)
     val expected = SecondaryIndexUtil
       .getMergedLoadName(loadMetadataDetails.toList.asJava)
@@ -161,7 +161,7 @@ class TestCarbonSegmentUtil extends QueryTest {
     sql(s"CREATE TABLE $tableName(c1 string, c2 string, c3 string) STORED AS carbondata")
     sql(s"INSERT INTO $tableName SELECT 'c1v1', '1', 'c3v1'")
     val carbonTable = CarbonEnv
-      .getCarbonTable(Option(databaseName), tableName)(Spark2TestQueryExecutor.spark)
+      .getCarbonTable(Option(databaseName), tableName)(SparkTestQueryExecutor.spark)
     val loadMetadataDetails = SegmentStatusManager.readLoadMetadata(carbonTable.getMetadataPath)
     val exception = intercept[UnsupportedOperationException] {
       SecondaryIndexUtil
@@ -178,7 +178,7 @@ class TestCarbonSegmentUtil extends QueryTest {
   def test_getMergedLoadName_unsorted_segment_list() {
     createTable(tableName)
     val carbonTable = CarbonEnv
-      .getCarbonTable(Option(databaseName), tableName)(Spark2TestQueryExecutor.spark)
+      .getCarbonTable(Option(databaseName), tableName)(SparkTestQueryExecutor.spark)
     val loadMetadataDetails = SegmentStatusManager.readLoadMetadata(carbonTable.getMetadataPath)
     val segments: util.List[LoadMetadataDetails] = new util.ArrayList[LoadMetadataDetails]()
     val load1 = new LoadMetadataDetails()
@@ -201,12 +201,12 @@ class TestCarbonSegmentUtil extends QueryTest {
   def test_getFilteredSegments_set_segments() {
     createTable(tableName)
     val expected = BroadCastSIFilterPushJoin
-      .getFilteredSegments(s"select * from $tableName", Spark2TestQueryExecutor.spark)
+      .getFilteredSegments(s"select * from $tableName", SparkTestQueryExecutor.spark)
     assert(expected.length == 4)
     sql(s"set carbon.input.segments.$databaseName.$tableName=0")
     val dataFrame_with_set_seg = sql(s"select count(*) from $tableName where c1='c1v1'")
     assert(dataFrame_with_set_seg.collect().length == 1)
-    sql("reset")
+    sql(s"set carbon.input.segments.$databaseName.$tableName")
     dropTables(tableName)
   }
 
@@ -223,14 +223,14 @@ class TestCarbonSegmentUtil extends QueryTest {
     sql(s"create index si_index_table1 on table $tableName(c2) AS 'carbondata' ")
     assert(BroadCastSIFilterPushJoin
              .getFilteredSegments(s"select * from $tableName where c3='c3v1'",
-               Spark2TestQueryExecutor.spark).length == 2)
+               SparkTestQueryExecutor.spark).length == 2)
     assert(BroadCastSIFilterPushJoin
              .getFilteredSegments(s"select * from $tableName where c3='c3v1' or c2 ='2'",
-               Spark2TestQueryExecutor.spark).length == 4)
+               SparkTestQueryExecutor.spark).length == 4)
     val exception = intercept[UnsupportedOperationException] {
       BroadCastSIFilterPushJoin
         .getFilteredSegments(s"select * from si_index_table",
-          Spark2TestQueryExecutor.spark)
+          SparkTestQueryExecutor.spark)
     }
     exception.getMessage.contains("Get Filter Segments API supports if and only if " +
                                   "only one carbon main table is present in query.")
@@ -293,7 +293,7 @@ class TestCarbonSegmentUtil extends QueryTest {
       "P_CAP_TIME','bad_records_action'='force')")
     assert(BroadCastSIFilterPushJoin
              .getFilteredSegments(s"select * from $tableName",
-               Spark2TestQueryExecutor.spark).length == 1)
+               SparkTestQueryExecutor.spark).length == 1)
     dropTables(tableName)
   }
 
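The cleanup in test_getFilteredSegments_set_segments now targets the single carbon.input.segments key instead of issuing a blanket reset, so other settings on the shared test session survive the test. A sketch of the same scoped cleanup done through the session conf (an assumed equivalent, not taken verbatim from this patch):

    import org.apache.spark.sql.SparkSession

    // Drop only the per-table segment restriction set earlier in the test,
    // leaving the rest of the shared session configuration untouched.
    def clearInputSegments(spark: SparkSession, db: String, table: String): Unit =
      spark.conf.unset(s"carbon.input.segments.$db.$table")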
diff --git a/datamap/mv/core/pom.xml b/mv/core/pom.xml
similarity index 98%
rename from datamap/mv/core/pom.xml
rename to mv/core/pom.xml
index 5d0ab54..7a3166d 100644
--- a/datamap/mv/core/pom.xml
+++ b/mv/core/pom.xml
@@ -23,14 +23,14 @@
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
-    <relativePath>../../../pom.xml</relativePath>
+    <relativePath>../../pom.xml</relativePath>
   </parent>
 
   <artifactId>carbondata-mv-core</artifactId>
   <name>Apache CarbonData :: Materialized View Core</name>
 
   <properties>
-    <dev.path>${basedir}/../../../dev</dev.path>
+    <dev.path>${basedir}/../../dev</dev.path>
     <jacoco.append>true</jacoco.append>
   </properties>
 
@@ -42,7 +42,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
+      <artifactId>carbondata-spark</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVAnalyzerRule.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVAnalyzerRule.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVAnalyzerRule.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVAnalyzerRule.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVDataMapProvider.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVDataMapProvider.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVDataMapProvider.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVDataMapProvider.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtension.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtension.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtension.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtension.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtensionSqlParser.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtensionSqlParser.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtensionSqlParser.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVExtensionSqlParser.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVHelper.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVHelper.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVHelper.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVHelper.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVParser.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVParser.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVParser.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVParser.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVUtil.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVUtil.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVUtil.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/MVUtil.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/CreateMaterializedViewCommand.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/CreateMaterializedViewCommand.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/CreateMaterializedViewCommand.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/CreateMaterializedViewCommand.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/DropMaterializedViewCommand.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/DropMaterializedViewCommand.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/DropMaterializedViewCommand.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/DropMaterializedViewCommand.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/RefreshMaterializedViewCommand.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/RefreshMaterializedViewCommand.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/RefreshMaterializedViewCommand.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/RefreshMaterializedViewCommand.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/ShowMaterializedViewCommand.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/ShowMaterializedViewCommand.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/ShowMaterializedViewCommand.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/extension/command/ShowMaterializedViewCommand.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/DefaultMatchMaker.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/DefaultMatchMaker.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/DefaultMatchMaker.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/DefaultMatchMaker.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MVUdf.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MVUdf.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MVUdf.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MVUdf.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MatchMaker.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MatchMaker.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MatchMaker.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MatchMaker.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/Navigator.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/Navigator.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/Navigator.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/Navigator.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/QueryRewrite.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/QueryRewrite.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/QueryRewrite.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/QueryRewrite.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/SummaryDatasetCatalog.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/SummaryDatasetCatalog.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/SummaryDatasetCatalog.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/SummaryDatasetCatalog.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/Utils.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/Utils.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/Utils.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/Utils.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/session/MVSession.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/session/MVSession.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/session/MVSession.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/session/MVSession.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/session/internal/SessionState.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/session/internal/SessionState.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/session/internal/SessionState.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/session/internal/SessionState.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/Granularity.java b/mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/Granularity.java
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/Granularity.java
rename to mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/Granularity.java
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesFunction.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesFunction.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesFunction.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesFunction.scala
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesUtil.scala b/mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesUtil.scala
similarity index 100%
rename from datamap/mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesUtil.scala
rename to mv/core/src/main/scala/org/apache/carbondata/mv/timeseries/TimeSeriesUtil.scala
diff --git a/datamap/mv/core/src/main/spark2.3/org/apache/carbondata/mv/extension/MVOptimizer.scala b/mv/core/src/main/spark2.3/org/apache/carbondata/mv/extension/MVOptimizer.scala
similarity index 100%
rename from datamap/mv/core/src/main/spark2.3/org/apache/carbondata/mv/extension/MVOptimizer.scala
rename to mv/core/src/main/spark2.3/org/apache/carbondata/mv/extension/MVOptimizer.scala
diff --git a/datamap/mv/core/src/main/spark2.4/org/apache/carbondata/mv/extension/MVOptimizer.scala b/mv/core/src/main/spark2.4/org/apache/carbondata/mv/extension/MVOptimizer.scala
similarity index 100%
rename from datamap/mv/core/src/main/spark2.4/org/apache/carbondata/mv/extension/MVOptimizer.scala
rename to mv/core/src/main/spark2.4/org/apache/carbondata/mv/extension/MVOptimizer.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/ExtractJoinConditionsSuite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/plans/ExtractJoinConditionsSuite.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/ExtractJoinConditionsSuite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/plans/ExtractJoinConditionsSuite.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/IsSPJGHSuite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/plans/IsSPJGHSuite.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/IsSPJGHSuite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/plans/IsSPJGHSuite.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/LogicalToModularPlanSuite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/plans/LogicalToModularPlanSuite.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/LogicalToModularPlanSuite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/plans/LogicalToModularPlanSuite.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
similarity index 98%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
index 6305125..8933677 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
@@ -29,7 +29,6 @@ class ModularToSQLSuite extends ModularPlanTest with BeforeAndAfter {
 
   val spark = sqlContext
   val testHive = sqlContext.sparkSession
-  val hiveClient = CarbonSessionCatalogUtil.getClient(spark.sparkSession)
   
   ignore("convert modular plans to sqls") {
     
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
similarity index 97%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
index c44a55a..0153e5a 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
@@ -30,7 +30,6 @@ class SignatureSuite extends ModularPlanTest with BeforeAndAfterAll {
 
   val spark = sqlContext
   val testHive = sqlContext.sparkSession
-  val hiveClient = CarbonSessionCatalogUtil.getClient(spark.sparkSession)
   
   ignore("test signature computing") {
 
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/Tpcds_1_4_BenchmarkSuite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/plans/Tpcds_1_4_BenchmarkSuite.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/plans/Tpcds_1_4_BenchmarkSuite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/plans/Tpcds_1_4_BenchmarkSuite.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCoalesceTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCoalesceTestCase.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCoalesceTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCoalesceTestCase.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCountAndCaseTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCountAndCaseTestCase.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCountAndCaseTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCountAndCaseTestCase.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
similarity index 99%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
index 6e48758..633db24 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
@@ -35,10 +35,10 @@ class MVCreateTestCase extends QueryTest with BeforeAndAfterAll {
     drop()
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
-    val projectPath = new File(this.getClass.getResource("/").getPath + "../../../../../")
+    val projectPath = new File(this.getClass.getResource("/").getPath + "../../../../")
       .getCanonicalPath.replaceAll("\\\\", "/")
     val integrationPath = s"$projectPath/integration"
-    val resourcesPath = s"$integrationPath/spark-common-test/src/test/resources"
+    val resourcesPath = s"$integrationPath/spark/src/test/resources"
     sql(
       """
         | CREATE TABLE fact_table1 (empname String, designation String, doj Timestamp,
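The same relative-path adjustment recurs in MVSampleTestCase, MVTPCDSTestCase and MVTpchTestCase below: mv/core sits one directory shallower than the old datamap/mv/core, so the walk from the test-classes resource root to the repository root drops one "../", and the shared test data now lives under integration/spark rather than integration/spark-common-test. The resolved paths, as computed in the updated suites (sketch):

    import java.io.File

    object TestPaths {
      // Four levels up from the test-classes resource root now reaches the repo root.
      val projectPath: String =
        new File(getClass.getResource("/").getPath + "../../../../")
          .getCanonicalPath.replaceAll("\\\\", "/")
      val resourcesPath = s"$projectPath/integration/spark/src/test/resources"
    }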
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVExceptionTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVExceptionTestCase.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVExceptionTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVExceptionTestCase.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
similarity index 99%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
index 4c05938..0df8d07 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
@@ -23,6 +23,7 @@ import org.scalatest.BeforeAndAfterAll
 class MVFilterAndJoinTest extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll(): Unit = {
+    defaultConfig()
     drop
     sql("create table main_table (name string,age int,height int) STORED AS carbondata")
     sql("create table dim_table (name string,age int,height int) STORED AS carbondata")
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
similarity index 98%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
index 8431516..82338bb 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
@@ -33,6 +33,7 @@ import org.apache.carbondata.core.statusmanager.{SegmentStatus, SegmentStatusMan
 class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll(): Unit = {
+    defaultConfig()
     sql("drop table IF EXISTS test_table")
     sql("drop table IF EXISTS test_table1")
     sql("drop table IF EXISTS main_table")
@@ -101,7 +102,7 @@ class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
     sql("Delete from table test_table1 where segment.id in (0)")
     sql("drop materialized view if exists datamap1")
     sql("create materialized view datamap1 with deferred refresh as select empname, designation " +
-      "from test_table")
+        "from test_table")
     loadDataToFactTable("test_table")
     loadDataToFactTable("test_table1")
     sql(s"refresh materialized view datamap1")
@@ -306,7 +307,6 @@ class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
     sql("drop table IF EXISTS test_table")
   }
 
-
   test("test set segments with main table having mv before refresh") {
     sql("drop table IF EXISTS main_table")
     sql("create table main_table(a string,b string,c int) STORED AS carbondata")
@@ -318,7 +318,8 @@ class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
     sql(s"refresh materialized view datamap1")
     val df = sql("select a, sum(c) from main_table  group by a")
     assert(!TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
-    sql("reset")
+    defaultConfig()
+    sqlContext.sparkSession.conf.unset("carbon.input.segments.default.main_table")
     checkAnswer(sql("select a, sum(c) from main_table  group by a"), Seq(Row("a", 1), Row("b", 2)))
     val df1= sql("select a, sum(c) from main_table  group by a")
     assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "datamap1"))
@@ -612,6 +613,13 @@ class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
 
 
   override def afterAll(): Unit = {
+    defaultConfig()
+    Seq("carbon.enable.auto.load.merge",
+      "carbon.input.segments.default.main_table",
+      "carbon.input.segments.default.test_table",
+      "carbon.input.segments.default.datamap1_table").foreach { key =>
+      sqlContext.sparkSession.conf.unset(key)
+    }
     sql("drop table if exists products")
     sql("drop table if exists sales")
     sql("drop table if exists products1")
@@ -639,4 +647,4 @@ class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
       s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE $tableName  OPTIONS
          |('DELIMITER'= ',', 'QUOTECHAR'= '"')""".stripMargin)
   }
-}
\ No newline at end of file
+}
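beforeAll and afterAll now bracket this suite with defaultConfig() plus explicit conf.unset calls rather than a blanket SQL reset, so per-table carbon.input.segments restrictions cannot leak into later suites. A condensed sketch of that teardown pattern (defaultConfig() is assumed to come from the QueryTest harness and is not shown):

    import org.apache.spark.sql.SparkSession

    // Teardown sketch: drop every scoped key the suite touched. The real test
    // calls defaultConfig() first to restore harness defaults, then unsets
    // each carbon.input.segments.* key it set during the run.
    def cleanUp(spark: SparkSession, keys: Seq[String]): Unit =
      keys.foreach(spark.conf.unset)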
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVInvalidTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVInvalidTestCase.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVInvalidTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVInvalidTestCase.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVMultiJoinTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVMultiJoinTestCase.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVMultiJoinTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVMultiJoinTestCase.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVRewriteTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVRewriteTestCase.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVRewriteTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVRewriteTestCase.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
similarity index 98%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
index e58a72a..a050e7e 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
@@ -31,10 +31,10 @@ class MVSampleTestCase extends QueryTest with BeforeAndAfterAll {
     drop()
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
-    val projectPath = new File(this.getClass.getResource("/").getPath + "../../../../../")
+    val projectPath = new File(this.getClass.getResource("/").getPath + "../../../../")
       .getCanonicalPath.replaceAll("\\\\", "/")
     val integrationPath = s"$projectPath/integration"
-    val resourcesPath = s"$integrationPath/spark-common-test/src/test/resources"
+    val resourcesPath = s"$integrationPath/spark/src/test/resources"
     sql("drop database if exists sample cascade")
     sql("create database sample")
     sql("use sample")
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
similarity index 98%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
index 6fdc487..dbef5f5 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
@@ -32,10 +32,10 @@ class MVTPCDSTestCase extends QueryTest with BeforeAndAfterAll {
     drop()
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
-    val projectPath = new File(this.getClass.getResource("/").getPath + "../../../../../")
+    val projectPath = new File(this.getClass.getResource("/").getPath + "../../../../")
       .getCanonicalPath.replaceAll("\\\\", "/")
     val integrationPath = s"$projectPath/integration"
-    val resourcesPath = s"$integrationPath/spark-common-test/src/test/resources"
+    val resourcesPath = s"$integrationPath/spark/src/test/resources"
     sql("drop database if exists tpcds cascade")
     sql("create database tpcds")
     sql("use tpcds")
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
similarity index 99%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
index f6e65ba..8490d0f 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
@@ -27,10 +27,10 @@ class MVTpchTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
     drop()
-    val projectPath = new File(this.getClass.getResource("/").getPath + "../../../../../")
+    val projectPath = new File(this.getClass.getResource("/").getPath + "../../../../")
       .getCanonicalPath.replaceAll("\\\\", "/")
     val integrationPath = s"$projectPath/integration"
-    val resourcesPath = s"$integrationPath/spark-common-test/src/test/resources"
+    val resourcesPath = s"$integrationPath/spark/src/test/resources"
 
     sql(s"""create table if not exists LINEITEM(  L_SHIPDATE date,  L_SHIPMODE string,  L_SHIPINSTRUCT string,  L_RETURNFLAG string,  L_RECEIPTDATE date,  L_ORDERKEY INT ,  L_PARTKEY INT ,  L_SUPPKEY   string,  L_LINENUMBER int,  L_QUANTITY double,  L_EXTENDEDPRICE double,  L_DISCOUNT double,  L_TAX double,  L_LINESTATUS string,  L_COMMITDATE date,  L_COMMENT  string) STORED AS carbondata""")
     sql(s"""create table if not exists ORDERS(  O_ORDERDATE date,  O_ORDERPRIORITY string,  O_ORDERSTATUS string,  O_ORDERKEY int,  O_CUSTKEY string,  O_TOTALPRICE double,  O_CLERK string,  O_SHIPPRIORITY int,  O_COMMENT string) STORED AS carbondata""")
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectAllColumnsSuite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectAllColumnsSuite.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectAllColumnsSuite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectAllColumnsSuite.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectSelectExactChildrenSuite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectSelectExactChildrenSuite.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectSelectExactChildrenSuite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectSelectExactChildrenSuite.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
similarity index 98%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
index 78bcda3..2d444f7 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
@@ -420,7 +420,7 @@ class TestAllOperationsOnMV extends QueryTest with BeforeAndAfterEach {
   test("test todate UDF function with mv") {
     sql("drop table IF EXISTS maintable")
     sql("CREATE TABLE maintable (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata")
-  sql("insert into maintable values(1, 'abc', 'abc001', '1975-06-11 01:00:03.0','1975-06-11 02:00:03.0', 120, 1234,4.34,24.56,12345, 2464, 45)")
+    sql("insert into maintable values(1, 'abc', 'abc001', '1975-06-11 01:00:03.0','1975-06-11 02:00:03.0', 120, 1234,4.34,24.56,12345, 2464, 45)")
     sql("drop materialized view if exists dm ")
     sql("create materialized view dm  as select max(to_date(dob)) , min(to_date(dob)) from maintable where to_date(dob)='1975-06-11' or to_date(dob)='1975-06-23'")
     checkExistence(sql("select max(to_date(dob)) , min(to_date(dob)) from maintable where to_date(dob)='1975-06-11' or to_date(dob)='1975-06-23'"), true, "1975-06-11 1975-06-11")
@@ -577,12 +577,15 @@ class TestAllOperationsOnMV extends QueryTest with BeforeAndAfterEach {
     intercept[Exception] {
       sql("alter table maintable drop columns(c_code)")
     }.getMessage.contains("Column name cannot be dropped because it exists in mv materialized view: dm1")
-   sql("drop table if exists maintable")
+    sql("drop table if exists maintable")
   }
 
   test("drop meta cache on mv materialized view table") {
+    defaultConfig()
+    printConfiguration()
     sql("drop table IF EXISTS maintable")
     sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
+    printTable("maintable")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop materialized view if exists dm ")
     sql("create materialized view dm  as select name, sum(price) from maintable group by name")
@@ -606,6 +609,10 @@ class TestAllOperationsOnMV extends QueryTest with BeforeAndAfterEach {
     assert(droppedCacheKeys.asScala.exists(key => key.startsWith(tablePath)))
 
     // check if cache does not have any more table index entries
+    cacheAfterDrop.asScala.foreach { key =>
+      LOGGER.error("cacheAfterDrop - key : " + key)
+    }
+    LOGGER.error("table path: " + tablePath)
     assert(!cacheAfterDrop.asScala.exists(key => key.startsWith(tablePath)))
 
     // Check if mv index entries are dropped
@@ -620,6 +627,5 @@ class TestAllOperationsOnMV extends QueryTest with BeforeAndAfterEach {
     newSet.addAll(oldSet)
     newSet
   }
-  
-}
 
+}
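
Note: the LOGGER.error calls added above dump every surviving cache key just before the assertion that none of them belongs to the dropped table. A self-contained sketch of that check (the names here are illustrative, not the CarbonData cache API):

    // Sketch only: fail if any cache key still points under the table path.
    import scala.collection.JavaConverters._

    object CacheCheckSketch {
      def assertNoStaleEntries(cacheKeys: java.util.Set[String],
          tablePath: String): Unit = {
        val stale = cacheKeys.asScala.filter(_.startsWith(tablePath))
        assert(stale.isEmpty, s"stale cache entries remain: $stale")
      }
    }
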
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
similarity index 99%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
index 27e7d90..c781598 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
@@ -33,6 +33,7 @@ class TestPartitionWithMV extends QueryTest with BeforeAndAfterAll {
   val testData = s"$resourcesPath/sample.csv"
 
   override def beforeAll(): Unit = {
+    defaultConfig()
     sql("drop database if exists partition_mv cascade")
     sql("create database partition_mv")
     sql("use partition_mv")
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
similarity index 97%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
index c6c7510..90cf7af 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
@@ -27,8 +27,7 @@ class TestSQLSuite extends ModularPlanTest with BeforeAndAfter {
 
   val spark = sqlContext
   val testHive = sqlContext.sparkSession
-  val hiveClient = CarbonSessionCatalogUtil.getClient(spark.sparkSession)
-  
+
   ignore("protypical mqo rewrite test") {
     
     hiveClient.runSqlHive(
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
similarity index 97%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
index ee6445a..9ae601b 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
+++ b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
@@ -31,7 +31,6 @@ class Tpcds_1_4_Suite extends ModularPlanTest with BeforeAndAfter {
 
   val spark = sqlContext
   val testHive = sqlContext.sparkSession
-  val hiveClient = CarbonSessionCatalogUtil.getClient(spark.sparkSession)
 
   test("test using tpc-ds queries") {
 
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestSQLBatch.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestSQLBatch.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestSQLBatch.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestSQLBatch.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestTPCDS_1_4_Batch.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestTPCDS_1_4_Batch.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestTPCDS_1_4_Batch.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/matching/TestTPCDS_1_4_Batch.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch2.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch2.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch2.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/testutil/TestSQLBatch2.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_QueryBatch.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_QueryBatch.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_QueryBatch.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_QueryBatch.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_Tables.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_Tables.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_Tables.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/testutil/Tpcds_1_4_Tables.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesCreateDataMapCommand.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesCreateDataMapCommand.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesCreateDataMapCommand.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesCreateDataMapCommand.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesLoadAndQuery.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesLoadAndQuery.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesLoadAndQuery.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesLoadAndQuery.scala
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesQueryRollUp.scala b/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesQueryRollUp.scala
similarity index 100%
rename from datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesQueryRollUp.scala
rename to mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesQueryRollUp.scala
diff --git a/datamap/mv/plan/pom.xml b/mv/plan/pom.xml
similarity index 97%
rename from datamap/mv/plan/pom.xml
rename to mv/plan/pom.xml
index 27d32ed..d9d4bd3 100644
--- a/datamap/mv/plan/pom.xml
+++ b/mv/plan/pom.xml
@@ -23,20 +23,20 @@
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
     <version>2.0.0-SNAPSHOT</version>
-    <relativePath>../../../pom.xml</relativePath>
+    <relativePath>../../pom.xml</relativePath>
   </parent>
 
   <artifactId>carbondata-mv-plan</artifactId>
   <name>Apache CarbonData :: Materialized View Plan</name>
 
   <properties>
-    <dev.path>${basedir}/../../../dev</dev.path>
+    <dev.path>${basedir}/../../dev</dev.path>
   </properties>
 
   <dependencies>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
+      <artifactId>carbondata-spark</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/dsl/package.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/dsl/package.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/dsl/package.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/dsl/package.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/expressions/modular/subquery.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/expressions/modular/subquery.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/expressions/modular/subquery.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/expressions/modular/subquery.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/AggregatePushDown.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/AggregatePushDown.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/AggregatePushDown.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/AggregatePushDown.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Flags.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Flags.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Flags.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Flags.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Harmonizer.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Harmonizer.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Harmonizer.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Harmonizer.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPatterns.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPatterns.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPatterns.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPatterns.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlan.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlan.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlan.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlan.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlanSignatureGenerator.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlanSignatureGenerator.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlanSignatureGenerator.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularPlanSignatureGenerator.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularRelation.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularRelation.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularRelation.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/ModularRelation.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Modularizer.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Modularizer.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Modularizer.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/Modularizer.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/basicOperators.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/basicOperators.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/basicOperators.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/basicOperators.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/queryGraph.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/queryGraph.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/queryGraph.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/modular/queryGraph.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/package.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/package.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/package.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/package.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/BirdcageOptimizer.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/BirdcageOptimizer.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/BirdcageOptimizer.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/BirdcageOptimizer.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Logical2ModularExtractions.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Logical2ModularExtractions.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Logical2ModularExtractions.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Logical2ModularExtractions.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/LogicalPlanSignatureGenerator.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/LogicalPlanSignatureGenerator.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/LogicalPlanSignatureGenerator.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/LogicalPlanSignatureGenerator.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Printers.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Printers.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Printers.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Printers.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuild.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuild.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuild.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuild.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuildDSL.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuildDSL.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuildDSL.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuildDSL.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuilder.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuilder.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuilder.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/SQLBuilder.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Signature.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Signature.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Signature.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/Signature.scala
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/TableCluster.scala b/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/TableCluster.scala
similarity index 100%
rename from datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/TableCluster.scala
rename to mv/plan/src/main/scala/org/apache/carbondata/mv/plans/util/TableCluster.scala
diff --git a/store/CSDK/CMakeLists.txt b/sdk/CSDK/CMakeLists.txt
similarity index 100%
rename from store/CSDK/CMakeLists.txt
rename to sdk/CSDK/CMakeLists.txt
diff --git a/store/CSDK/src/CarbonProperties.cpp b/sdk/CSDK/src/CarbonProperties.cpp
similarity index 100%
rename from store/CSDK/src/CarbonProperties.cpp
rename to sdk/CSDK/src/CarbonProperties.cpp
diff --git a/store/CSDK/src/CarbonProperties.h b/sdk/CSDK/src/CarbonProperties.h
similarity index 100%
rename from store/CSDK/src/CarbonProperties.h
rename to sdk/CSDK/src/CarbonProperties.h
diff --git a/store/CSDK/src/CarbonReader.cpp b/sdk/CSDK/src/CarbonReader.cpp
similarity index 100%
rename from store/CSDK/src/CarbonReader.cpp
rename to sdk/CSDK/src/CarbonReader.cpp
diff --git a/store/CSDK/src/CarbonReader.h b/sdk/CSDK/src/CarbonReader.h
similarity index 100%
rename from store/CSDK/src/CarbonReader.h
rename to sdk/CSDK/src/CarbonReader.h
diff --git a/store/CSDK/src/CarbonRow.cpp b/sdk/CSDK/src/CarbonRow.cpp
similarity index 100%
rename from store/CSDK/src/CarbonRow.cpp
rename to sdk/CSDK/src/CarbonRow.cpp
diff --git a/store/CSDK/src/CarbonRow.h b/sdk/CSDK/src/CarbonRow.h
similarity index 100%
rename from store/CSDK/src/CarbonRow.h
rename to sdk/CSDK/src/CarbonRow.h
diff --git a/store/CSDK/src/CarbonSchemaReader.cpp b/sdk/CSDK/src/CarbonSchemaReader.cpp
similarity index 100%
rename from store/CSDK/src/CarbonSchemaReader.cpp
rename to sdk/CSDK/src/CarbonSchemaReader.cpp
diff --git a/store/CSDK/src/CarbonSchemaReader.h b/sdk/CSDK/src/CarbonSchemaReader.h
similarity index 100%
rename from store/CSDK/src/CarbonSchemaReader.h
rename to sdk/CSDK/src/CarbonSchemaReader.h
diff --git a/store/CSDK/src/CarbonWriter.cpp b/sdk/CSDK/src/CarbonWriter.cpp
similarity index 100%
rename from store/CSDK/src/CarbonWriter.cpp
rename to sdk/CSDK/src/CarbonWriter.cpp
diff --git a/store/CSDK/src/CarbonWriter.h b/sdk/CSDK/src/CarbonWriter.h
similarity index 100%
rename from store/CSDK/src/CarbonWriter.h
rename to sdk/CSDK/src/CarbonWriter.h
diff --git a/store/CSDK/src/Configuration.cpp b/sdk/CSDK/src/Configuration.cpp
similarity index 100%
rename from store/CSDK/src/Configuration.cpp
rename to sdk/CSDK/src/Configuration.cpp
diff --git a/store/CSDK/src/Configuration.h b/sdk/CSDK/src/Configuration.h
similarity index 100%
rename from store/CSDK/src/Configuration.h
rename to sdk/CSDK/src/Configuration.h
diff --git a/store/CSDK/src/Schema.cpp b/sdk/CSDK/src/Schema.cpp
similarity index 100%
rename from store/CSDK/src/Schema.cpp
rename to sdk/CSDK/src/Schema.cpp
diff --git a/store/CSDK/src/Schema.h b/sdk/CSDK/src/Schema.h
similarity index 100%
rename from store/CSDK/src/Schema.h
rename to sdk/CSDK/src/Schema.h
diff --git a/store/CSDK/test/main.cpp b/sdk/CSDK/test/main.cpp
similarity index 99%
rename from store/CSDK/test/main.cpp
rename to sdk/CSDK/test/main.cpp
index 2e1b5e5..b221d08 100644
--- a/store/CSDK/test/main.cpp
+++ b/sdk/CSDK/test/main.cpp
@@ -748,7 +748,7 @@ bool testWriteDataWithSchemaFile(JNIEnv *env, char *path, int argc, char *argv[]
         writer.builder(env);
         writer.outputPath(path);
         writer.withCsvInput();
-        writer.withSchemaFile("../../../integration/spark-common/target/warehouse/add_segment_test/Metadata/schema");
+        writer.withSchemaFile("../../../integration/spark/target/warehouse/add_segment_test/Metadata/schema");
         writer.writtenBy("CSDK");
         writer.taskNo(15541554.81);
         writer.withThreadSafe(1);
diff --git a/store/sdk/pom.xml b/sdk/sdk/pom.xml
similarity index 98%
rename from store/sdk/pom.xml
rename to sdk/sdk/pom.xml
index 9e43a8b..3f8256e 100644
--- a/store/sdk/pom.xml
+++ b/sdk/sdk/pom.xml
@@ -11,8 +11,8 @@
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>carbondata-store-sdk</artifactId>
-  <name>Apache CarbonData :: Store SDK</name>
+  <artifactId>carbondata-sdk</artifactId>
+  <name>Apache CarbonData :: SDK</name>
 
   <properties>
     <dev.path>${basedir}/../../dev</dev.path>
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/ArrowCarbonReader.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/ArrowCarbonReader.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/ArrowCarbonReader.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/ArrowCarbonReader.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CSVCarbonWriter.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CSVCarbonWriter.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/CSVCarbonWriter.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CSVCarbonWriter.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriter.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriter.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriter.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriter.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/JsonCarbonWriter.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/JsonCarbonWriter.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/JsonCarbonWriter.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/JsonCarbonWriter.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/RowUtil.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/RowUtil.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/RowUtil.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/RowUtil.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/TestUtil.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/TestUtil.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/TestUtil.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/TestUtil.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowConverter.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowConverter.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowConverter.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowConverter.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowFieldWriter.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowFieldWriter.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowFieldWriter.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowFieldWriter.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowUtils.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowUtils.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowUtils.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowUtils.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowWriter.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowWriter.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowWriter.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/arrow/ArrowWriter.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/utils/SDKUtil.java b/sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/utils/SDKUtil.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/sdk/file/utils/SDKUtil.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/utils/SDKUtil.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/CarbonRowReadSupport.java b/sdk/sdk/src/main/java/org/apache/carbondata/store/CarbonRowReadSupport.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/store/CarbonRowReadSupport.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/store/CarbonRowReadSupport.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java b/sdk/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/store/CarbonStore.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java b/sdk/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java b/sdk/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
similarity index 100%
rename from store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
rename to sdk/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
diff --git a/store/sdk/src/main/resources/log4j.properties b/sdk/sdk/src/main/resources/log4j.properties
similarity index 100%
rename from store/sdk/src/main/resources/log4j.properties
rename to sdk/sdk/src/main/resources/log4j.properties
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/ArrowCarbonReaderTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ArrowCarbonReaderTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/ArrowCarbonReaderTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ArrowCarbonReaderTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonSchemaReaderTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonSchemaReaderTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonSchemaReaderTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonSchemaReaderTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentAvroSdkWriterTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentAvroSdkWriterTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentAvroSdkWriterTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentAvroSdkWriterTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkReaderTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkReaderTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkReaderTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkReaderTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkWriterTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkWriterTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkWriterTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ConcurrentSdkWriterTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/ImageTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ImageTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/ImageTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/ImageTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/MinMaxTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/MinMaxTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/MinMaxTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/MinMaxTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/MultithreadSDKBlockletReaderTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/MultithreadSDKBlockletReaderTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/sdk/file/MultithreadSDKBlockletReaderTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/MultithreadSDKBlockletReaderTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java b/sdk/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
diff --git a/store/sdk/src/test/java/org/apache/carbondata/util/BinaryUtil.java b/sdk/sdk/src/test/java/org/apache/carbondata/util/BinaryUtil.java
similarity index 100%
rename from store/sdk/src/test/java/org/apache/carbondata/util/BinaryUtil.java
rename to sdk/sdk/src/test/java/org/apache/carbondata/util/BinaryUtil.java
diff --git a/store/sdk/src/test/resources/image/carbondatalogo.jpg b/sdk/sdk/src/test/resources/image/carbondatalogo.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/carbondatalogo.jpg
rename to sdk/sdk/src/test/resources/image/carbondatalogo.jpg
diff --git a/store/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.jpg b/sdk/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.jpg
rename to sdk/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.jpg
diff --git a/store/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.txt b/sdk/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.txt
similarity index 100%
rename from store/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.txt
rename to sdk/sdk/src/test/resources/image/flowers/10686568196_b1915544a8.txt
diff --git a/store/sdk/src/test/resources/image/flowers/10712722853_5632165b04.jpg b/sdk/sdk/src/test/resources/image/flowers/10712722853_5632165b04.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/flowers/10712722853_5632165b04.jpg
rename to sdk/sdk/src/test/resources/image/flowers/10712722853_5632165b04.jpg
diff --git a/store/sdk/src/test/resources/image/flowers/10712722853_5632165b04.txt b/sdk/sdk/src/test/resources/image/flowers/10712722853_5632165b04.txt
similarity index 100%
rename from store/sdk/src/test/resources/image/flowers/10712722853_5632165b04.txt
rename to sdk/sdk/src/test/resources/image/flowers/10712722853_5632165b04.txt
diff --git a/store/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.jpg b/sdk/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.jpg
rename to sdk/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.jpg
diff --git a/store/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.txt b/sdk/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.txt
similarity index 100%
rename from store/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.txt
rename to sdk/sdk/src/test/resources/image/flowers/subfolder/10841136265_af473efc60.txt
diff --git a/store/sdk/src/test/resources/image/voc/2007_000027.jpg b/sdk/sdk/src/test/resources/image/voc/2007_000027.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2007_000027.jpg
rename to sdk/sdk/src/test/resources/image/voc/2007_000027.jpg
diff --git a/store/sdk/src/test/resources/image/voc/2007_000027.xml b/sdk/sdk/src/test/resources/image/voc/2007_000027.xml
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2007_000027.xml
rename to sdk/sdk/src/test/resources/image/voc/2007_000027.xml
diff --git a/store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000032.jpg b/sdk/sdk/src/test/resources/image/voc/2007_000032.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000032.jpg
rename to sdk/sdk/src/test/resources/image/voc/2007_000032.jpg
diff --git a/store/sdk/src/test/resources/image/voc/2007_000032.xml b/sdk/sdk/src/test/resources/image/voc/2007_000032.xml
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2007_000032.xml
rename to sdk/sdk/src/test/resources/image/voc/2007_000032.xml
diff --git a/store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000033.jpg b/sdk/sdk/src/test/resources/image/voc/2007_000033.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000033.jpg
rename to sdk/sdk/src/test/resources/image/voc/2007_000033.jpg
diff --git a/store/sdk/src/test/resources/image/voc/2007_000033.xml b/sdk/sdk/src/test/resources/image/voc/2007_000033.xml
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2007_000033.xml
rename to sdk/sdk/src/test/resources/image/voc/2007_000033.xml
diff --git a/store/sdk/src/test/resources/image/voc/2007_000039.jpg b/sdk/sdk/src/test/resources/image/voc/2007_000039.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2007_000039.jpg
rename to sdk/sdk/src/test/resources/image/voc/2007_000039.jpg
diff --git a/store/sdk/src/test/resources/image/voc/2007_000039.xml b/sdk/sdk/src/test/resources/image/voc/2007_000039.xml
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2007_000039.xml
rename to sdk/sdk/src/test/resources/image/voc/2007_000039.xml
diff --git a/store/sdk/src/test/resources/image/voc/2009_001444.jpg b/sdk/sdk/src/test/resources/image/voc/2009_001444.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2009_001444.jpg
rename to sdk/sdk/src/test/resources/image/voc/2009_001444.jpg
diff --git a/store/sdk/src/test/resources/image/voc/2009_001444.xml b/sdk/sdk/src/test/resources/image/voc/2009_001444.xml
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2009_001444.xml
rename to sdk/sdk/src/test/resources/image/voc/2009_001444.xml
diff --git a/store/sdk/src/test/resources/image/voc/2007_000032.jpg b/sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000032.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2007_000032.jpg
rename to sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000032.jpg
diff --git a/store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000032.png b/sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000032.png
similarity index 100%
rename from store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000032.png
rename to sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000032.png
diff --git a/store/sdk/src/test/resources/image/voc/2007_000033.jpg b/sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000033.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/voc/2007_000033.jpg
rename to sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000033.jpg
diff --git a/store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000033.png b/sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000033.png
similarity index 100%
rename from store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000033.png
rename to sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000033.png
diff --git a/store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.jpg b/sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.jpg
similarity index 100%
rename from store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.jpg
rename to sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.jpg
diff --git a/store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.png b/sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.png
similarity index 100%
rename from store/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.png
rename to sdk/sdk/src/test/resources/image/vocForSegmentationClass/2007_000042.png