You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by aj...@apache.org on 2020/10/21 12:26:59 UTC

[carbondata] branch master updated: [CARBONDATA-3889] Enable scalastyle check for all scala test code

This is an automated email from the ASF dual-hosted git repository.

ajantha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new c7b0c9d  [CARBONDATA-3889] Enable scalastyle check for all scala test code
c7b0c9d is described below

commit c7b0c9d50f196aee953108f2529f28a80676aecb
Author: QiangCai <qi...@qq.com>
AuthorDate: Sat Sep 19 10:05:36 2020 +0800

    [CARBONDATA-3889] Enable scalastyle check for all scala test code
    
    Why is this PR needed?
    There are many code style issues in the test source code,
    which results in ugly, hard-to-read code.
    
    What changes were proposed in this PR?
    Keep long SQL statements where needed by using the comment "// scalastyle:off lineLength".
    Optimize imports for all Scala test cases.
    Fix the remaining code style issues.
    
    Does this PR introduce any user interface change?
    No
    
    Is any new test case added?
    No
    
    This closes #3950
---
 .../carbondata/core/locks/LocalFileLock.java       |    5 +-
 .../carbondata/core/util/CarbonProperties.java     |    8 +
 .../apache/carbondata/core/util/CarbonUtil.java    |   10 +-
 .../apache/carbondata/examplesCI/RunExamples.scala |    6 +-
 .../CarbonDataFileMergeTestCaseOnSI.scala          |   50 +-
 .../CarbonIndexFileMergeTestCaseWithSI.scala       |  106 +-
 .../testsuite/secondaryindex/DropTableTest.scala   |   16 +-
 .../InsertIntoCarbonTableTestCase.scala            |   32 +-
 .../TestAlterTableColumnRenameWithIndex.scala      |   14 +-
 .../TestBroadCastSIFilterPushJoinWithUDF.scala     |   71 +-
 .../secondaryindex/TestCTASWithIndex.scala         |   18 +-
 .../secondaryindex/TestCacheOperationsForSI.scala  |    2 +-
 .../testsuite/secondaryindex/TestCarbonJoin.scala  |    5 +-
 .../TestCreateIndexForCleanAndDeleteSegment.scala  |   13 +-
 .../secondaryindex/TestCreateIndexTable.scala      |  169 +-
 .../TestCreateIndexWithLoadAndCompaction.scala     |   21 +-
 .../TestIndexModelForORFilterPushDown.scala        |   12 +-
 .../TestIndexModelWithAggQueries.scala             |   28 +-
 .../secondaryindex/TestIndexModelWithIUD.scala     |  432 +-
 .../TestIndexModelWithLocalDictionary.scala        |   32 +-
 .../TestIndexModelWithUnsafeColumnPage.scala       |   11 +-
 .../testsuite/secondaryindex/TestIndexRepair.scala |   22 +-
 ...WithIndexModelOnFirstColumnAndSortColumns.scala |   12 +-
 .../secondaryindex/TestLikeQueryWithIndex.scala    |   24 +-
 .../secondaryindex/TestNIQueryWithIndex.scala      |   28 +-
 .../TestRegisterIndexCarbonTable.scala             |   11 +-
 .../secondaryindex/TestSIWithAddSegment.scala      |    4 +-
 .../TestSIWithComplexArrayType.scala               |   28 +-
 .../secondaryindex/TestSIWithPartition.scala       |   18 +-
 ...yIndex.scala => TestSIWithSecondaryIndex.scala} |  281 +-
 .../apache/spark/util/TestCarbonSegmentUtil.scala  |    6 +-
 .../carbon/flink/TestCarbonPartitionWriter.scala   |   45 +-
 .../org/apache/carbon/flink/TestCarbonWriter.scala |   57 +-
 .../apache/carbon/flink/TestDeleteStageFiles.scala |   22 +-
 .../scala/org/apache/carbon/flink/TestSource.scala |   33 +-
 .../PrestoAllDataTypeLocalDictTest.scala           |   16 +-
 .../integrationtest/PrestoAllDataTypeTest.scala    |  109 +-
 .../PrestoTestNonTransactionalTableFiles.scala     |  112 +-
 .../presto/util/CarbonDataStoreCreator.scala       |   30 +-
 integration/spark-common-cluster-test/pom.xml      |    8 +-
 .../cluster/sdv/generated/AlterTableTestCase.scala |  211 +-
 .../cluster/sdv/generated/BadRecordTestCase.scala  |   46 +-
 .../sdv/generated/BloomFilterIndexTestCase.scala   |    3 +-
 .../sdv/generated/ComplexDataTypeTestCase.scala    |   52 +-
 .../generated/CreateTableAsSelectTestCase.scala    |   41 +-
 .../CreateTableWithLocalDictionaryTestCase.scala   |    3 +-
 .../sdv/generated/DataLoadingIUDTestCase.scala     | 7167 ++++++++++----------
 .../sdv/generated/DataLoadingTestCase.scala        |  325 +-
 .../sdv/generated/DataLoadingV3TestCase.scala      |  147 +-
 .../cluster/sdv/generated/GlobalSortTestCase.scala |  101 +-
 .../sdv/generated/InvertedindexTestCase.scala      |   74 +-
 .../LoadTableWithLocalDictionaryTestCase.scala     |   33 +-
 .../cluster/sdv/generated/LuceneTestCase.scala     |   17 +-
 .../cluster/sdv/generated/MergeIndexTestCase.scala |    8 +-
 .../sdv/generated/OffheapQuery1TestCase.scala      | 1761 +++--
 .../sdv/generated/OffheapQuery2TestCase.scala      |  241 +-
 .../sdv/generated/OffheapSort1TestCase.scala       |   37 +-
 .../sdv/generated/OffheapSort2TestCase.scala       |   37 +-
 .../sdv/generated/PreAggregateTestCase.scala       |    0
 .../sdv/generated/PrestoSampleTestCase.scala       |   12 +-
 .../cluster/sdv/generated/QueriesBVATestCase.scala | 2780 ++++----
 .../sdv/generated/QueriesBasicTestCase.scala       | 3134 +++++----
 .../sdv/generated/QueriesCompactionTestCase.scala  | 1534 ++---
 .../sdv/generated/QueriesNormalTestCase.scala      |  114 +-
 .../sdv/generated/QueriesRangeFilterTestCase.scala | 1240 ++--
 .../generated/QueriesSparkBlockDistTestCase.scala  |   60 +-
 .../cluster/sdv/generated/SDKwriterTestCase.scala  |  199 +-
 .../sdv/generated/SetParameterTestCase.scala       |    6 +-
 .../cluster/sdv/generated/ShowLoadsTestCase.scala  |   13 +-
 .../generated/SortColumnExcudeDictTestCase.scala   |   68 +-
 .../cluster/sdv/generated/SortColumnTestCase.scala |   74 +-
 .../sdv/generated/StandardPartitionTestCase.scala  |   99 +-
 .../generated/TableCommentAlterTableTestCase.scala |   62 +-
 .../generated/TestPartitionWithGlobalSort.scala    |   89 +-
 .../sdv/generated/TimestamptypesTestCase.scala     |   17 +-
 .../sdv/generated/V3offheapvectorTestCase.scala    |   89 +-
 .../cluster/sdv/generated/Vector1TestCase.scala    |  136 +-
 .../cluster/sdv/generated/Vector2TestCase.scala    |  137 +-
 ...teTableUsingSparkCarbonFileFormatTestCase.scala |   60 +-
 .../datasource/SparkCarbonDataSourceTestCase.scala |   16 +-
 .../sdv/register/TestRegisterCarbonTable.scala     |   21 +-
 .../carbondata/cluster/sdv/suite/SDVSuites.scala   |    5 +-
 .../spark/sql/common/util/CarbonFunSuite.scala     |    3 +-
 .../spark/sql/common/util/DataSourceTestUtil.scala |    5 +-
 .../apache/spark/sql/common/util/QueryTest.scala   |   21 +-
 .../org/apache/spark/sql/common/util/Tags.scala    |   24 +-
 .../apache/spark/sql/test/TestQueryExecutor.scala  |    1 +
 .../org/apache/spark/sql/test/util/QueryTest.scala |    5 +-
 .../org/apache/carbondata/sdk/util/BinaryUtil.java |   11 +-
 .../stream/CarbonStreamRecordReaderTest.java       |    1 -
 .../scala/org/apache/carbondata/geo/GeoTest.scala  |   18 +
 .../bloom/BloomCoarseGrainIndexFunctionSuite.scala |  161 +-
 .../index/bloom/BloomCoarseGrainIndexSuite.scala   |   92 +-
 .../bloom/BloomCoarseGrainIndexTestUtil.scala      |   21 +-
 .../index/lucene/LuceneCoarseGrainIndexSuite.scala |    2 +-
 .../index/lucene/LuceneFineGrainIndexSuite.scala   |  102 +-
 .../aggquery/IntegerDataTypeTestCase.scala         |   36 +-
 .../spark/testsuite/bigdecimal/TestBigInt.scala    |   29 +-
 .../TestDimensionWithDecimalDataType.scala         |    7 +-
 .../testsuite/binary/TestBinaryDataType.scala      |  232 +-
 .../complexType/TestAdaptiveComplexType.scala      |  128 +-
 .../TestAdaptiveEncodingForNullValues.scala        |   40 +-
 ...ncodingUnsafeColumnPageForComplexDataType.scala |    5 +-
 .../complexType/TestAllComplexDataType.scala       |   86 +-
 .../complexType/TestArrayContainsPushDown.scala    |   59 +-
 .../complexType/TestCompactionComplexType.scala    |  351 +-
 .../complexType/TestComplexDataType.scala          |  187 +-
 .../complexType/TestComplexTypeWithBigArray.scala  |    4 +-
 .../complexType/TestCreateTableWithDouble.scala    |    5 +-
 .../dataload/MultiFilesDataLoagdingTestCase.scala  |    2 +-
 .../testsuite/dataload/TestLoadDataGeneral.scala   |   89 +-
 .../dataload/TestLoadDataWithAutoLoadMerge.scala   |    7 +-
 .../dataload/TestLoadDataWithBlankLine.scala       |   10 +-
 .../dataload/TestLoadDataWithCompression.scala     |  118 +-
 .../TestLoadDataWithEmptyArrayColumns.scala        |    3 +-
 .../dataload/TestLoadDataWithJunkChars.scala       |    4 +-
 .../TestLoadDataWithSortColumnBounds.scala         |   58 +-
 .../dataload/TestLoadDataWithYarnLocalDirs.scala   |    5 +-
 .../dataload/TestNoInvertedIndexLoadAndQuery.scala |   49 +-
 .../emptyrow/TestCSVHavingOnlySpaceChar.scala      |   10 +-
 .../spark/testsuite/emptyrow/TestEmptyRows.scala   |    7 +-
 .../testsuite/emptyrow/TestSkipEmptyLines.scala    |   46 +-
 .../primitiveTypes/ArrayDataTypeTestCase.scala     |    3 +-
 .../primitiveTypes/DoubleDataTypeTestCase.scala    |   15 +-
 .../primitiveTypes/FloatDataTypeTestCase.scala     |    2 +-
 .../primitiveTypes/MapDataTypeTestCase.scala       |    2 +-
 .../TestAdaptiveEncodingForPrimitiveTypes.scala    |  209 +-
 .../spark/testsuite/ShowTable/TestShowTable.scala  |    4 +-
 .../carbondata/spark/testsuite/TestCarbonCli.scala |    8 +-
 .../testsuite/addsegment/AddSegmentTestCase.scala  |  320 +-
 .../aggquery/AllDataTypesTestCaseAggregate.scala   |   23 +-
 .../testsuite/aggquery/AverageQueryTestCase.scala  |    6 +-
 .../allqueries/AllDataTypesTestCase.scala          |  322 +-
 .../testsuite/allqueries/DoubleDataTypeTest.scala  |   58 +-
 .../InsertIntoCarbonTableSpark2TestCase.scala      |    9 +-
 .../allqueries/InsertIntoCarbonTableTestCase.scala |   23 +-
 .../allqueries/MeasureOnlyTableTestCases.scala     |   77 +-
 .../allqueries/TestPruneUsingSegmentMinMax.scala   |   32 +-
 ...ryWithColumnMetCacheAndCacheLevelProperty.scala |   58 +-
 .../alterTable/TestAlterTableAddColumns.scala      |   81 +-
 .../TestAlterTableCompactionLevelThreshold.scala   |   20 +-
 .../TestAlterTableSortColumnsProperty.scala        |  393 +-
 ...leWithColumnMetCacheAndCacheLevelProperty.scala |   34 +-
 .../badrecordloger/BadRecordActionTest.scala       |   52 +-
 .../badrecordloger/BadRecordEmptyDataTest.scala    |   16 +-
 .../badrecordloger/BadRecordLoggerTest.scala       |   11 +-
 .../testsuite/bigdecimal/TestBigDecimal.scala      |   22 +-
 .../bigdecimal/TestNullAndEmptyFields.scala        |   19 +-
 .../bigdecimal/TestNullAndEmptyFieldsUnsafe.scala  |   19 +-
 .../blockprune/BlockPruneQueryTestCase.scala       |   31 +-
 .../CarbonCustomBlockDistributionTest.scala        |   14 +-
 .../booleantype/BooleanDataTypesBaseTest.scala     |   11 +-
 .../booleantype/BooleanDataTypesBigFileTest.scala  |   65 +-
 .../booleantype/BooleanDataTypesFilterTest.scala   |   89 +-
 .../booleantype/BooleanDataTypesInsertTest.scala   |  127 +-
 .../booleantype/BooleanDataTypesLoadTest.scala     |  135 +-
 .../BooleanDataTypesParameterTest.scala            |   32 +-
 .../booleantype/BooleanDataTypesSortTest.scala     |   24 +-
 .../compress/TestBooleanCompressSuite.scala        |    5 +-
 .../testsuite/cloud/AllDataSourceTestCase.scala    |  100 +-
 .../testsuite/cloud/CacheRefreshTestCase.scala     |   17 +
 .../compaction/TestHybridCompaction.scala          |   23 +-
 .../TestAlterTableWithTableComment.scala           |    4 +-
 ...bonFileInputFormatWithExternalCarbonTable.scala |   51 +-
 .../TestCreateDDLForComplexMapType.scala           |   75 +-
 .../createTable/TestCreateExternalTable.scala      |    2 +-
 .../TestCreateHiveTableWithCarbonDS.scala          |    3 +-
 .../createTable/TestCreateTableAsSelect.scala      |   71 +-
 .../createTable/TestCreateTableIfNotExists.scala   |    2 +-
 .../createTable/TestCreateTableLike.scala          |   20 +-
 .../TestCreateTableWithBlockletSize.scala          |    3 +-
 ...leWithColumnMetCacheAndCacheLevelProperty.scala |   74 +-
 .../TestCreateTableWithCompactionOptions.scala     |   14 +-
 .../TestCreateTableWithSpaceInColumnName.scala     |   25 +-
 .../TestNonTransactionalCarbonTable.scala          |  122 +-
 .../TestNonTransactionalCarbonTableForBinary.scala |   24 +-
 ...TestNonTransactionalCarbonTableForMapType.scala |   36 +-
 ...TestNonTransactionalCarbonTableJsonWriter.scala |   17 +-
 ...nTransactionalCarbonTableWithAvroDataType.scala |  549 +-
 ...onTransactionalCarbonTableWithComplexType.scala |  599 +-
 .../createTable/TestRenameTableWithIndex.scala     |   20 +-
 .../CarbonIndexFileMergeTestCase.scala             |  183 +-
 .../CompactionSupportGlobalSortBigFileTest.scala   |   16 +-
 .../CompactionSupportGlobalSortFunctionTest.scala  |   42 +-
 .../CompactionSupportGlobalSortParameterTest.scala |  109 +-
 .../CompactionSupportSpecifiedSegmentsTest.scala   |   34 +-
 .../DataCompactionBlockletBoundryTest.scala        |   28 +-
 .../DataCompactionBoundaryConditionsTest.scala     |    5 +-
 .../DataCompactionCardinalityBoundryTest.scala     |   55 +-
 .../datacompaction/DataCompactionLockTest.scala    |   20 +-
 .../MajorCompactionIgnoreInMinorTest.scala         |   32 +-
 .../MajorCompactionStopsAfterCompaction.scala      |   40 +-
 .../MajorCompactionWithMeasureSortColumns.scala    |    5 +-
 .../TableLevelCompactionOptionTest.scala           |   67 +-
 .../dataload/TestDataLoadPartitionCoalescer.scala  |    2 +-
 .../TestDataLoadWithColumnsMoreThanSchema.scala    |    7 +-
 .../dataload/TestDataLoadWithFileName.scala        |   14 +-
 .../dataload/TestGlobalSortDataLoad.scala          |  104 +-
 .../testsuite/dataload/TestLoadDataFrame.scala     |   66 +-
 .../dataload/TestLoadDataUseAllDictionary.scala    |    4 +-
 ...tLoadDataWithDictionaryExcludeAndInclude.scala} |   17 +-
 .../TestLoadDataWithDiffTimestampFormat.scala      |   68 +-
 .../TestLoadDataWithFileHeaderException.scala      |   22 +-
 .../TestLoadDataWithHiveSyntaxDefaultFormat.scala  |  150 +-
 .../TestLoadDataWithHiveSyntaxUnsafe.scala         |  307 +-
 ...adDataWithMalformedCarbonCommandException.scala |    2 +-
 .../dataload/TestLoadDataWithNoMeasure.scala       |   10 +-
 .../TestLoadDataWithNotProperInputFile.scala       |    2 +-
 .../spark/testsuite/dataload/TestLoadOptions.scala |    3 +-
 .../dataload/TestLoadTblNameIsKeyword.scala        |   33 +-
 .../dataload/TestLoadWithSortTempCompressed.scala  |   18 +-
 .../dataload/TestRangeColumnDataLoad.scala         |   28 +-
 .../dataload/TestTableLevelBlockSize.scala         |   17 +-
 .../testsuite/dataload/TestTableLoadMinSize.scala  |   22 +-
 .../dataretention/DataRetentionTestCase.scala      |   26 +-
 .../dblocation/DBLocationCarbonTableTestCase.scala |   88 +-
 .../deleteTable/TestDeleteTableNewDDL.scala        |   30 +-
 .../describeTable/TestDescribeTable.scala          |   24 +-
 .../detailquery/AllQueriesSpark2TestCase.scala     |    5 +-
 .../testsuite/detailquery/CastColumnTestCase.scala |   21 +-
 .../ColumnPropertyValidationTestCase.scala         |    7 +-
 .../detailquery/ExpressionWithNullTestCase.scala   |  320 +-
 .../HighCardinalityDataTypesTestCase.scala         |   94 +-
 .../detailquery/IntegerDataTypeTestCase.scala      |    9 +-
 .../detailquery/NoDictionaryColumnTestCase.scala   |   24 +-
 .../RangeFilterAllDataTypesTestCases.scala         |  126 +-
 .../detailquery/RangeFilterTestCase.scala          |   23 +-
 .../SubqueryWithFilterAndSortTestCase.scala        |   37 +-
 .../ValueCompressionDataTypeTestCase.scala         |   69 +-
 .../DateDataTypeDirectDictionaryTest.scala         |   20 +-
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |   14 +-
 .../DateDataTypeNullDataTest.scala                 |   11 +-
 ...TimestampDataTypeDirectDictionaryTestCase.scala |   28 +-
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |   10 +-
 .../TimestampDataTypeNullDataTest.scala            |   14 +-
 .../TimestampNoDictionaryColumnCastTestCase.scala  |    9 +-
 .../TimestampNoDictionaryColumnTestCase.scala      |   17 +-
 .../filterexpr/AllDataTypesTestCaseFilter.scala    |   13 +-
 .../testsuite/filterexpr/CountStarTestCase.scala   |   12 +-
 .../filterexpr/FilterProcessorTestCase.scala       |  142 +-
 .../filterexpr/GrtLtFilterProcessorTestCase.scala  |   53 +-
 .../filterexpr/IntegerDataTypeTestCase.scala       |   11 +-
 .../NullMeasureValueTestCaseFilter.scala           |    7 +-
 .../TestAndEqualFilterEmptyOperandValue.scala      |    3 +-
 .../testsuite/filterexpr/TestBetweenFilter.scala   |   12 +-
 .../testsuite/filterexpr/TestGrtLessFilter.scala   |    3 +-
 .../filterexpr/TestImplicitFilterExpression.scala  |    7 +-
 .../testsuite/filterexpr/TestIsNullFilter.scala    |   23 +-
 .../testsuite/filterexpr/TestNotNullFilter.scala   |    8 +-
 .../FlatFolderTableLoadingTestCase.scala           |   27 +-
 .../spark/testsuite/index/CGIndexTestCase.scala    |   49 +-
 .../spark/testsuite/index/FGIndexTestCase.scala    |   48 +-
 .../spark/testsuite/index/IndexWriterSuite.scala   |   82 +-
 .../spark/testsuite/index/TestIndexCommand.scala   |    6 +-
 .../spark/testsuite/index/TestIndexStatus.scala    |   39 +-
 .../InsertIntoNonCarbonTableTestCase.scala         |   55 +-
 .../testsuite/iud/DeleteCarbonTableTestCase.scala  |   52 +-
 .../iud/HorizontalCompactionTestCase.scala         |  129 +-
 .../iud/TestInsertAndOtherCommandConcurrent.scala  |   57 +-
 .../iud/TestUpdateAndDeleteWithLargeData.scala     |    4 +-
 .../testsuite/iud/UpdateCarbonTableTestCase.scala  |  471 +-
 .../UpdateCarbonTableTestCaseWithBadRecord.scala   |   29 +-
 .../joinquery/AllDataTypesTestCaseJoin.scala       |   11 +-
 .../joinquery/IntegerDataTypeTestCase.scala        |    9 +-
 .../joinquery/JoinWithoutDictionaryColumn.scala    |   30 +-
 .../testsuite/joinquery/OrderByLimitTestCase.scala |   14 +-
 .../LocalDictionarySupportAlterTableTest.scala     |  191 +-
 .../LocalDictionarySupportCreateTableTest.scala    |  319 +-
 .../LocalDictionarySupportLoadTableTest.scala      |   63 +-
 .../longstring/VarcharDataTypesBasicTestCase.scala |  227 +-
 .../NullMeasureValueTestCaseAggregate.scala        |    9 +-
 .../spark/testsuite/merge/MergeTestCase.scala      |  179 +-
 .../TestNullValueSerialization.scala               |    7 +-
 .../testsuite/partition/TestShowPartitions.scala   |   16 +-
 .../partition/TestUpdateForPartitionTable.scala    |    8 +-
 .../sdk/TestSDKWithTransactionalTable.scala        |   30 +-
 .../testsuite/segment/ShowSegmentTestCase.scala    |   62 +-
 .../segmentreading/TestSegmentReading.scala        |  170 +-
 .../TestSegmentReadingForMultiThreading.scala      |   27 +-
 .../testsuite/sortcolumns/TestSortColumns.scala    |   38 +-
 .../sortcolumns/TestSortColumnsWithUnsafe.scala    |   24 +-
 .../sortexpr/AllDataTypesTestCaseSort.scala        |   22 +-
 .../sortexpr/IntegerDataTypeTestCase.scala         |    9 +-
 .../StandardPartitionBadRecordLoggerTest.scala     |   12 +-
 .../StandardPartitionComplexDataTypeTestCase.scala |    2 +-
 .../StandardPartitionGlobalSortTestCase.scala      |   36 +-
 .../StandardPartitionTableCleanTestCase.scala      |   76 +-
 .../StandardPartitionTableCompactionTestCase.scala |   21 +-
 .../StandardPartitionTableDropTestCase.scala       |   11 +-
 .../StandardPartitionTableLoadingTestCase.scala    |   39 +-
 .../StandardPartitionTableOverwriteTestCase.scala  |    9 +-
 .../StandardPartitionTableQueryTestCase.scala      |   95 +-
 .../windowsexpr/WindowsExprTestCase.scala          |   16 +-
 .../carbondata/spark/util/BadRecordUtil.scala      |    4 +-
 .../spark/util/DataTypeConverterUtilSuite.scala    |    7 +-
 .../util/ExternalColumnDictionaryTestCase.scala    |    0
 .../sql/commands/StoredAsCarbondataSuite.scala     |   17 +-
 .../sql/commands/TestCarbonShowCacheCommand.scala  |  115 +-
 .../sql/commands/UsingCarbondataSuite.scala        |   22 +-
 .../carbondata/store/SparkCarbonStoreTest.scala    |    6 +-
 .../scala/org/apache/carbondata/view/MVTest.scala  |   17 +-
 .../view/plans/ExtractJoinConditionsSuite.scala    |   49 +-
 .../carbondata/view/plans/IsSPJGHSuite.scala       |   35 +-
 .../view/plans/LogicalToModularPlanSuite.scala     |  200 +-
 .../carbondata/view/plans/ModularToSQLSuite.scala  |   37 +-
 .../carbondata/view/plans/SignatureSuite.scala     |   64 +-
 .../view/rewrite/MVCoalesceTestCase.scala          |   50 +-
 .../view/rewrite/MVCountAndCaseTestCase.scala      |   10 +-
 .../carbondata/view/rewrite/MVCreateTestCase.scala |   46 +-
 .../view/rewrite/MVExceptionTestCase.scala         |   16 +-
 .../view/rewrite/MVFilterAndJoinTest.scala         |   19 +-
 .../rewrite/MVIncrementalLoadingTestcase.scala     |  122 +-
 .../view/rewrite/MVInvalidTestCase.scala           |    4 +-
 .../view/rewrite/MVMultiJoinTestCase.scala         |   17 +-
 .../view/rewrite/MVRewriteTestCase.scala           |    4 +-
 .../carbondata/view/rewrite/MVSampleTestCase.scala |    6 +-
 .../carbondata/view/rewrite/MVTPCDSTestCase.scala  |    6 +-
 .../carbondata/view/rewrite/MVTpchTestCase.scala   |    7 +-
 .../view/rewrite/SelectAllColumnsSuite.scala       |    4 +-
 .../view/rewrite/TestAllOperationsOnMV.scala       |   78 +-
 .../view/rewrite/TestPartitionWithMV.scala         |  152 +-
 .../carbondata/view/rewrite/TestSQLSuite.scala     |   28 +-
 .../carbondata/view/rewrite/Tpcds_1_4_Suite.scala  |   35 +-
 .../view/rewrite/matching/TestSQLBatch.scala       |    4 +-
 .../rewrite/matching/TestTPCDS_1_4_Batch.scala     |    3 +
 .../carbondata/view/testutil/ModularPlanTest.scala |    7 +-
 .../carbondata/view/testutil/TestSQLBatch.scala    |    6 +-
 .../carbondata/view/testutil/TestSQLBatch2.scala   |    8 +-
 .../view/testutil/Tpcds_1_4_QueryBatch.scala       |    1 +
 .../view/testutil/Tpcds_1_4_Tables.scala           |    1 +
 .../timeseries/TestCreateMVWithTimeSeries.scala    |   44 +-
 .../timeseries/TestMVTimeSeriesLoadAndQuery.scala  |   64 +-
 .../timeseries/TestMVTimeSeriesQueryRollUp.scala   |   88 +-
 .../indexserver/DistributedRDDUtilsTest.scala      |   32 +-
 .../org/apache/indexserver/IndexServerTest.scala   |   25 +-
 .../carbondata/BadRecordPathLoadOptionTest.scala   |   15 +-
 .../spark/carbondata/CarbonDataSourceSuite.scala   |   28 +-
 .../carbondata/DataLoadFailAllTypeSortTest.scala   |   20 +-
 .../spark/carbondata/TableStatusBackupTest.scala   |    2 +-
 .../carbondata/TestStreamingTableOpName.scala      |   47 +-
 .../carbondata/TestStreamingTableQueryFilter.scala |   30 +-
 .../TestStreamingTableWithLongString.scala         |   50 +-
 .../TestStreamingTableWithRowParser.scala          |   16 +-
 .../bucketing/TableBucketingTestCase.scala         |   29 +-
 .../carbondata/commands/SetCommandTestCase.scala   |   11 +-
 .../datatype/NumericDimensionBadRecordTest.scala   |   13 +-
 .../iud/DeleteCarbonTableSubqueryTestCase.scala    |   26 +-
 .../spark/carbondata/query/SubQueryTestSuite.scala |    4 +-
 .../register/TestRegisterCarbonTable.scala         |   44 +-
 .../restructure/AlterTableUpgradeSegmentTest.scala |   21 +-
 .../restructure/AlterTableValidationTestCase.scala |   70 +-
 .../vectorreader/AddColumnTestCases.scala          |  193 +-
 .../AlterTableColumnRenameTestCase.scala           |   52 +-
 .../vectorreader/ChangeDataTypeTestCases.scala     |   15 +-
 .../vectorreader/DropColumnTestCases.scala         |    8 +-
 .../vectorreader/VectorReaderTestCase.scala        |    8 +-
 .../apache/spark/sql/CarbonExtensionSuite.scala    |   21 +-
 .../spark/sql/GetDataSizeAndIndexSizeTest.scala    |    9 +-
 .../SparkCarbonDataSourceBinaryTest.scala          |   23 +-
 .../datasource/SparkCarbonDataSourceTest.scala     |   33 +-
 ...TestCreateTableUsingSparkCarbonFileFormat.scala |  236 +-
 .../org/apache/spark/sql/common/util/Tags.scala    |   25 +-
 .../mutation/CarbonTruncateCommandTest.scala       |    2 +-
 .../apache/spark/sql/profiler/ProfilerSuite.scala  |   12 +-
 .../org/apache/spark/util/CarbonCommandSuite.scala |    2 -
 .../org/apache/spark/util/SparkUtil4Test.scala     |   12 +-
 .../org/apache/spark/util/SparkUtilTest.scala      |    2 +-
 pom.xml                                            |    3 +-
 scalastyle-config.xml                              |    6 +-
 .../sdk/file/PaginationCarbonReaderTest.java       |    2 +-
 370 files changed, 19660 insertions(+), 17034 deletions(-)

diff --git a/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java b/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
index 0d909d5..9b88eea 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java
@@ -81,7 +81,10 @@ public class LocalFileLock extends AbstractCarbonLock {
       if (!FileFactory.isFileExist(lockFilePath)) {
         FileFactory.createNewLockFile(lockFilePath);
       }
-
+      if (channel != null) {
+        CarbonUtil.closeStreams(channel);
+        channel = null;
+      }
       channel = FileChannel.open(Paths.get(lockFilePath), StandardOpenOption.WRITE,
           StandardOpenOption.APPEND);
       try {
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index 462e459..fe61c89 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -2122,4 +2122,12 @@ public final class CarbonProperties {
         CarbonCommonConstants.CARBON_REORDER_FILTER_DEFAULT)
     );
   }
+
+  /**
+   * for test to print current configuration
+   */
+  @Override
+  public String toString() {
+    return carbonProperties.toString();
+  }
 }
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 8d77e05..db04841 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2409,10 +2409,12 @@ public final class CarbonUtil {
           LOGGER.error("Not able to acquire the lock for Table status update for table");
         }
       } finally {
-        if (carbonLock.unlock()) {
-          LOGGER.debug("Table unlocked successfully after table status update");
-        } else {
-          LOGGER.error("Unable to unlock Table lock for table during table status update");
+        if (updateSize) {
+          if (carbonLock.unlock()) {
+            LOGGER.debug("Table unlocked successfully after table status update");
+          } else {
+            LOGGER.error("Unable to unlock Table lock for table during table status update");
+          }
         }
       }
     }
diff --git a/examples/spark/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala b/examples/spark/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
index 7b09e57..64a2478 100644
--- a/examples/spark/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
+++ b/examples/spark/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
@@ -19,13 +19,13 @@ package org.apache.carbondata.examplesCI
 
 import java.io.File
 
-import org.apache.spark.sql.test.util.QueryTest
 import org.apache.spark.sql.SparkSqlAdapter
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.carbondata.examples._
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.examples._
 import org.apache.carbondata.examples.sdk.CarbonReaderExample
 import org.apache.carbondata.examples.sql.JavaCarbonSessionExample
 
@@ -120,7 +120,7 @@ class RunExamples extends QueryTest with BeforeAndAfterAll {
     DirectSQLExample.exampleBody(spark)
   }
 
-  //Ignoring because HiveExample depends on Hadoop ENV, but CI doesn't meet the running conditions.
+  // Ignoring because HiveExample depends on Hadoop ENV, but CI doesn't meet the running conditions.
   ignore("HiveExample") {
     SparkSqlAdapter.initSparkSQL()
     HiveExample.createCarbonTable(spark)
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
index da8c13b..c4020c6 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
@@ -18,23 +18,24 @@ package org.apache.carbondata.spark.testsuite.mergedata
 
 import java.io.{File, PrintWriter}
 
+import scala.util.Random
+
+import org.apache.spark.sql.CarbonEnv
+import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
+
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, CarbonFileFilter}
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.core.util.path.CarbonTablePath
-import org.apache.spark.sql.CarbonEnv
-import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
-import scala.util.Random
-
-import org.apache.spark.sql.test.util.QueryTest
 
 class CarbonDataFileMergeTestCaseOnSI
   extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
   val file2 = resourcesPath + "/compaction/fil2.csv"
 
   override protected def beforeAll(): Unit = {
-    val n = 160000
+    val n = 16000
     createFile(file2, n * 4, n)
     sql("drop database if exists dataFileMerge cascade")
     sql("create database dataFileMerge")
@@ -79,7 +80,7 @@ class CarbonDataFileMergeTestCaseOnSI
       "CREATE INDEX indexmerge_index1 on table indexmerge (name) AS 'carbondata' properties" +
       "('table_blocksize'='1')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE indexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     val rows = sql("""Select count(*) from indexmerge where name='n164419'""").collect()
     checkAnswer(sql("""Select count(*) from indexmerge where name='n164419'"""), rows)
     assert(getDataFileCount("indexmerge_index1", "0") < 7)
@@ -96,9 +97,9 @@ class CarbonDataFileMergeTestCaseOnSI
         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     val rows = sql("""Select count(*) from nonindexmerge where name='n164419'""").collect()
     sql(
       "CREATE INDEX nonindexmerge_index1 on table nonindexmerge (name) AS 'carbondata' " +
@@ -123,20 +124,22 @@ class CarbonDataFileMergeTestCaseOnSI
         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     val rows = sql("""Select count(*) from nonindexmerge where name='n164419'""").collect()
     sql(
     "CREATE INDEX nonindexmerge_index2 on table nonindexmerge (name) AS 'carbondata' " +
     "properties('table_blocksize'='1')")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE, "true")
-    sql("REFRESH INDEX nonindexmerge_index2 ON TABLE nonindexmerge WHERE SEGMENT.ID IN(0)").collect()
+    sql("REFRESH INDEX nonindexmerge_index2 ON TABLE nonindexmerge WHERE SEGMENT.ID IN(0)")
+      .collect()
     checkAnswer(sql("""Select count(*) from nonindexmerge where name='n164419'"""), rows)
     assert(getDataFileCount("nonindexmerge_index2", "0") < 7)
-    assert(getDataFileCount("nonindexmerge_index2", "1") == 100)
-    sql("REFRESH INDEX nonindexmerge_index2 ON TABLE nonindexmerge WHERE SEGMENT.ID IN(1)").collect()
+    assert(getDataFileCount("nonindexmerge_index2", "1") == 20)
+    sql("REFRESH INDEX nonindexmerge_index2 ON TABLE nonindexmerge WHERE SEGMENT.ID IN(1)")
+      .collect()
     checkAnswer(sql("""Select count(*) from nonindexmerge where name='n164419'"""), rows)
     assert(getDataFileCount("nonindexmerge_index2", "1") < 7)
     checkAnswer(sql("""Select count(*) from nonindexmerge where name='n164419'"""), rows)
@@ -156,11 +159,12 @@ class CarbonDataFileMergeTestCaseOnSI
       "CREATE INDEX nonindexmerge_index2 on table nonindexmerge (name) AS 'carbondata' " +
       "properties('table_blocksize'='1')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE, "true")
     val exceptionMessage = intercept[RuntimeException] {
-      sql("REFRESH INDEX nonindexmerge_index2 ON TABLE nonindexmerge WHERE SEGMENT.ID IN(1,2)").collect()
+      sql("REFRESH INDEX nonindexmerge_index2 ON TABLE nonindexmerge WHERE SEGMENT.ID IN(1,2)")
+        .collect()
     }.getMessage
     assert(exceptionMessage.contains("Refresh index by segment id is failed. Invalid ID:"))
   }
@@ -177,9 +181,9 @@ class CarbonDataFileMergeTestCaseOnSI
         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     val rows = sql("""Select count(*) from nonindexmerge where name='n164419'""").collect()
     sql(
     "CREATE INDEX nonindexmerge_index3 on table nonindexmerge (name) AS 'carbondata' " +
@@ -207,13 +211,13 @@ class CarbonDataFileMergeTestCaseOnSI
         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     val rows = sql("""Select count(*) from nonindexmerge where name='n164419'""").collect()
     sql(
     "CREATE INDEX nonindexmerge_index4 on table nonindexmerge (name) AS 'carbondata' " +
@@ -246,8 +250,10 @@ class CarbonDataFileMergeTestCaseOnSI
     try {
       val write = new PrintWriter(fileName);
       for (i <- start until (start + line)) {
+        // scalastyle:off println
         write
           .println(i + "," + "n" + i + "," + "c" + Random.nextInt(line) + "," + Random.nextInt(80))
+        // scalastyle:on println
       }
       write.close()
     } catch {
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergeindex/CarbonIndexFileMergeTestCaseWithSI.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergeindex/CarbonIndexFileMergeTestCaseWithSI.scala
index d6193da..79faa82 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergeindex/CarbonIndexFileMergeTestCaseWithSI.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergeindex/CarbonIndexFileMergeTestCaseWithSI.scala
@@ -18,24 +18,24 @@ package org.apache.carbondata.spark.testsuite.mergeindex
 
 import java.io.{File, PrintWriter}
 
-import org.apache.carbondata.core.constants.CarbonCommonConstants
+import scala.util.Random
+
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.filesystem.{CarbonFile, CarbonFileFilter}
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.metadata.CarbonMetadata
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.core.util.path.CarbonTablePath
-import scala.util.Random
-
-import org.apache.spark.sql.test.util.QueryTest
 
 class CarbonIndexFileMergeTestCaseWithSI
   extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
   val file2 = resourcesPath + "/compaction/fil2.csv"
 
   override protected def beforeAll(): Unit = {
-    val n = 150000
+    val n = 15000
     createFile(file2, n * 4, n)
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE, "false")
@@ -79,9 +79,9 @@ class CarbonIndexFileMergeTestCaseWithSI
       """.stripMargin)
     sql("CREATE INDEX nonindexmerge_index on table nonindexmerge (name) AS 'carbondata'")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
-    assert(getIndexFileCount("default_nonindexmerge", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index", "0") == 100)
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
+    assert(getIndexFileCount("default_nonindexmerge", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index", "0") == 20)
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_MERGE_INDEX_IN_SEGMENT, "true")
     sql("DROP TABLE IF EXISTS indexmerge")
@@ -93,7 +93,7 @@ class CarbonIndexFileMergeTestCaseWithSI
       """.stripMargin)
     sql("CREATE INDEX indexmerge_index1 on table indexmerge (name) AS 'carbondata'")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE indexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     assert(getIndexFileCount("default_indexmerge", "0") == 0)
     assert(getIndexFileCount("default_indexmerge_index1", "0") == 0)
     checkAnswer(sql("""Select count(*) from nonindexmerge"""),
@@ -111,15 +111,15 @@ class CarbonIndexFileMergeTestCaseWithSI
         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql("CREATE INDEX nonindexmerge_index1 on table nonindexmerge (name) AS 'carbondata'")
     val rows = sql("""Select count(*) from nonindexmerge""").collect()
-    assert(getIndexFileCount("default_nonindexmerge", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "1") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index1", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index1", "1") == 100)
+    assert(getIndexFileCount("default_nonindexmerge", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "1") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index1", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index1", "1") == 20)
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_MERGE_INDEX_IN_SEGMENT, "true")
     sql("ALTER TABLE nonindexmerge COMPACT 'SEGMENT_INDEX'").collect()
@@ -141,15 +141,15 @@ class CarbonIndexFileMergeTestCaseWithSI
         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql("CREATE INDEX nonindexmerge_index2 on table nonindexmerge (name) AS 'carbondata'")
     val rows = sql("""Select count(*) from nonindexmerge""").collect()
-    assert(getIndexFileCount("default_nonindexmerge", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "1") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index2", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index2", "1") == 100)
+    assert(getIndexFileCount("default_nonindexmerge", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "1") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index2", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index2", "1") == 20)
     sql("ALTER TABLE nonindexmerge COMPACT 'SEGMENT_INDEX'").collect()
     assert(getIndexFileCount("default_nonindexmerge", "0") == 0)
     assert(getIndexFileCount("default_nonindexmerge", "1") == 0)
@@ -173,15 +173,15 @@ class CarbonIndexFileMergeTestCaseWithSI
         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql("CREATE INDEX nonindexmerge_index3 on table nonindexmerge (name) AS 'carbondata'")
     val rows = sql("""Select count(*) from nonindexmerge""").collect()
-    assert(getIndexFileCount("default_nonindexmerge", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "1") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index3", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index3", "1") == 100)
+    assert(getIndexFileCount("default_nonindexmerge", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "1") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index3", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index3", "1") == 20)
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_MERGE_INDEX_IN_SEGMENT, "true")
     sql("ALTER TABLE nonindexmerge COMPACT 'minor'").collect()
@@ -205,39 +205,39 @@ class CarbonIndexFileMergeTestCaseWithSI
         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " +
-        s"'GLOBAL_SORT_PARTITIONS'='100')")
+        s"'GLOBAL_SORT_PARTITIONS'='20')")
     sql("CREATE INDEX nonindexmerge_index4 on table nonindexmerge (name) AS 'carbondata'")
     val rows = sql("""Select count(*) from nonindexmerge""").collect()
-    assert(getIndexFileCount("default_nonindexmerge", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "1") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "2") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "3") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "1") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "2") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "3") == 100)
-    sql("alter table nonindexmerge set tblproperties('global_sort_partitions'='100')")
+    assert(getIndexFileCount("default_nonindexmerge", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "1") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "2") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "3") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "1") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "2") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "3") == 20)
+    sql("alter table nonindexmerge set tblproperties('global_sort_partitions'='20')")
     sql("ALTER TABLE nonindexmerge COMPACT 'minor'").collect()
     sql("ALTER TABLE nonindexmerge COMPACT 'segment_index'").collect()
-    assert(getIndexFileCount("default_nonindexmerge", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "1") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "2") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "3") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "0.1") == 100)
-    assert(getIndexFileCount("default_nonindexmerge", "2.1") == 100)
+    assert(getIndexFileCount("default_nonindexmerge", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "1") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "2") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "3") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "0.1") == 20)
+    assert(getIndexFileCount("default_nonindexmerge", "2.1") == 20)
     assert(getIndexFileCount("default_nonindexmerge", "0.2") == 0)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "0") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "1") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "2") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "3") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "0.1") == 100)
-    assert(getIndexFileCount("default_nonindexmerge_index4", "2.1") == 100)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "0") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "1") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "2") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "3") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "0.1") == 20)
+    assert(getIndexFileCount("default_nonindexmerge_index4", "2.1") == 20)
     assert(getIndexFileCount("default_nonindexmerge_index4", "0.2") == 0)
     checkAnswer(sql("""Select count(*) from nonindexmerge"""), rows)
     CarbonProperties.getInstance()
@@ -264,8 +264,10 @@ class CarbonIndexFileMergeTestCaseWithSI
     try {
       val write = new PrintWriter(fileName);
       for (i <- start until (start + line)) {
+        // scalastyle:off println
         write
           .println(i + "," + "n" + i + "," + "c" + Random.nextInt(line) + "," + Random.nextInt(80))
+        // scalastyle:on println
       }
       write.close()
     } catch {
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala
index be2d74f..ba784fd 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala
@@ -27,18 +27,19 @@ class DropTableTest extends QueryTest with BeforeAndAfterAll {
   test("test to drop parent table with all indexes") {
     sql("drop database if exists cd cascade")
     sql("create database cd")
-    sql("show tables in cd").show()
+    sql("show tables in cd").collect()
     sql("create table cd.t1 (a string, b string, c string) STORED AS carbondata")
     sql("create index i1 on table cd.t1(c) AS 'carbondata'")
     sql("create index i2 on table cd.t1(c,b) AS 'carbondata'")
-    sql("show tables in cd").show()
+    sql("show tables in cd").collect()
     sql("drop table cd.t1")
     assert(sql("show tables in cd").collect()
-      .forall(row => row.getString(1) != "i2" && row != Row("cd", "i1", "false") && row != Row("cd", "t1", "false")))
+      .forall(row => row.getString(1) != "i2" && row != Row("cd", "i1", "false") &&
+                     row != Row("cd", "t1", "false")))
   }
 
 
-  /*test("test to drop one index table out of two"){
+  /* test("test to drop one index table out of two") {
     sql("drop database if exists cd cascade")
     sql("create database cd")
     sql("show tables in cd").show()
@@ -49,7 +50,7 @@ class DropTableTest extends QueryTest with BeforeAndAfterAll {
     sql("drop index i1 on cd.t1")
     sql("show tables in cd").show()
     sql("select * from i2").show()
-  }*/
+  } */
 
   test("test to drop index tables") {
     sql("drop database if exists cd cascade")
@@ -57,11 +58,12 @@ class DropTableTest extends QueryTest with BeforeAndAfterAll {
     sql("create table cd.t1 (a string, b string, c string) STORED AS carbondata")
     sql("create index i1 on table cd.t1(c) AS 'carbondata'")
     sql("create index i2 on table cd.t1(c,b) AS 'carbondata'")
-    sql("show tables in cd").show()
+    sql("show tables in cd").collect()
     sql("drop index i1 on cd.t1")
     sql("drop index i2 on cd.t1")
     assert(sql("show tables in cd").collect()
-      .forall(row => !row.getString(1).equals("i1") && !row.getString(1).equals("i2") && row.getString(1).equals("t1")))
+      .forall(row => !row.getString(1).equals("i1") && !row.getString(1).equals("i2") &&
+                     row.getString(1).equals("t1")))
     assert(sql("show indexes on cd.t1").collect().isEmpty)
   }
 
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/InsertIntoCarbonTableTestCase.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/InsertIntoCarbonTableTestCase.scala
index a302ae4..6142360 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/InsertIntoCarbonTableTestCase.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/InsertIntoCarbonTableTestCase.scala
@@ -16,28 +16,32 @@
  */
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
 class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
   override def beforeAll {
   }
 
   test("insert from carbon-select * columns with secondary index") {
-     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,"/bad_rec_loc_")
-     sql("drop table if exists TCarbonSource")
-     sql("drop table if exists TCarbon")
-     sql("create table TCarbonSource (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStre [...]
-     sql(s"LOAD DATA INPATH '$resourcesPath/100_olap.csv' INTO table TCarbonSource options ('DELIMITER'=',', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecit [...]
-     sql("create table TCarbon (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet str [...]
-     sql("create index index_on_insert1 on table TCarbon (deviceColor) AS 'carbondata'")
-     sql("insert into TCarbon select * from TCarbonSource")
-     checkAnswer(
-         sql("select * from TCarbonSource where deviceColor='7Device Color'"),
-         sql("select * from TCarbon where deviceColor='7Device Color'")
-     )
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, "/bad_rec_loc_")
+    sql("drop table if exists TCarbonSource")
+    sql("drop table if exists TCarbon")
+    // scalastyle:off lineLength
+    sql("create table TCarbonSource (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStree [...]
+    sql(s"LOAD DATA INPATH '$resourcesPath/100_olap.csv' INTO table TCarbonSource options ('DELIMITER'=',', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity [...]
+    sql("create table TCarbon (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet stri [...]
+    // scalastyle:on lineLength
+    sql("create index index_on_insert1 on table TCarbon (deviceColor) AS 'carbondata'")
+    sql("insert into TCarbon select * from TCarbonSource")
+    checkAnswer(
+      sql("select * from TCarbonSource where deviceColor='7Device Color'"),
+      sql("select * from TCarbon where deviceColor='7Device Color'")
+    )
   }
 
   override def afterAll {
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestAlterTableColumnRenameWithIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestAlterTableColumnRenameWithIndex.scala
index 78fade3..b193aa7 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestAlterTableColumnRenameWithIndex.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestAlterTableColumnRenameWithIndex.scala
@@ -16,13 +16,13 @@
  */
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils
-.isFilterPushedDownToSI;
-import org.apache.carbondata.core.metadata.CarbonMetadata
-import org.apache.carbondata.spark.exception.ProcessMetaDataException
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.core.metadata.CarbonMetadata
+import org.apache.carbondata.spark.exception.ProcessMetaDataException
+import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils.isFilterPushedDownToSI
+
 class TestAlterTableColumnRenameWithIndex extends QueryTest with BeforeAndAfterAll {
   override protected def beforeAll(): Unit = {
     dropTable()
@@ -73,9 +73,11 @@ class TestAlterTableColumnRenameWithIndex extends QueryTest with BeforeAndAfterA
     sql("alter table si_rename change c test string")
     sql("alter table si_rename change d testSI string")
     sql("show indexes on si_rename").collect
-    val query2 = sql("select test,testsi from si_rename where testsi = 'pqr' or test = 'def'").count()
+    val query2 = sql("select test,testsi from si_rename where testsi = 'pqr' or test = 'def'")
+      .count()
     assert(query1 == query2)
-    val df = sql("select test,testsi from si_rename where testsi = 'pqr' or test = 'def'").queryExecution.sparkPlan
+    val df = sql("select test,testsi from si_rename where testsi = 'pqr' or test = 'def'")
+      .queryExecution.sparkPlan
     if (!isFilterPushedDownToSI(df)) {
       assert(false)
     } else {
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestBroadCastSIFilterPushJoinWithUDF.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestBroadCastSIFilterPushJoinWithUDF.scala
index fbe351e..3dfa855 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestBroadCastSIFilterPushJoinWithUDF.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestBroadCastSIFilterPushJoinWithUDF.scala
@@ -16,18 +16,18 @@
  */
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils
-.isFilterPushedDownToSI;
 import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.test.util.QueryTest
 import org.apache.spark.util.SparkUtil
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils.isFilterPushedDownToSI
+
 /**
  * test cases for testing BroadCastSIFilterPushJoin with udf
  */
 class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfterAll {
-
+  // scalastyle:off lineLength
   val testSecondaryIndexForORFilterPushDown = new TestIndexModelForORFilterPushDown
   var carbonQuery: DataFrame = null
   var hiveQuery: DataFrame = null
@@ -255,8 +255,7 @@ class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfter
   test("test all the above udfs") {
     // all the above udf
     // TO DO, need to remove this check, once JIRA for spark 2.4 has been resolved (SPARK-30974)
-    if(SparkUtil.isSparkVersionEqualTo("2.3"))
-      {
+    if (SparkUtil.isSparkVersionEqualTo("2.3")) {
         carbonQuery = sql(
           "select approx_count_distinct(empname), approx_count_distinct(deptname), collect_list" +
           "(empname), collect_set(deptname), corr(deptno, empno), covar_pop(deptno, empno), " +
@@ -290,9 +289,8 @@ class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfter
   test("test alias of all the above udf") {
     // alias all the above udf
     // TO DO, need to remove this check, once JIRA for spark 2.4 has been resolved (SPARK-30974)
-    if(SparkUtil.isSparkVersionEqualTo("2.3"))
-      {
-        carbonQuery = sql(
+    if (SparkUtil.isSparkVersionEqualTo("2.3")) {
+      carbonQuery = sql(
           "select approx_count_distinct(empname) as c1, approx_count_distinct(deptname) as c2, collect_list" +
           "(empname) as c3, collect_set(deptname) as c4, corr(deptno, empno) as c5, covar_pop(deptno, empno) as c6, " +
           "covar_samp(deptno, empno) as c7, grouping(designation) as c8, grouping(deptname) as c9, mean(deptno) as c10, mean" +
@@ -302,7 +300,7 @@ class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfter
           "COALESCE(CONV(substring(empname, 3, 2), 16, 10), '') as c25, COALESCE(CONV(substring(deptname, 3," +
           " 2), 16, 10), '') as c26 from udfValidation where empname = 'pramod' or deptname = 'network' or " +
           "designation='TL' group by designation, deptname, empname with ROLLUP")
-        hiveQuery = sql(
+      hiveQuery = sql(
           "select approx_count_distinct(empname) as c1, approx_count_distinct(deptname) as c2, collect_list" +
           "(empname) as c3, collect_set(deptname) as c4, corr(deptno, empno) as c5, covar_pop(deptno, empno) as c6, " +
           "covar_samp(deptno, empno) as c7, grouping(designation) as c8, grouping(deptname) as c9, mean(deptno) as c10, mean" +
@@ -312,22 +310,20 @@ class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfter
           "COALESCE(CONV(substring(empname, 3, 2), 16, 10), '') as c25, COALESCE(CONV(substring(deptname, 3," +
           " 2), 16, 10), '') as c26 from udfHive where empname = 'pramod' or deptname = 'network' or " +
           "designation='TL' group by designation, deptname, empname with ROLLUP")
-        if (isFilterPushedDownToSI(carbonQuery.queryExecution.executedPlan)) {
-          assert(true)
-        } else {
-          assert(false)
-        }
-        checkAnswer(carbonQuery, hiveQuery)
+      if (isFilterPushedDownToSI(carbonQuery.queryExecution.executedPlan)) {
+        assert(true)
+      } else {
+        assert(false)
       }
-
+      checkAnswer(carbonQuery, hiveQuery)
+    }
   }
 
   test("test cast of all the above udf") {
     // cast all the above udf
     // TO DO, need to remove this check, once JIRA for spark 2.4 has been resolved (SPARK-30974)
-    if(SparkUtil.isSparkVersionEqualTo("2.3"))
-      {
-        carbonQuery = sql(
+    if (SparkUtil.isSparkVersionEqualTo("2.3")) {
+      carbonQuery = sql(
           "select cast(approx_count_distinct(empname) as string), cast(approx_count_distinct(deptname) as string), collect_list" +
           "(empname), collect_set(deptname), cast(corr(deptno, empno) as string), cast(covar_pop(deptno, empno) as string), " +
           "cast(covar_samp(deptno, empno) as string), cast(grouping(designation) as string), cast(grouping(deptname) as string), cast(mean(deptno) as string), cast(mean" +
@@ -337,7 +333,7 @@ class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfter
           "COALESCE(CONV(substring(empname, 3, 2), 16, 10), ''), COALESCE(CONV(substring(deptname, 3," +
           " 2), 16, 10), '') from udfValidation where empname = 'pramod' or deptname = 'network' or " +
           "designation='TL' group by designation, deptname, empname with ROLLUP")
-        hiveQuery = sql(
+      hiveQuery = sql(
           "select cast(approx_count_distinct(empname) as string), cast(approx_count_distinct(deptname) as string), collect_list" +
           "(empname), collect_set(deptname), cast(corr(deptno, empno) as string), cast(covar_pop(deptno, empno) as string), " +
           "cast(covar_samp(deptno, empno) as string), cast(grouping(designation) as string), cast(grouping(deptname) as string), cast(mean(deptno) as string), cast(mean" +
@@ -347,22 +343,20 @@ class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfter
           "COALESCE(CONV(substring(empname, 3, 2), 16, 10), ''), COALESCE(CONV(substring(deptname, 3," +
           " 2), 16, 10), '') from udfHive where empname = 'pramod' or deptname = 'network' or " +
           "designation='TL' group by designation, deptname, empname with ROLLUP")
-        if (isFilterPushedDownToSI(carbonQuery.queryExecution.executedPlan)) {
-          assert(true)
-        } else {
-          assert(false)
-        }
-        checkAnswer(carbonQuery, hiveQuery)
+      if (isFilterPushedDownToSI(carbonQuery.queryExecution.executedPlan)) {
+        assert(true)
+      } else {
+        assert(false)
       }
-
+      checkAnswer(carbonQuery, hiveQuery)
+    }
   }
 
   test("test cast and alias with all the above udf") {
     // cast and alias with all the above udf
     // TO DO, need to remove this check, once JIRA for spark 2.4 has been resolved (SPARK-30974)
-    if(SparkUtil.isSparkVersionEqualTo("2.3"))
-      {
-        carbonQuery = sql(
+    if (SparkUtil.isSparkVersionEqualTo("2.3")) {
+      carbonQuery = sql(
           "select cast(approx_count_distinct(empname) as string) as c1, cast(approx_count_distinct(deptname) as string) as c2, collect_list" +
           "(empname) as c3, collect_set(deptname) as c4, cast(corr(deptno, empno) as string) as c5, cast(covar_pop(deptno, empno) as string) as c6, " +
           "cast(covar_samp(deptno, empno) as string) as c7, cast(grouping(designation) as string) as c8, cast(grouping(deptname) as string) as c9, cast(mean(deptno) as string) as c10, cast(mean" +
@@ -372,7 +366,7 @@ class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfter
           "COALESCE(CONV(substring(empname, 3, 2), 16, 10), '') as c26, COALESCE(CONV(substring(deptname, 3," +
           " 2), 16, 10), '') as c27 from udfValidation where empname = 'pramod' or deptname = 'network' or " +
           "designation='TL' group by designation, deptname, empname with ROLLUP")
-        hiveQuery = sql(
+      hiveQuery = sql(
           "select cast(approx_count_distinct(empname) as string) as c1, cast(approx_count_distinct(deptname) as string) as c2, collect_list" +
           "(empname) as c3, collect_set(deptname) as c4, cast(corr(deptno, empno) as string) as c5, cast(covar_pop(deptno, empno) as string) as c6, " +
           "cast(covar_samp(deptno, empno) as string) as c7, cast(grouping(designation) as string) as c8, cast(grouping(deptname) as string) as c9, cast(mean(deptno) as string) as c10, cast(mean" +
@@ -382,14 +376,13 @@ class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfter
           "COALESCE(CONV(substring(empname, 3, 2), 16, 10), '') as c26, COALESCE(CONV(substring(deptname, 3," +
           " 2), 16, 10), '') as c27 from udfHive where empname = 'pramod' or deptname = 'network' or " +
           "designation='TL' group by designation, deptname, empname with ROLLUP")
-        if (isFilterPushedDownToSI(carbonQuery.queryExecution.executedPlan)) {
-          assert(true)
-        } else {
-          assert(false)
-        }
-        checkAnswer(carbonQuery, hiveQuery)
+      if (isFilterPushedDownToSI(carbonQuery.queryExecution.executedPlan)) {
+        assert(true)
+      } else {
+        assert(false)
       }
-
+      checkAnswer(carbonQuery, hiveQuery)
+    }
   }
 
   test("test udf on filter - concat") {
@@ -434,5 +427,5 @@ class TestBroadCastSIFilterPushJoinWithUDF extends QueryTest with BeforeAndAfter
     sql("drop table if exists udfValidation")
     sql("drop table if exists udfHive")
   }
-
+  // scalastyle:on lineLength
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCTASWithIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCTASWithIndex.scala
index 5551c32..85d24c3 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCTASWithIndex.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCTASWithIndex.scala
@@ -27,7 +27,7 @@ import org.scalatest.BeforeAndAfterAll
  * carbon table having Secondary index column
  */
 class TestCTASWithIndex extends QueryTest with BeforeAndAfterAll{
-
+  // scalastyle:off lineLength
   override def beforeAll: Unit = {
     dropTables()
     sql(
@@ -148,7 +148,7 @@ class TestCTASWithIndex extends QueryTest with BeforeAndAfterAll{
     val df = sql("explain extended select c_phone,c_mktsegment from carbon_table where c_mktsegment like 'BU%'").collect()
     df(0).getString(0).contains("default.sc_indx1")
     sql("create table carbon_table1 stored as carbondata as select c_phone,c_mktsegment from carbon_table where c_mktsegment like 'BU%'")
-    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989","BUILDING")))
+    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989", "BUILDING")))
     sql("drop table if exists carbon_table1")
   }
 
@@ -157,7 +157,7 @@ class TestCTASWithIndex extends QueryTest with BeforeAndAfterAll{
     val df = sql("explain extended select first_value(c_phone), first_value(c_mktsegment) as a from carbon_table where c_mktsegment like 'BU%'").collect()
     df(0).getString(0).contains("default.sc_indx1")
     sql("create table carbon_table1 stored as carbondata as select first_value(c_phone) as a,first_value(c_mktsegment) as b from carbon_table where c_mktsegment like 'BU%'")
-    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989","BUILDING")))
+    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989", "BUILDING")))
     sql("drop table if exists carbon_table1")
   }
 
@@ -166,7 +166,7 @@ class TestCTASWithIndex extends QueryTest with BeforeAndAfterAll{
     val df = sql("explain extended select last_value(c_phone),last_value(c_mktsegment) as a from carbon_table where c_mktsegment like 'BU%'").collect()
     df(0).getString(0).contains("default.sc_indx1")
     sql("create table carbon_table1 stored as carbondata as select last_value(c_phone) as a ,last_value(c_mktsegment) as b from carbon_table where c_mktsegment like 'BU%'")
-    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989","BUILDING")))
+    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989", "BUILDING")))
     sql("drop table if exists carbon_table1")
   }
 
@@ -175,7 +175,7 @@ class TestCTASWithIndex extends QueryTest with BeforeAndAfterAll{
     val df = sql("explain extended select first(c_phone),first(c_mktsegment) as a from carbon_table where c_mktsegment like 'BU%'").collect()
     df(0).getString(0).contains("default.sc_indx1")
     sql("create table carbon_table1 stored as carbondata as select first(c_phone) as a,first(c_mktsegment) as b from carbon_table where c_mktsegment like 'BU%'")
-    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989","BUILDING")))
+    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989", "BUILDING")))
     sql("drop table if exists carbon_table1")
   }
 
@@ -184,7 +184,7 @@ class TestCTASWithIndex extends QueryTest with BeforeAndAfterAll{
     val df = sql("explain extended select last(c_phone),last(c_mktsegment) as a from carbon_table where c_mktsegment like 'BU%'").collect()
     df(0).getString(0).contains("default.sc_indx1")
     sql("create table carbon_table1 stored as carbondata as select last(c_phone) as a,last(c_mktsegment) as b from carbon_table where c_mktsegment like 'BU%'")
-    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989","BUILDING")))
+    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989", "BUILDING")))
     sql("drop table if exists carbon_table1")
   }
 
@@ -202,7 +202,7 @@ class TestCTASWithIndex extends QueryTest with BeforeAndAfterAll{
   }
 
   test("test ctas with carbon table with SI having cast of UDF functions") {
-    if(SparkUtil.isSparkVersionEqualTo("2.3")) {
+    if (SparkUtil.isSparkVersionEqualTo("2.3")) {
       sql("drop table if exists carbon_table1")
       val query =
         "select cast(approx_count_distinct(empname) as string) as c1, cast(approx_count_distinct" +
@@ -334,7 +334,8 @@ class TestCTASWithIndex extends QueryTest with BeforeAndAfterAll{
     df = sql("explain extended select c_phone from carbon_table where c_phone = '25-989-741-2989' union all select c_phone from carbon_table  where c_phone = '25-989-741-2989'").collect()
     df(0).getString(0).contains("default.sc_indx1")
     sql("create table carbon_table1 stored as carbondata as select c_phone from carbon_table where c_phone = '25-989-741-2989' union all select c_phone from carbon_table  where c_phone = '25-989-741-2989'")
-    checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989"),Row("25-989-741-2989")))
+    checkAnswer(sql("select * from carbon_table1"),
+      Seq(Row("25-989-741-2989"), Row("25-989-741-2989")))
     sql("drop table if exists carbon_table1")
   }
 
@@ -346,4 +347,5 @@ class TestCTASWithIndex extends QueryTest with BeforeAndAfterAll{
     checkAnswer(sql("select * from carbon_table1"), Seq(Row("25-989-741-2989", "25-989-741-2989")))
     sql("drop table if exists carbon_table1")
   }
+  // scalastyle:on lineLength
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCacheOperationsForSI.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCacheOperationsForSI.scala
index be65f15..c380811 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCacheOperationsForSI.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCacheOperationsForSI.scala
@@ -25,9 +25,9 @@ import org.apache.spark.sql.CarbonEnv
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.index.CarbonIndexUtil
 import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.cache.CacheProvider
-import org.scalatest.BeforeAndAfterAll
 
 class TestCacheOperationsForSI extends QueryTest with BeforeAndAfterAll {
 
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonJoin.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonJoin.scala
index 69ba00a..48335c1 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonJoin.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonJoin.scala
@@ -16,8 +16,8 @@
  */
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.apache.spark.sql.test.util.QueryTest
 import org.apache.spark.sql.{CarbonEnv, Row}
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 class TestCarbonJoin extends QueryTest with BeforeAndAfterAll {
@@ -37,7 +37,8 @@ class TestCarbonJoin extends QueryTest with BeforeAndAfterAll {
 
     val df2 = sql("select id as f91 from table1")
     df2.createOrReplaceTempView("tempTable_2")
-    sql("select t1.f91 from tempTable_2 t1, ptable t2 where t1.f91 = t2.pid ").write.saveAsTable("result")
+    sql("select t1.f91 from tempTable_2 t1, ptable t2 where t1.f91 = t2.pid ").write
+      .saveAsTable("result")
     checkAnswer(sql("select count(*) from result"), Seq(Row(1)))
     checkAnswer(sql("select * from result"), Seq(Row("person")))
 
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexForCleanAndDeleteSegment.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexForCleanAndDeleteSegment.scala
index a2f7ade..bd79cef 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexForCleanAndDeleteSegment.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexForCleanAndDeleteSegment.scala
@@ -40,11 +40,12 @@ class TestCreateIndexForCleanAndDeleteSegment extends QueryTest with BeforeAndAf
         "projectjoindate Timestamp, projectenddate Timestamp, attendance int, " +
         "utilization int,salary int) STORED AS carbondata")
 
-    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
-    "TABLE delete_segment_by_id OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE delete_segment_by_id " +
+        "OPTIONS('DELIMITER'=',','BAD_RECORDS_LOGGER_ENABLE'='FALSE','BAD_RECORDS_ACTION'='FORCE')")
 
 
-    sql("create index index_no_dictionary on table delete_segment_by_id (workgroupcategoryname, empname) AS 'carbondata'")
+    sql("create index index_no_dictionary on table delete_segment_by_id (" +
+        "workgroupcategoryname, empname) AS 'carbondata'")
 
     sql("delete from table delete_segment_by_id where segment.id IN(0)")
 
@@ -65,7 +66,8 @@ class TestCreateIndexForCleanAndDeleteSegment extends QueryTest with BeforeAndAf
 //        "TBLPROPERTIES('DICTIONARY_EXCLUDE'='empname')")
 //
 //    sql("LOAD DATA LOCAL INPATH './src/test/resources/data.csv' INTO " +
-//        "TABLE clean_files_test OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\"', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
+//        "TABLE clean_files_test OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\"',
+//        'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
 //
 //    sql("drop index if exists index_no_dictionary on clean_files_test")
 //
@@ -76,7 +78,8 @@ class TestCreateIndexForCleanAndDeleteSegment extends QueryTest with BeforeAndAf
 //    sql("clean files for table clean_files_test")
 //
 //    val indexTable = CarbonMetadata.getInstance().getCarbonTable("default_index_no_dictionary")
-//    val carbonTablePath: CarbonTablePath = CarbonStorePath.getCarbonTablePath(indexTable.getStorePath, indexTable.getCarbonTableIdentifier)
+//    val carbonTablePath: CarbonTablePath = CarbonStorePath.getCarbonTablePath(
+//    indexTable.getStorePath, indexTable.getCarbonTableIdentifier)
 //    val dataDirectoryPath: String = carbonTablePath.getCarbonDataDirectoryPath("0", "0")
 //    if (CarbonUtil.isFileExists(dataDirectoryPath)) {
 //      assert(false)
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala
index 00fe37d..72a1a94 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala
@@ -14,16 +14,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.scalatest.BeforeAndAfterAll
+package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.apache.carbondata.core.util.CarbonProperties
-import scala.collection.JavaConverters._
+import java.io.File
+import java.util.UUID
 
 import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
 
-import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.format.TableInfo
 
 /**
@@ -31,20 +31,15 @@ import org.apache.carbondata.format.TableInfo
  */
 class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
 
+  var path: String = s"/tmp/parquet${UUID.randomUUID().toString}"
+
   override def beforeAll {
-    sql("drop table if exists carbon")
+    dropIndexAndTable()
     sql("CREATE table carbon (empno string, empname String, " +
         "designation String, doj Timestamp, workgroupcategory string, " +
         "workgroupcategoryname String, deptno int, deptname String, projectcode int, " +
         "projectjoindate Timestamp, projectenddate Timestamp, attendance int, " +
         "utilization int,salary int) STORED AS carbondata")
-    sql("drop table if exists createindextemptable")
-    sql("drop table if exists createindextemptable1")
-    sql("drop table if exists dropindextemptable")
-    sql("drop table if exists dropindextemptable1")
-    sql(s"DROP DATABASE if  exists temptablecheckDB cascade")
-    sql("drop table if exists stream_si")
-    sql("drop table if exists part_si")
     sql("CREATE TABLE stream_si(c1 string,c2 int,c3 string,c5 string) " +
         "STORED AS carbondata TBLPROPERTIES ('streaming' = 'true')")
     sql("CREATE TABLE part_si(c1 string,c2 int,c3 string,c5 string) PARTITIONED BY (c6 string)" +
@@ -58,9 +53,9 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     } catch {
       case ex: Exception =>
         assert(true)
-    } finally{
+    } finally {
       sql("drop index if exists index_without_parentTable on carbon")
-     }
+    }
   }
 
   test("test create index table on measure column") {
@@ -71,9 +66,9 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
       case ex: Exception =>
         assert(ex.getMessage.equalsIgnoreCase(
           "Secondary Index is not supported for measure column : salary"))
-    } finally{
+    } finally {
       sql("drop index if exists index_on_measure on carbon")
-     }
+    }
   }
 
   test("test create index table on dimension,measure column") {
@@ -84,59 +79,59 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
       case ex: Exception =>
         assert(ex.getMessage.equalsIgnoreCase(
           "Secondary Index is not supported for measure column : salary"))
-    } finally{
+    } finally {
       sql("drop index if exists index_on_measure on carbon")
-     }
+    }
 
   }
 
   test("Test case insensitive create & drop index command") {
     sql("drop INDEX if exists index_case_insensitive ON dEfaUlt.caRbon")
-    sql("CREATE INDEX index_case_insensitive ON TABLE dEfaUlt.cArBon (workgroupcategory) AS 'carbondata'")
+    sql("CREATE INDEX index_case_insensitive ON TABLE dEfaUlt.cArBon (" +
+        "workgroupcategory) AS 'carbondata'")
     sql("drop INDEX index_case_insensitive ON CarBOn")
   }
 
   test("test create index table with indextable col size > parent table key col size") {
     try {
-      sql("create index indexOnCarbon on table carbon (empno,empname,designation,doj,workgroupcategory," +
-          "workgroupcategoryname,deptno,deptname,projectcode,projectjoindate,projectenddate,attendance," +
-          "utilization,salary) AS 'carbondata'")
+      sql("create index indexOnCarbon on table carbon (empno,empname,designation,doj," +
+          "workgroupcategory,workgroupcategoryname,deptno,deptname,projectcode,projectjoindate," +
+          "projectenddate,attendance,utilization,salary) AS 'carbondata'")
       assert(false)
     } catch {
       case ex: Exception =>
         assert(ex.getMessage.equalsIgnoreCase(
           "Secondary Index is not supported for measure column : deptno"))
-    } finally{
+    } finally {
       sql("drop index if exists indexOnCarbon on carbon")
-     }
-
+    }
   }
 
   test("test create index table with duplicate column") {
     try {
-      sql("create index index_on_measure on table carbon (empno,empname,designation,doj,"+
+      sql("create index index_on_measure on table carbon (empno,empname,designation,doj," +
           "workgroupcategory,empno) AS 'carbondata'")
       assert(false)
     } catch {
       case ex: Exception =>
         assert(ex.getMessage.equalsIgnoreCase("Duplicate column name found : empno"))
-    } finally{
+    } finally {
       sql("drop index if exists index_on_measure on carbon")
-     }
-
+    }
   }
 
   test("test create index table on more than one column") {
     try {
       sql("drop index if exists index_more_columns on carbon")
-      sql("create index index_more_columns on table carbon (doj,designation,deptname) AS 'carbondata'")
+      sql("create index index_more_columns on table carbon (" +
+          "doj,designation,deptname) AS 'carbondata'")
       assert(true)
     } catch {
       case ex: Exception =>
         assert(false)
-    } finally{
+    } finally {
       sql("drop index if exists index_more_columns on carbon")
-     }
+    }
   }
 
   test("test create index table with invalid column") {
@@ -147,9 +142,9 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     } catch {
       case ex: Exception =>
         assert(true)
-    } finally{
+    } finally {
       sql("drop index if exists index_with_invalid_column on carbon")
-     }
+    }
   }
 
   test("test create index table with index table name containing invalid characters") {
@@ -182,7 +177,7 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
         assert(true)
     } finally {
       sql("drop index if exists index_first_column on carbon")
-     }
+    }
   }
 
   test("test create index table") {
@@ -195,7 +190,7 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
         assert(false)
     } finally {
       sql("drop index if exists index_1 on carbon")
-     }
+    }
   }
 
   test("test 2 create index with same name") {
@@ -209,7 +204,7 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
         assert(true)
     } finally {
       sql("drop index if exists index_1 on carbon")
-     }
+    }
   }
 
 
@@ -220,7 +215,8 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
       assert(false)
     } catch  {
       case ex: Exception => assert(ex.getMessage.equalsIgnoreCase(
-          "Delete is not permitted on table that contains secondary index [default.carbon]. Drop all indexes and retry"))
+          "Delete is not permitted on table that contains secondary index [default.carbon]. Drop
+          all indexes and retry"))
     }
     sql("drop index if exists indexdelete on carbon")
   } */
@@ -232,7 +228,8 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
       assert(false)
     } catch  {
       case ex: Exception => assert(ex.getMessage.equalsIgnoreCase(
-          "Update is not permitted on table that contains secondary index [default.carbon]. Drop all indexes and retry"))
+          "Update is not permitted on table that contains secondary index [default.carbon]. Drop
+          all indexes and retry"))
     }
     sql("drop index if exists indexupdate on carbon")
   } */
@@ -241,17 +238,19 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     try {
       sql("drop table if exists TCarbonSource")
       sql("drop table if exists TCarbon")
+      // scalastyle:off lineLength
       sql("create table TCarbonSource (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStr [...]
       sql(s"LOAD DATA INPATH '$resourcesPath/100_olap.csv' INTO table TCarbonSource options ('DELIMITER'=',', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activeci [...]
       sql("create table TCarbon (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet st [...]
+      // scalastyle:on lineLength
       sql("create index index_on_insert on table TCarbon (deviceColor) AS 'carbondata'")
       sql("insert into index_on_insert select * from TCarbonSource")
       assert(false)
-    } catch  {
+    } catch {
       case ex: Exception => assert(true)
     } finally {
       sql("drop index if exists index_on_insert on TCarbon")
-     }
+    }
   }
 
   test("test create one index and compare the results") {
@@ -262,19 +261,21 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
         "projectjoindate Timestamp, projectenddate Timestamp, attendance int, " +
         "utilization int,salary int) STORED AS CARBONDATA")
     sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
-        "TABLE carbontable OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
+        "TABLE carbontable OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', " +
+        "'BAD_RECORDS_ACTION'='FORCE')")
     val withoutIndex =
-      sql("select empno from carbontable where empname = 'ayushi' or empname = 'krithin' or empname = 'madhan'")
-        .collect().toSeq
-    sql("create index empnameindex on table carbontable (workgroupcategoryname,empname) AS 'carbondata'")
+      sql("select empno from carbontable where empname = 'ayushi' or " +
+          "empname = 'krithin' or empname = 'madhan'").collect().toSeq
+    sql("create index empnameindex on table carbontable (" +
+      "workgroupcategoryname,empname) AS 'carbondata'")
 
-    checkAnswer(sql("select empno from carbontable where empname = 'ayushi' or empname = 'krithin' or empname = 'madhan'"),
-      withoutIndex)
+    checkAnswer(sql("select empno from carbontable where empname = 'ayushi' or " +
+                    "empname = 'krithin' or empname = 'madhan'"), withoutIndex)
     sql("drop index if exists empnameindex on carbontable")
     sql("drop table if exists carbontable")
   }
 
-  test("test create table with column name as positionID"){
+  test("test create table with column name as positionID") {
     try {
       sql("CREATE table carbontable (empno int, positionID String, " +
           "designation String, doj Timestamp, workgroupcategory int, " +
@@ -282,12 +283,12 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
           "projectjoindate Timestamp, projectenddate Timestamp, attendance int, " +
           "utilization int,salary int) STORED AS CARBONDATA " +
           "TBLPROPERTIES('DICTIONARY_EXCLUDE'='empname')")
-    }catch  {
+    } catch {
       case ex: Exception => assert(true)
     }
   }
 
-  test("test create table with column name as positionReference"){
+  test("test create table with column name as positionReference") {
     try {
       sql("CREATE table carbontable (empno int, positionReference String, " +
           "designation String, doj Timestamp, workgroupcategory int, " +
@@ -295,15 +296,16 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
           "projectjoindate Timestamp, projectenddate Timestamp, attendance int, " +
           "utilization int,salary int) STORED AS CARBONDATA " +
           "TBLPROPERTIES('DICTIONARY_EXCLUDE'='empname')")
-    }catch  {
+    } catch {
       case ex: Exception => assert(true)
     }
   }
 
   test("create index on temp table") {
+    emptyParquetFolder()
     sql(
       "CREATE temporary table createindextemptable(id int,name string,city string,age int) using " +
-      "parquet options(path='/tmp')")
+      s"parquet options(path='${path}')")
     sql("insert into createindextemptable values(1,'string','string',3)")
     sql("insert into createindextemptable values(1,'string','string',3)")
     sql("insert into createindextemptable values(1,'string','string',3)")
@@ -322,11 +324,12 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
   test("create index on temp table when carbon table exists") {
     sql(s"CREATE DATABASE if not exists temptablecheckDB")
     sql("USE temptablecheckDB")
-    sql(
-      "CREATE TABLE createindextemptable1(id int, name string, city string, age int) STORED AS CARBONDATA ")
+    sql("CREATE TABLE createindextemptable1(" +
+        "id int, name string, city string, age int) STORED AS CARBONDATA ")
+    emptyParquetFolder()
     sql(
       "CREATE temporary table createindextemptable1(id int,name string,city string,age int) using" +
-      " parquet options(path='/tmp')")
+      s" parquet options(path='${path}')")
     sql("insert into createindextemptable1 values(1,'string','string',3)")
     sql("insert into createindextemptable1 values(1,'string','string',3)")
     sql("insert into createindextemptable1 values(1,'string','string',3)")
@@ -345,8 +348,8 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     sql("insert into temptablecheckDB.createindextemptable1 select 1,'string','string',3")
     sql("insert into temptablecheckDB.createindextemptable1 select 1,'string','string',3")
     try {
-      sql(
-        "create index empnameindex on table temptablecheckDB.createindextemptable1 (city) AS 'carbondata'")
+      sql("create index empnameindex on table temptablecheckDB.createindextemptable1 (" +
+          "city) AS 'carbondata'")
       assert(true)
     } catch {
       case e: Exception =>
@@ -374,13 +377,17 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     val thrown = intercept[Exception] {
       sql("create index temp_ind on table si_table (longstr) AS 'carbondata'")
     }
-    assert(thrown.getMessage.contains("one or more index columns specified contains long string column in table default.si_table. SI cannot be created on long string columns."))
+    assert(thrown.getMessage
+      .contains(
+        "one or more index columns specified contains long string column in table default" +
+        ".si_table. SI cannot be created on long string columns."))
   }
 
   test("drop index on temp table") {
+    emptyParquetFolder()
     sql(
       "CREATE temporary table dropindextemptable(id int,name string,city string,age int) using " +
-      "parquet options(path='/tmp')")
+      s"parquet options(path='${path}')")
     sql("insert into dropindextemptable values(1,'string','string',3)")
     sql("insert into dropindextemptable values(1,'string','string',3)")
     sql("insert into dropindextemptable values(1,'string','string',3)")
@@ -389,26 +396,27 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
       sql("drop index if exists empnameindex on dropindextemptable")
     } catch {
       case _: Exception =>
-       assert(false)
+        assert(false)
     }
   }
 
   test("drop index on temp table when carbon table exists") {
     sql(s"CREATE DATABASE if not exists temptablecheckDB")
     sql("USE temptablecheckDB")
-    sql(
-      "CREATE TABLE dropindextemptable1(id int, name string, city string, age int) STORED AS CARBONDATA")
+    sql("CREATE TABLE dropindextemptable1(" +
+        "id int, name string, city string, age int) STORED AS CARBONDATA")
+    emptyParquetFolder()
     sql(
       "CREATE temporary table dropindextemptable1(id int,name string,city string,age int) using " +
-      "parquet options(path='/tmp')")
+      s"parquet options(path='${path}')")
     sql("insert into dropindextemptable1 values(1,'string','string',3)")
     sql("insert into dropindextemptable1 values(1,'string','string',3)")
     sql("insert into dropindextemptable1 values(1,'string','string',3)")
     sql("insert into temptablecheckDB.dropindextemptable1 select 1,'string','string',3")
     sql("insert into temptablecheckDB.dropindextemptable1 select 1,'string','string',3")
     sql("insert into temptablecheckDB.dropindextemptable1 select 1,'string','string',3")
-    sql(
-      "create index empnaindex on table temptablecheckDB.dropindextemptable1 (city) AS 'carbondata'")
+    sql("create index empnaindex on table temptablecheckDB.dropindextemptable1 (" +
+        "city) AS 'carbondata'")
     try {
       sql("drop index if exists empnaindex on temptablecheckDB.dropindextemptable1")
       assert(true)
@@ -422,7 +430,8 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     }
   }
 
-  test("test creation of index table 2 times with same name, on error drop and create with same name again") {
+  test("test creation of index table 2 times with same name, on error drop " +
+       "and create with same name again") {
     sql("DROP TABLE IF EXISTS carbon_si_same_name_test")
     sql("DROP INDEX IF EXISTS si_drop_i1 on carbon_si_same_name_test")
     // create table
@@ -448,7 +457,7 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
 
   test("test blocking secondary Index on streaming table") {
     intercept[RuntimeException] {
-      sql("""create index streamin_index on table stream_si(c3) AS 'carbondata'""").show()
+      sql("""create index streamin_index on table stream_si(c3) AS 'carbondata'""").collect()
     }
   }
 
@@ -461,6 +470,7 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
   }
 
   object CarbonMetastore {
+
     import org.apache.carbondata.core.reader.ThriftReader
 
     def readSchemaFileToThriftTable(schemaFilePath: String): TableInfo = {
@@ -481,18 +491,31 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     }
   }
 
+  def emptyParquetFolder(): Unit = {
+    val file = new File(path)
+    FileFactory.deleteAllFilesOfDir(file)
+    file.mkdir()
+  }
+
   override def afterAll: Unit = {
+    dropIndexAndTable()
+    FileFactory.deleteAllFilesOfDir(new File(path))
+  }
+
+  private def dropIndexAndTable(): Unit = {
+    sql("drop index if exists index_case_insensitive on carbon")
     sql("drop table if exists carbon")
     sql("drop table if exists carbontable")
     sql("drop table if exists createindextemptable")
-    sql("drop table if exists createindextemptable1")
+    sql("drop index if exists empnameindex on temptablecheckDB.createindextemptable1")
+    sql("drop table if exists temptablecheckDB.createindextemptable1")
     sql("drop table if exists dropindextemptable")
-    sql("drop table if exists dropindextemptable1")
-    sql("drop index if exists empnameindex on createindextemptable1")
-    sql(s"DROP DATABASE if  exists temptablecheckDB cascade")
+    sql("drop table if exists temptablecheckDB.dropindextemptable1")
+    sql("drop index if exists si_drop_i1 on carbon_si_same_name_test")
+    sql("drop table if exists carbon_si_same_name_test")
+    sql(s"DROP DATABASE if exists temptablecheckDB cascade")
 
     sql("drop index if exists t_ind1 on test1")
     sql("drop table if exists test1")
   }
-
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
index 156aeb1..d6686a8 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
@@ -17,22 +17,21 @@
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
 import org.apache.spark.sql.{CarbonEnv, Row}
+import org.apache.spark.sql.hive.CarbonRelation
+import org.apache.spark.sql.test.SparkTestQueryExecutor
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatus, SegmentStatusManager}
 import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.spark.sql.hive.CarbonRelation
-import org.apache.spark.sql.test.SparkTestQueryExecutor
-import org.apache.spark.sql.test.util.QueryTest
-
 import org.apache.carbondata.core.util.path.CarbonTablePath
 
 /**
  * test cases for testing creation of index table with load and compaction
  */
 class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfterAll {
-
+  // scalastyle:off lineLength
   override def beforeAll {
     sql("drop table if exists index_test")
     sql("CREATE TABLE index_test (integer_column1 string,date1 timestamp,date2 timestamp,ID String,string_column1 string,string_column2 string) STORED AS CARBONDATA")
@@ -115,7 +114,7 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
         "TABLE compaction_load OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\"', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
 
     sql("alter table compaction_load compact 'major'")
-    
+
     sql("drop index if exists index_no_dictionary2 on compaction_load")
 
     sql("create index index_no_dictionary2 on table compaction_load (workgroupcategoryname,empname) AS 'carbondata'")
@@ -138,7 +137,7 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
 //        "TABLE auto_compaction_index OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\"', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
 //    sql("LOAD DATA LOCAL INPATH './src/test/resources/data.csv' INTO " +
 //        "TABLE auto_compaction_index OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\"', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
-//   
+//
 //    sql("drop index if exists index_no_dictionary3 on auto_compaction_index")
 //    sql("create index index_no_dictionary3 on table auto_compaction_index (empname) AS 'carbondata'")
 //
@@ -148,7 +147,7 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
 //
 //    sql("drop table if exists auto_compaction_index")
 //  }
-  
+
   test("test create index with jumbled order of parent table cols") {
     sql("drop index if exists indextable05 ON index_test")
     sql("CREATE INDEX indextable05 ON TABLE index_test (string_column2,id,date2,date1) AS 'carbondata'")
@@ -174,7 +173,7 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
     sql("drop table if exists seccust")
   }
 
-  /*test("Load once and create sec index and load again and do select ") {
+  /* test("Load once and create sec index and load again and do select ") {
     sql("drop table if exists seccust1")
     sql("create table seccust1 (id string, c_custkey string, c_name string, c_address string, c_nationkey string, c_phone string,c_acctbal decimal, c_mktsegment string, c_comment string) STORED AS carbondata")
     sql("load data  inpath './src/test/resources/secindex/firstunique.csv' into table seccust1 options('DELIMITER'='|','QUOTECHAR'='\"','FILEHEADER'='id,c_custkey,c_name,c_address,c_nationkey,c_phone,c_acctbal,c_mktsegment,c_comment')")
@@ -184,7 +183,7 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
     checkAnswer(sql("select c_phone from sc_indx5"),
       Seq(Row("25-989-741-2989"),Row("25-989-741-2989")))
     sql("drop table if exists seccust1")
-  }*/
+  } */
 
   test("test SI with auto compaction and check that table status is changed to compacted") {
     try {
@@ -263,5 +262,5 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
     sql("drop table if exists table_with_flat")
     sql("drop table if exists table1")
   }
-
+  // scalastyle:on lineLength
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelForORFilterPushDown.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelForORFilterPushDown.scala
index 3b021b0..f9757fe 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelForORFilterPushDown.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelForORFilterPushDown.scala
@@ -16,20 +16,19 @@
  */
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils
-.isFilterPushedDownToSI;
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils.isFilterPushedDownToSI
 
 /**
  * test class for verifying the OR filter pushDown filter to SI table
  */
 class TestIndexModelForORFilterPushDown extends QueryTest with BeforeAndAfterAll {
-
+  // scalastyle:off lineLength
   private def dropTables: Unit = {
     sql("drop index if exists index_i1 on or_filter_pushDownValidation")
     sql("drop index if exists index_i2 on or_filter_pushDownValidation")
@@ -205,7 +204,7 @@ class TestIndexModelForORFilterPushDown extends QueryTest with BeforeAndAfterAll
     val query = sql(
       "select count(*) from or_filter_pushDownValidation where designation='SE' OR empname='pramod' OR workgroupcategoryname='developer' OR deptno='14' and deptname='network'")
     val df = query.queryExecution.sparkPlan
-    query.show(false)
+    query.collect()
     if (!isFilterPushedDownToSI(df)) {
       assert(false)
     } else {
@@ -220,7 +219,7 @@ class TestIndexModelForORFilterPushDown extends QueryTest with BeforeAndAfterAll
     val query = sql(
       "select count(*) from default.or_filter_pushDownValidation where designation='SE' OR empname='pramod' OR workgroupcategoryname='developer' OR deptno='14' and deptname='network'")
     val df = query.queryExecution.sparkPlan
-    query.show(false)
+    query.collect()
     if (!isFilterPushedDownToSI(df)) {
       assert(false)
     } else {
@@ -234,4 +233,5 @@ class TestIndexModelForORFilterPushDown extends QueryTest with BeforeAndAfterAll
   override def afterAll: Unit = {
     dropTables
   }
+  // scalastyle:on lineLength
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithAggQueries.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithAggQueries.scala
index cf2bb80..a1336f0 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithAggQueries.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithAggQueries.scala
@@ -16,12 +16,13 @@
  */
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.apache.carbondata.common.exceptions.sql.MalformedIndexCommandException
-import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.spark.sql.{CarbonEnv, Row}
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.common.exceptions.sql.MalformedIndexCommandException
+import org.apache.carbondata.core.datastore.impl.FileFactory
+
 /**
  * test cases with secondary index and agg queries
  */
@@ -85,7 +86,8 @@ class TestIndexModelWithAggQueries extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql(
       "SELECT sum(cr_return_amount) AS returns, sum(cr_net_loss) AS profit_loss FROM " +
       "catalog_returns, date_dim WHERE cr_returned_date_sk = d_date_sk AND d_date BETWEEN cast" +
-      "('2000-08-03]' AS DATE) AND (cast('2000-08-03' AS DATE) + INTERVAL 30 days)"),Seq(Row(null,null)))
+      "('2000-08-03]' AS DATE) AND (cast('2000-08-03' AS DATE) + INTERVAL 30 days)"),
+      Seq(Row(null, null)))
   }
 
   test("pushing down filter for broadcast join with correct record") {
@@ -108,9 +110,8 @@ class TestIndexModelWithAggQueries extends QueryTest with BeforeAndAfterAll {
 
   test("test index on SI table") {
     sql("drop table if exists test_si_1")
-    sql(
-      "CREATE TABLE test_si_1 (id int,name string,salary float,dob date,address string) STORED AS " +
-      "carbondata")
+    sql("CREATE TABLE test_si_1 (id int,name string,salary float,dob date,address string) " +
+        "STORED AS carbondata")
     sql("insert into test_si_1 select 1,'aa',23423.334,'2009-09-09','df'")
     sql("insert into test_si_1 select 2,'bb',4454.454,'2009-09-09','bang'")
     sql(
@@ -131,12 +132,16 @@ class TestIndexModelWithAggQueries extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists cast_si")
     sql("drop table if exists ctas_cast")
     sql("drop index if exists index5 on cast_si")
-    sql("create table if not exists cast_si (RECORD_ID bigint,CDR_ID string,LOCATION_CODE int,USER_NUM string) STORED AS carbondata " +
+    sql("create table if not exists cast_si (" +
+        "RECORD_ID bigint,CDR_ID string,LOCATION_CODE int,USER_NUM string) STORED AS carbondata " +
         "TBLPROPERTIES('table_blocksize'='256','SORT_SCOPE'='NO_SORT')")
-    sql("create index index5 on table cast_si(USER_NUM) AS 'carbondata' properties('table_blocksize' = '256')")
+    sql("create index index5 on table cast_si(" +
+        "USER_NUM) AS 'carbondata' properties('table_blocksize' = '256')")
     sql("insert into cast_si select  1, 'gb3e5135-5533-4ee7-51b3-F61F1355b471', 2, '26557544541'")
-    sql("create table ctas_cast select cast(location_code as string) as location_code from cast_si where ((user_num in ('26557544541')))")
-    checkAnswer(sql("select count(*) from cast_si where ((user_num in ('26557544541')))"), sql("select count(*) from ctas_cast"))
+    sql("create table ctas_cast select cast(location_code as string) " +
+        "as location_code from cast_si where ((user_num in ('26557544541')))")
+    checkAnswer(sql("select count(*) from cast_si where ((user_num in ('26557544541')))"),
+      sql("select count(*) from ctas_cast"))
   }
 
   test("test clean files for index for marked for delete segments") {
@@ -158,7 +163,8 @@ class TestIndexModelWithAggQueries extends QueryTest with BeforeAndAfterAll {
     }
     sql("clean files for table clean")
     val mainTable = CarbonEnv.getCarbonTable(Some("default"), "clean")(sqlContext.sparkSession)
-    val indexTable = CarbonEnv.getCarbonTable(Some("default"), "clean_index")(sqlContext.sparkSession)
+    val indexTable = CarbonEnv.getCarbonTable(Some("default"), "clean_index")(
+      sqlContext.sparkSession)
     assert(!FileFactory.isFileExist(mainTable.getSegmentPath("0")))
     assert(!FileFactory.isFileExist(indexTable.getSegmentPath("0")))
     assert(FileFactory.isFileExist(mainTable.getSegmentPath("1")))
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithIUD.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithIUD.scala
index 6010d37..6f1b709 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithIUD.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithIUD.scala
@@ -32,27 +32,18 @@ import org.apache.carbondata.sdk.file.{CarbonWriter, Schema}
 class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
-    sql("drop table if exists dest")
-    sql("drop table if exists source")
-    sql("drop table if exists test")
-    sql("drop table if exists sitestmain")
-    sql("drop table if exists dest1")
-    sql("drop table if exists dest_parquet")
-    sql("drop table if exists dest_parquet1")
+    dropIndexAndTable()
   }
 
   test("test index with IUD delete all_rows") {
-
     sql(
       "create table dest (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
     sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table dest""")
-    sql("drop index if exists index_dest1 on dest")
     sql("create index index_dest1 on table dest (c3) AS 'carbondata'")
-    sql("drop index if exists index_dest2 on dest")
-    //create second index table , result should be same
+    // create second index table , result should be same
     sql("create index index_dest2 on table dest (c3,c5) AS 'carbondata'")
     // delete all rows in the segment
-    sql("delete from dest d where d.c2 not in (56)").show
+    sql("delete from dest d where d.c2 not in (56)").collect()
     checkAnswer(
       sql("""select c3 from dest"""),
       sql("""select c3 from index_dest1""")
@@ -61,7 +52,7 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
       sql("""select c3,c5 from dest"""),
       sql("""select c3,c5 from index_dest2""")
     )
-    sql("show segments for table index_dest1").show(false)
+    sql("show segments for table index_dest1").collect()
     assert(sql("show segments for table index_dest1").collect()(0).get(1).toString()
              .equals(SegmentStatus.MARKED_FOR_DELETE.getMessage))
     assert(sql("show segments for table index_dest2").collect()(0).get(1).toString()
@@ -70,7 +61,7 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
     // execute clean files
     sql("clean files for table dest")
 
-    sql("show segments for table index_dest2").show()
+    sql("show segments for table index_dest2").collect()
     val exception_index_dest1 = intercept[IndexOutOfBoundsException] {
       assert(sql("show segments for table index_dest1").collect()(0).get(1).toString()
         .equals(SegmentStatus.MARKED_FOR_DELETE.getMessage))
@@ -80,7 +71,7 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
         .equals(SegmentStatus.MARKED_FOR_DELETE.getMessage))
     }
 
-    //load again and check result
+    // load again and check result
     sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table dest""")
     checkAnswer(
       sql("""select c3 from dest"""),
@@ -98,11 +89,10 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
     sql(
       "create table source (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
     sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table source""")
-    sql("drop index if exists index_source1 on source")
     sql("create index index_source1 on table source (c5) AS 'carbondata'")
     // delete (5-1)=4 rows
     try {
-      sql("""delete from source d where d.c2 in (1,2,3,4)""").show
+      sql("""delete from source d where d.c2 in (1,2,3,4)""").collect()
       assert(false)
     }
     catch {
@@ -116,7 +106,6 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
         }
     }
     // crete second index table
-    sql("drop index if exists index_source2 on source")
     sql("create index index_source2 on table source (c3) AS 'carbondata'")
     // result should be same
       checkAnswer(
@@ -124,7 +113,7 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
         sql("""select c3 from index_source2""")
       )
     sql("clean files for table source")
-    sql("show segments for table index_source2").show()
+    sql("show segments for table index_source2").collect()
     assert(sql("show segments for table index_source2").collect()(0).get(1).toString()
       .equals(SegmentStatus.SUCCESS.getMessage))
   }
@@ -133,10 +122,9 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
     sql(
       "create table test (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
     sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table test""")
-    sql("drop index if exists index_test1 on test")
     sql("create index index_test1 on table test (c3) AS 'carbondata'")
     // delete all rows in the segment
-    sql("delete from test d where d.c2 not in (56)").show
+    sql("delete from test d where d.c2 not in (56)").collect()
     checkAnswer(
       sql(
         "select test.c3, index_test1.c3 from test right join index_test1  on test.c3 =  " +
@@ -145,8 +133,7 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test if secondary index gives correct result on limit query after row deletion") {
-    sql("drop table if exists t10")
-    sql("create table t10(id string, country string) STORED AS carbondata").show()
+    sql("create table t10(id string, country string) STORED AS carbondata").collect()
     sql("create index si3 on table t10(country) AS 'carbondata'")
     sql(
       s" load data INPATH '$resourcesPath/secindex/IUD/sample_1.csv' INTO table t10 options " +
@@ -155,46 +142,43 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
       s" load data INPATH '$resourcesPath/secindex/IUD/sample_2.csv' INTO table t10 options " +
       "('DELIMITER'=',','FILEHEADER'='id,country')")
     try {
-      sql("delete from t10 where id in (1,2)").show()
+      sql("delete from t10 where id in (1,2)").collect()
     assert(false)
     }
     catch {
       case ex: Exception => assert(true)
     }
-    sql(" select *  from t10").show()
-    checkAnswer(sql(" select country from t10 where country = 'china' order by id limit 1"), Row("china"))
+    sql(" select *  from t10").collect()
+    checkAnswer(sql(" select country from t10 where country = 'china' order by id limit 1"),
+      Row("china"))
   }
 
   test("test index with IUD delete and compaction") {
-    sql("drop table if exists test")
     sql(
-      "create table test (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
-    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table test""")
-    sql("drop index if exists index_test1 on test")
-    sql("create index index_test1 on table test (c3) AS 'carbondata'")
-    sql("delete from test d where d.c2 = '1'").show
-    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table test""")
-    sql("alter table test compact 'major'")
+      "create table test2 (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
+    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table test2""")
+    sql("create index index_test2 on table test2 (c3) AS 'carbondata'")
+    sql("delete from test2 d where d.c2 = '1'").collect()
+    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table test2""")
+    sql("alter table test2 compact 'major'")
     // delete all rows in the segment
-    sql("delete from test d where d.c2 not in (56)").show
+    sql("delete from test2 d where d.c2 not in (56)").collect()
     checkAnswer(
       sql(
-        "select test.c3, index_test1.c3 from test right join index_test1  on test.c3 =  " +
-        "index_test1.c3"),
+        "select test2.c3, index_test2.c3 from test2 right join index_test2  on test2.c3 =  " +
+        "index_test2.c3"),
       Seq())
   }
 
   test("test set segments with SI") {
-    sql("drop table if exists dest")
-    sql("create table dest (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
-    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table dest""")
-    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table dest""")
-    sql("drop index if exists index_dest1 on dest")
-    sql("create index index_dest1 on table dest (c3) AS 'carbondata'")
-    checkAnswer(sql("select count(*) from dest"), Seq(Row(10)))
-    sql("set carbon.input.segments.default.dest=0")
-    checkAnswer(sql("select count(*) from dest"), Seq(Row(5)))
-    checkAnswer(sql("select count(*) from index_dest1"), Seq(Row(5)))
+    sql("create table dest2 (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
+    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table dest2""")
+    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO table dest2""")
+    sql("create index index_dest21 on table dest2 (c3) AS 'carbondata'")
+    checkAnswer(sql("select count(*) from dest2"), Seq(Row(10)))
+    sql("set carbon.input.segments.default.dest2=0")
+    checkAnswer(sql("select count(*) from dest2"), Seq(Row(5)))
+    checkAnswer(sql("select count(*) from index_dest21"), Seq(Row(5)))
   }
 
   test("Test block secondary index creation on external table") {
@@ -229,237 +213,249 @@ class TestIndexModelWithIUD extends QueryTest with BeforeAndAfterAll {
     } catch {
       case ex: Throwable => throw new RuntimeException(ex)
     }
-    sql("drop table if exists test")
-    sql(s"create external table test STORED AS carbondata location '$writerPath'")
+    sql(s"create external table test3 STORED AS carbondata location '$writerPath'")
     val exception = intercept[MalformedCarbonCommandException] {
-      sql("create index idx on table test(cert_no) AS 'carbondata'")
+      sql("create index idx_test3 on table test3(cert_no) AS 'carbondata'")
     }
     assert(exception.getMessage
       .contains("Unsupported operation on non transactional table"))
   }
 
   test("test SI with Union and Union All with same table") {
-    sql("drop table if exists dest")
-    sql("drop table if exists dest_parquet")
-    sql("create table dest (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
-    sql("insert into dest values('a',1,'abc','b')")
-    sql("create table dest_parquet stored as parquet select * from dest")
-    sql("create index index_dest on table dest (c3) AS 'carbondata'")
+    sql("create table dest3 (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
+    sql("insert into dest3 values('a',1,'abc','b')")
+    sql("create table dest3_parquet stored as parquet select * from dest3")
+    sql("create index index_dest3 on table dest3 (c3) AS 'carbondata'")
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union select c3 from dest  where c3 = 'abc'"),
-      sql("select c3 from dest_parquet where c3 = 'abc' union select c3 from " +
-          "dest_parquet where c3 = 'abc'"))
-    checkAnswer(sql("select c3 from dest where c3 = 'abc' union all " +
-                    "select c3 from dest where c3 = 'abc'"),
-      sql("select c3 from dest_parquet where c3 = 'abc' union all select c3 from " +
-          "dest_parquet  where c3 = 'abc'"))
-    sql("drop table if exists dest_parquet")
-    sql("drop table if exists dest")
+      "select c3 from dest3 where c3 = 'abc' union select c3 from dest3  where c3 = 'abc'"),
+      sql("select c3 from dest3_parquet where c3 = 'abc' union select c3 from " +
+          "dest3_parquet where c3 = 'abc'"))
+    checkAnswer(sql("select c3 from dest3 where c3 = 'abc' union all " +
+                    "select c3 from dest3 where c3 = 'abc'"),
+      sql("select c3 from dest3_parquet where c3 = 'abc' union all select c3 from " +
+          "dest3_parquet  where c3 = 'abc'"))
   }
 
   test("test SI with Union and Union All with different table") {
-    sql("drop table if exists dest")
-    sql("drop table if exists dest1")
-    sql("drop table if exists dest_parquet")
-    sql("drop table if exists dest_parquet1")
-    sql("create table dest (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
-    sql("insert into dest values('a',1,'abc','b')")
-    sql("create table dest_parquet stored as parquet select * from dest")
-    sql("create table dest_parquet1 stored as parquet select * from dest")
-    sql("create table dest1 STORED AS carbondata select * from dest")
-    sql("create index index_dest on table dest (c3) AS 'carbondata'")
-    sql("create index index_dest1 on table dest1 (c3) AS 'carbondata'")
+    sql("create table dest4 (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
+    sql("insert into dest4 values('a',1,'abc','b')")
+    sql("create table dest4_parquet stored as parquet select * from dest4")
+    sql("create table dest4_parquet1 stored as parquet select * from dest4")
+    sql("create table dest41 STORED AS carbondata select * from dest4")
+    sql("create index index_dest4 on table dest4 (c3) AS 'carbondata'")
+    sql("create index index_dest41 on table dest41 (c3) AS 'carbondata'")
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union select c3 from dest1  where c3 = 'abc'"),
+      "select c3 from dest4 where c3 = 'abc' union select c3 from dest41  where c3 = 'abc'"),
       sql(
-        "select c3 from dest_parquet where c3 = 'abc' union select c3 from " +
-        "dest_parquet1 where c3 = 'abc'"))
-    checkAnswer(sql("select c3 from dest where c3 = 'abc' union all select c3 from dest1 " +
+        "select c3 from dest4_parquet where c3 = 'abc' union select c3 from " +
+        "dest4_parquet1 where c3 = 'abc'"))
+    checkAnswer(sql("select c3 from dest4 where c3 = 'abc' union all select c3 from dest41 " +
                     "where c3 = 'abc'"),
       sql(
-        "select c3 from dest_parquet where c3 = 'abc' union all select c3 " +
-        "from dest_parquet1 where c3 = 'abc'"))
-    sql("drop table if exists dest")
-    sql("drop table if exists dest1")
-    sql("drop table if exists dest_parquet1")
-    sql("drop table if exists dest_parquet")
+        "select c3 from dest4_parquet where c3 = 'abc' union all select c3 " +
+        "from dest4_parquet1 where c3 = 'abc'"))
   }
 
   test("test SI with more than 2 Union and Union All with different table") {
-    sql("drop table if exists dest")
-    sql("drop table if exists dest1")
-    sql("drop table if exists dest_parquet")
-    sql("create table dest (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
-    sql("insert into dest values('a',1,'abc','b')")
-    sql("create table dest_parquet stored as parquet select * from dest")
-    sql("create table dest_parquet1 stored as parquet select * from dest")
-    sql("create table dest1 STORED AS carbondata select * from dest")
-    sql("create index index_dest on table dest (c3) AS 'carbondata'")
-    sql("create index index_dest1 on table dest1 (c3) AS 'carbondata'")
+    sql("create table dest5 (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
+    sql("insert into dest5 values('a',1,'abc','b')")
+    sql("create table dest5_parquet stored as parquet select * from dest5")
+    sql("create table dest5_parquet1 stored as parquet select * from dest5")
+    sql("create table dest51 STORED AS carbondata select * from dest5")
+    sql("create index index_dest5 on table dest5 (c3) AS 'carbondata'")
+    sql("create index index_dest51 on table dest51 (c3) AS 'carbondata'")
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union select c3 from dest1  " +
-      "where c3 = 'abc' union select c3 from dest1  where c3 = 'abc'"),
+      "select c3 from dest5 where c3 = 'abc' union select c3 from dest51  " +
+      "where c3 = 'abc' union select c3 from dest51  where c3 = 'abc'"),
       sql(
-        "select c3 from dest_parquet where c3 = 'abc' union select c3 from dest_parquet1" +
-        " where c3 = 'abc' union select c3 from dest_parquet1  where c3 = 'abc'"))
+        "select c3 from dest5_parquet where c3 = 'abc' union select c3 from dest5_parquet1" +
+        " where c3 = 'abc' union select c3 from dest5_parquet1  where c3 = 'abc'"))
 
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union all select c3 from dest1 " +
-      "where c3 = 'abc' union all select c3 from dest1  where c3 = 'abc'"),
+      "select c3 from dest5 where c3 = 'abc' union all select c3 from dest51 " +
+      "where c3 = 'abc' union all select c3 from dest51  where c3 = 'abc'"),
       sql(
-        "select c3 from dest_parquet where c3 = 'abc' union all select c3 from " +
-        "dest_parquet1 where c3 = 'abc' union all select c3 from dest_parquet1 " +
+        "select c3 from dest5_parquet where c3 = 'abc' union all select c3 from " +
+        "dest5_parquet1 where c3 = 'abc' union all select c3 from dest5_parquet1 " +
         "where c3 = 'abc'"))
-    sql("drop table if exists dest_parquet")
-    sql("drop table if exists dest")
-    sql("drop table if exists dest1")
   }
 
   test("test SI with more than 2 Union and Union All with same table") {
-    sql("drop table if exists dest")
-    sql("drop table if exists dest_parquet")
-    sql("create table dest (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
-    sql("insert into dest values('a',1,'abc','b')")
-    sql("create table dest_parquet stored as parquet select * from dest")
-    sql("create index index_dest on table dest (c3) AS 'carbondata'")
+    sql("create table dest6 (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
+    sql("insert into dest6 values('a',1,'abc','b')")
+    sql("create table dest6_parquet stored as parquet select * from dest6")
+    sql("create index index_dest6 on table dest6 (c3) AS 'carbondata'")
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union select c3 from dest  where c3 = 'abc' " +
-      "union select c3 from dest  where c3 = 'abc'"),
+      "select c3 from dest6 where c3 = 'abc' union select c3 from dest6  where c3 = 'abc' " +
+      "union select c3 from dest6  where c3 = 'abc'"),
       sql(
-        "select c3 from dest_parquet where c3 = 'abc' union select c3 from dest_parquet " +
+        "select c3 from dest6_parquet where c3 = 'abc' union select c3 from dest6_parquet " +
         "where c3 = 'abc'"))
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union all select c3 from dest  where c3 = 'abc' " +
-      "union all select c3 from dest  where c3 = 'abc'"),
+      "select c3 from dest6 where c3 = 'abc' union all select c3 from dest6  where c3 = 'abc' " +
+      "union all select c3 from dest6  where c3 = 'abc'"),
       sql(
-        "select c3 from dest_parquet where c3 = 'abc' union all select c3 from dest_parquet  " +
-        "where c3 = 'abc' union all select c3 from dest_parquet  where c3 = 'abc'"))
-    sql("drop table if exists dest_parquet")
-    sql("drop table if exists dest")
+        "select c3 from dest6_parquet where c3 = 'abc' union all select c3 from dest6_parquet  " +
+        "where c3 = 'abc' union all select c3 from dest6_parquet  where c3 = 'abc'"))
   }
 
   test("test SI with join") {
-    sql("drop table if exists dest")
-    sql("drop table if exists dest_parquet")
-    sql("create table dest (c1 string,c2 int,c3 string,c5 string) STORED AS " +
+    sql("create table dest7 (c1 string,c2 int,c3 string,c5 string) STORED AS " +
         "carbondata")
-    sql("insert into dest values('a',1,'abc','b')")
-    sql("create table dest_parquet stored as parquet select * from dest")
-    sql("create index index_dest on table dest (c3) AS 'carbondata'")
-    checkAnswer(sql("select t1.c3,t2.c3 from dest t1, dest t2 where t1.c3=t2.c3 and t1.c3 = 'abc'"),
-      sql("select t1.c3,t2.c3 from dest_parquet t1, dest t2 where t1.c3=t2.c3 and t1.c3 = 'abc'"))
-    sql("drop table if exists dest")
-    sql("drop table if exists dest_parquet")
+    sql("insert into dest7 values('a',1,'abc','b')")
+    sql("create table dest7_parquet stored as parquet select * from dest7")
+    sql("create index index_dest7 on table dest7 (c3) AS 'carbondata'")
+    checkAnswer(
+      sql("select t1.c3,t2.c3 from dest7 t1, dest7 t2 where t1.c3=t2.c3 and t1.c3 = 'abc'"),
+      sql("select t1.c3,t2.c3 from dest7_parquet t1, dest7 t2 where t1.c3=t2.c3 and t1.c3 = 'abc'"))
   }
 
   test("test SI with Union and Union All with donotPushtoSI operations") {
-    sql("drop table if exists dest")
-    sql("drop table if exists dest_parquet")
-    sql("create table dest (c1 string,c2 int,c3 string,c5 string) STORED AS " +
+    sql("create table dest8 (c1 string,c2 int,c3 string,c5 string) STORED AS " +
         "carbondata")
-    sql("insert into dest values('a',1,'abc','b')")
-    sql("create table dest_parquet stored as parquet select * from dest")
-    sql("create index index_dest on table dest (c3) AS 'carbondata'")
+    sql("insert into dest8 values('a',1,'abc','b')")
+    sql("create table dest8_parquet stored as parquet select * from dest8")
+    sql("create index index_dest8 on table dest8 (c3) AS 'carbondata'")
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union select c3 from dest  where c3 != 'abc'"),
-      sql("select c3 from dest_parquet where c3 = 'abc' union select c3 from " +
-          "dest_parquet where c3 != 'abc'"))
-    checkAnswer(sql("select c3 from dest where c3 = 'abc' union all " +
-                    "select c3 from dest where c3 != 'abc'"),
-      sql("select c3 from dest_parquet where c3 = 'abc' union all select c3 from " +
-          "dest_parquet  where c3 != 'abc'"))
-    checkAnswer(sql(
-      "select c3 from dest where c3 like '%bc' union select c3 from dest  where c3 not like '%bc'"),
-      sql("select c3 from dest_parquet where c3 like '%bc' union select c3 from " +
-          "dest_parquet where c3 not like '%bc'"))
-    checkAnswer(sql("select c3 from dest where c3 like '%bc' union all " +
-                    "select c3 from dest where c3 not like '%bc'"),
-      sql("select c3 from dest_parquet where c3 like '%bc' union all select c3 from " +
-          "dest_parquet  where c3 not like '%bc'"))
-    checkAnswer(sql(
-      "select c3 from dest where c3 in ('abc') union select c3 from dest  where c3 not in ('abc')"),
-      sql("select c3 from dest_parquet where c3 in ('abc') union select c3 from " +
-          "dest_parquet where c3 not in ('abc')"))
-    checkAnswer(sql("select c3 from dest where c3 in ('abc') union all " +
-                    "select c3 from dest where c3 not in ('abc')"),
-      sql("select c3 from dest_parquet where c3 in ('abc') union all select c3 from " +
-          "dest_parquet  where c3 not in ('abc')"))
+      "select c3 from dest8 where c3 = 'abc' union select c3 from dest8  where c3 != 'abc'"),
+      sql("select c3 from dest8_parquet where c3 = 'abc' union select c3 from " +
+          "dest8_parquet where c3 != 'abc'"))
+    checkAnswer(sql("select c3 from dest8 where c3 = 'abc' union all " +
+                    "select c3 from dest8 where c3 != 'abc'"),
+      sql("select c3 from dest8_parquet where c3 = 'abc' union all select c3 from " +
+          "dest8_parquet  where c3 != 'abc'"))
+    checkAnswer(
+      sql("select c3 from dest8 where c3 like '%bc' union select c3 from dest8 " +
+          "where c3 not like '%bc'"),
+      sql("select c3 from dest8_parquet where c3 like '%bc' union select c3 from " +
+          "dest8_parquet where c3 not like '%bc'"))
+    checkAnswer(sql("select c3 from dest8 where c3 like '%bc' union all " +
+                    "select c3 from dest8 where c3 not like '%bc'"),
+      sql("select c3 from dest8_parquet where c3 like '%bc' union all select c3 from " +
+          "dest8_parquet  where c3 not like '%bc'"))
+    checkAnswer(
+      sql("select c3 from dest8 where c3 in ('abc') union select c3 from dest8 " +
+          "where c3 not in ('abc')"),
+      sql("select c3 from dest8_parquet where c3 in ('abc') union select c3 from " +
+          "dest8_parquet where c3 not in ('abc')"))
+    checkAnswer(sql("select c3 from dest8 where c3 in ('abc') union all " +
+                    "select c3 from dest8 where c3 not in ('abc')"),
+      sql("select c3 from dest8_parquet where c3 in ('abc') union all select c3 from " +
+          "dest8_parquet  where c3 not in ('abc')"))
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union select c3 from dest  where ni(c3 = 'abc')"),
-      sql("select c3 from dest_parquet where c3 = 'abc' union select c3 from " +
-          "dest_parquet where c3 = 'abc'"))
-    checkAnswer(sql("select c3 from dest where c3 = 'abc' union all " +
-                    "select c3 from dest where ni(c3 ='abc')"),
-      sql("select c3 from dest_parquet where c3 = 'abc' union all select c3 from " +
-          "dest_parquet  where c3 = 'abc'"))
-    sql("drop table if exists dest_parquet")
-    sql("drop table if exists dest")
+      "select c3 from dest8 where c3 = 'abc' union select c3 from dest8  where ni(c3 = 'abc')"),
+      sql("select c3 from dest8_parquet where c3 = 'abc' union select c3 from " +
+          "dest8_parquet where c3 = 'abc'"))
+    checkAnswer(sql("select c3 from dest8 where c3 = 'abc' union all " +
+                    "select c3 from dest8 where ni(c3 ='abc')"),
+      sql("select c3 from dest8_parquet where c3 = 'abc' union all select c3 from " +
+          "dest8_parquet  where c3 = 'abc'"))
   }
 
-  test("test SI with more than 2 Union and Union All with different table donotPushtoSI operations") {
-    sql("drop table if exists dest")
-    sql("drop table if exists dest1")
-    sql("drop table if exists dest_parquet")
-    sql("drop table if exists dest_parquet1")
-    sql("create table dest (c1 string,c2 int,c3 string,c5 string) STORED AS " +
+  test("test SI with more than 2 Union " +
+       "and Union All with different table donotPushtoSI operations") {
+    sql("create table dest9 (c1 string,c2 int,c3 string,c5 string) STORED AS " +
         "carbondata")
-    sql("insert into dest values('a',1,'abc','b')")
-    sql("create table dest_parquet stored as parquet select * from dest")
-    sql("create table dest_parquet1 stored as parquet select * from dest")
-    sql("create table dest1 STORED AS carbondata select * from dest")
-    sql("create index index_dest on table dest (c3) AS 'carbondata'")
-    sql("create index index_dest1 on table dest1 (c3) AS 'carbondata'")
+    sql("insert into dest9 values('a',1,'abc','b')")
+    sql("create table dest9_parquet stored as parquet select * from dest9")
+    sql("create table dest9_parquet1 stored as parquet select * from dest9")
+    sql("create table dest91 STORED AS carbondata select * from dest9")
+    sql("create index index_dest9 on table dest9 (c3) AS 'carbondata'")
+    sql("create index index_dest91 on table dest91 (c3) AS 'carbondata'")
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union select c3 from dest1  " +
-      "where c3 = 'abc' union select c3 from dest1  where c3 != 'abc'"),
+      "select c3 from dest9 where c3 = 'abc' union select c3 from dest91  " +
+      "where c3 = 'abc' union select c3 from dest91  where c3 != 'abc'"),
       sql(
-        "select c3 from dest_parquet where c3 = 'abc' union select c3 from dest_parquet1" +
-        " where c3 = 'abc' union select c3 from dest_parquet1  where c3 != 'abc'"))
+        "select c3 from dest9_parquet where c3 = 'abc' union select c3 from dest9_parquet1" +
+        " where c3 = 'abc' union select c3 from dest9_parquet1  where c3 != 'abc'"))
 
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union all select c3 from dest1 " +
-      "where c3 = 'abc' union all select c3 from dest1  where c3 != 'abc'"),
+      "select c3 from dest9 where c3 = 'abc' union all select c3 from dest91 " +
+      "where c3 = 'abc' union all select c3 from dest91  where c3 != 'abc'"),
       sql(
-        "select c3 from dest_parquet where c3 = 'abc' union all select c3 from " +
-        "dest_parquet1 where c3 = 'abc' union all select c3 from dest_parquet1 " +
+        "select c3 from dest9_parquet where c3 = 'abc' union all select c3 from " +
+        "dest9_parquet1 where c3 = 'abc' union all select c3 from dest9_parquet1 " +
         "where c3 != 'abc'"))
     checkAnswer(sql(
-      "select c3 from dest where c3 like '%bc' union select c3 from dest1  where c3 not like '%bc'"),
-      sql("select c3 from dest_parquet where c3 like '%bc' union select c3 from " +
-          "dest_parquet1 where c3 not like '%bc'"))
-    checkAnswer(sql("select c3 from dest where c3 like '%bc' union all " +
-                    "select c3 from dest1 where c3 not like '%bc'"),
-      sql("select c3 from dest_parquet where c3 like '%bc' union all select c3 from " +
-          "dest_parquet1  where c3 not like '%bc'"))
+      "select c3 from dest9 where c3 like '%bc' " +
+      "union select c3 from dest91  where c3 not like '%bc'"),
+      sql("select c3 from dest9_parquet where c3 like '%bc' union select c3 from " +
+          "dest9_parquet1 where c3 not like '%bc'"))
+    checkAnswer(sql("select c3 from dest9 where c3 like '%bc' union all " +
+                    "select c3 from dest91 where c3 not like '%bc'"),
+      sql("select c3 from dest9_parquet where c3 like '%bc' union all select c3 from " +
+          "dest9_parquet1  where c3 not like '%bc'"))
     checkAnswer(sql(
-      "select c3 from dest where c3 in ('abc') union select c3 from dest1  where c3 not in ('abc')"),
-      sql("select c3 from dest_parquet where c3 in ('abc') union select c3 from " +
-          "dest_parquet1 where c3 not in ('abc')"))
-    checkAnswer(sql("select c3 from dest where c3 in ('abc') union all " +
-                    "select c3 from dest1 where c3 not in ('abc')"),
-      sql("select c3 from dest_parquet where c3 in ('abc') union all select c3 from " +
-          "dest_parquet1  where c3 not in ('abc')"))
+      "select c3 from dest9 where c3 in ('abc') " +
+      "union select c3 from dest91  where c3 not in ('abc')"),
+      sql("select c3 from dest9_parquet where c3 in ('abc') union select c3 from " +
+          "dest9_parquet1 where c3 not in ('abc')"))
+    checkAnswer(sql("select c3 from dest9 where c3 in ('abc') union all " +
+                    "select c3 from dest91 where c3 not in ('abc')"),
+      sql("select c3 from dest9_parquet where c3 in ('abc') union all select c3 from " +
+          "dest9_parquet1  where c3 not in ('abc')"))
     checkAnswer(sql(
-      "select c3 from dest where c3 = 'abc' union select c3 from dest1  where ni(c3 = 'abc')"),
-      sql("select c3 from dest_parquet where c3 = 'abc' union select c3 from " +
-          "dest_parquet1 where c3 = 'abc'"))
-    checkAnswer(sql("select c3 from dest where c3 = 'abc' union all " +
-                    "select c3 from dest1 where ni(c3 ='abc')"),
-      sql("select c3 from dest_parquet where c3 = 'abc' union all select c3 from " +
-          "dest_parquet1  where c3 = 'abc'"))
-    sql("drop table if exists dest_parquet")
-    sql("drop table if exists dest")
-    sql("drop table if exists dest1")
+      "select c3 from dest9 where c3 = 'abc' union select c3 from dest91  where ni(c3 = 'abc')"),
+      sql("select c3 from dest9_parquet where c3 = 'abc' union select c3 from " +
+          "dest9_parquet1 where c3 = 'abc'"))
+    checkAnswer(sql("select c3 from dest9 where c3 = 'abc' union all " +
+                    "select c3 from dest91 where ni(c3 ='abc')"),
+      sql("select c3 from dest9_parquet where c3 = 'abc' union all select c3 from " +
+          "dest9_parquet1  where c3 = 'abc'"))
   }
 
 
   override def afterAll: Unit = {
+    dropIndexAndTable()
+  }
+
+  private def dropIndexAndTable(): Unit = {
+    sql("drop index if exists index_dest1 on dest")
+    sql("drop index if exists index_dest2 on dest")
     sql("drop table if exists dest")
+    sql("drop index if exists index_source1 on source")
+    sql("drop index if exists index_source2 on source")
     sql("drop table if exists source")
+    sql("drop index if exists index_test1 on test")
     sql("drop table if exists test")
-    sql("drop table if exists sitestmain")
-    sql("drop table if exists dest1")
-    sql("drop table if exists dest_parquet")
-    sql("drop table if exists dest_parquet1")
+    sql("drop index if exists si3 on t10")
+    sql("drop table if exists t10")
+    sql("drop index if exists index_test2 on test2")
+    sql("drop table if exists test2")
+    sql("drop index if exists index_dest21 on dest2")
+    sql("drop table if exists dest2")
+    sql("drop index if exists idx_test3 on test3")
+    sql("drop table if exists test3")
+    sql("drop index if exists index_dest3 on dest3")
+    sql("drop table if exists dest3")
+    sql("drop table if exists dest3_parquet")
+    sql("drop index if exists index_dest4 on dest4")
+    sql("drop table if exists dest4")
+    sql("drop table if exists dest4_parquet")
+    sql("drop table if exists dest4_parquet1")
+    sql("drop index if exists index_dest41 on dest41")
+    sql("drop table if exists dest41")
+    sql("drop index if exists index_dest5 on dest5")
+    sql("drop table if exists dest5")
+    sql("drop table if exists dest5_parquet")
+    sql("drop table if exists dest5_parquet1")
+    sql("drop index if exists index_dest51 on dest51")
+    sql("drop table if exists dest51")
+    sql("drop index if exists index_dest6 on dest6")
+    sql("drop table if exists dest6")
+    sql("drop table if exists dest6_parquet")
+    sql("drop index if exists index_dest7 on dest7")
+    sql("drop table if exists dest7")
+    sql("drop table if exists dest7_parquet")
+    sql("drop index if exists index_dest8 on dest8")
+    sql("drop table if exists dest8")
+    sql("drop table if exists dest8_parquet")
+    sql("drop index if exists index_dest9 on dest9")
+    sql("drop table if exists dest9")
+    sql("drop table if exists dest9_parquet")
+    sql("drop table if exists dest9_parquet1")
+    sql("drop index if exists index_dest91 on dest91")
+    sql("drop table if exists dest91")
   }
+
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithLocalDictionary.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithLocalDictionary.scala
index b06a123..100f1e9 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithLocalDictionary.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithLocalDictionary.scala
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
 import org.apache.spark.sql.test.util.QueryTest
@@ -24,19 +25,23 @@ class TestIndexModelWithLocalDictionary extends QueryTest with BeforeAndAfterAll
     sql("drop table if exists local_sec")
   }
 
-  test("test invalid properties in secondary index creation"){
+  test("test invalid properties in secondary index creation") {
     sql("drop table if exists local_sec")
-    sql("create table local_sec (a string,b string) STORED AS carbondata tblproperties('local_dictionary_enable'='true', 'local_dictionary_exclude'='b','local_dictionary_threshold'='20000')")
+    sql("create table local_sec (a string,b string) STORED AS carbondata tblproperties(" +
+        "'local_dictionary_enable'='true', 'local_dictionary_exclude'='b'," +
+        "'local_dictionary_threshold'='20000')")
     val exception = intercept[Exception] {
-      sql(
-        "create index index1 on table local_sec(b) AS 'carbondata' tblproperties('local_dictionary_enable'='true')")
+      sql("create index index1 on table local_sec(b) AS 'carbondata' " +
+          "tblproperties('local_dictionary_enable'='true')")
     }
-    exception.getMessage.contains("Unsupported Table property in index creation: local_dictionary_enable")
+    exception.getMessage.contains(
+      "Unsupported Table property in index creation: local_dictionary_enable")
   }
 
-  test("test local dictionary for index when main table is disable"){
+  test("test local dictionary for index when main table is disable") {
     sql("drop table if exists local_sec")
-    sql("create table local_sec (a string,b string) STORED AS carbondata tblproperties('local_dictionary_enable'='false')")
+    sql("create table local_sec (a string,b string) STORED AS carbondata " +
+        "tblproperties('local_dictionary_enable'='false')")
     sql("create index index1 on table local_sec(b) AS 'carbondata'")
     checkExistence(sql("DESC FORMATTED index1"), false,
       "Local Dictionary Include")
@@ -44,7 +49,8 @@ class TestIndexModelWithLocalDictionary extends QueryTest with BeforeAndAfterAll
 
   test("test local dictionary for index with default properties when enabled") {
     sql("drop table if exists local_sec")
-    sql("create table local_sec (a string,b string) STORED AS carbondata tblproperties('local_dictionary_enable'='true')")
+    sql("create table local_sec (a string,b string) STORED AS carbondata " +
+        "tblproperties('local_dictionary_enable'='true')")
     sql("create index index1 on table local_sec(b) AS 'carbondata'")
     val descLoc = sql("describe formatted index1").collect
     descLoc.find(_.get(0).toString.contains("Local Dictionary Enabled")) match {
@@ -63,7 +69,9 @@ class TestIndexModelWithLocalDictionary extends QueryTest with BeforeAndAfterAll
 
   test("test local dictionary for index when index column is dictionary excluded") {
     sql("drop table if exists local_sec")
-    sql("create table local_sec (a string,b string) STORED AS carbondata tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='b','local_dictionary_threshold'='20000')")
+    sql("create table local_sec (a string,b string) STORED AS carbondata " +
+        "tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='b'," +
+        "'local_dictionary_threshold'='20000')")
     sql("create index index1 on table local_sec(b) AS 'carbondata'")
     val descLoc = sql("describe formatted index1").collect
     descLoc.find(_.get(0).toString.contains("Local Dictionary Enabled")) match {
@@ -80,9 +88,11 @@ class TestIndexModelWithLocalDictionary extends QueryTest with BeforeAndAfterAll
     }
   }
 
-  test("test local dictionary for index when index column is dictionary excluded, but dictionary is disabled") {
+  test("test local dictionary for index " +
+       "when index column is dictionary excluded, but dictionary is disabled") {
     sql("drop table if exists local_sec")
-    sql("create table local_sec (a string,b string) STORED AS carbondata tblproperties('local_dictionary_exclude'='b','local_dictionary_enable'='false')")
+    sql("create table local_sec (a string,b string) STORED AS carbondata " +
+        "tblproperties('local_dictionary_exclude'='b','local_dictionary_enable'='false')")
     sql("create index index1 on table local_sec(b) AS 'carbondata'")
     checkExistence(sql("DESC FORMATTED index1"), false,
       "Local Dictionary Include")
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithUnsafeColumnPage.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithUnsafeColumnPage.scala
index b54ded6..8f1b5f3 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithUnsafeColumnPage.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexModelWithUnsafeColumnPage.scala
@@ -26,6 +26,7 @@ import org.apache.carbondata.core.util.CarbonProperties
 class TestIndexModelWithUnsafeColumnPage extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
+    drop()
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE, "true")
     sql("drop table if exists testSecondryIndex")
@@ -35,12 +36,16 @@ class TestIndexModelWithUnsafeColumnPage extends QueryTest with BeforeAndAfterAl
   }
 
   test("Test secondry index data count") {
-    checkAnswer(sql("select count(*) from testSecondryIndex_IndexTable")
-    ,Seq(Row(1)))
+    checkAnswer(sql("select count(*) from testSecondryIndex_IndexTable"), Seq(Row(1)))
   }
 
   override def afterAll {
-    sql("drop table if exists testIndexTable")
+    drop()
+  }
+
+  private def drop(): Unit = {
+    sql("drop index if exists testSecondryIndex_IndexTable on testSecondryIndex")
+    sql("drop table if exists testSecondryIndex")
   }
 
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexRepair.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexRepair.scala
index 1a041ad..5d6f816 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexRepair.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexRepair.scala
@@ -16,11 +16,10 @@
  */
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils
-.isFilterPushedDownToSI;
-import org.apache.spark.sql.test.util.QueryTest
+import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils.isFilterPushedDownToSI
 
 /**
  * test cases for testing reindex command on index table/main table/DB level
@@ -28,9 +27,9 @@ import org.apache.spark.sql.test.util.QueryTest
 class TestIndexRepair extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
+    sql("drop index if exists indextable1 on maintable")
+    sql("drop index if exists indextable2 on maintable")
     sql("drop table if exists maintable")
-    sql("drop table if exists indextable1")
-    sql("drop table if exists indextable2")
   }
 
   test("reindex command after deleting segments from SI table") {
@@ -81,6 +80,7 @@ class TestIndexRepair extends QueryTest with BeforeAndAfterAll {
     val postRepairSegments = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE1").count()
     assert(preDeleteSegments == postRepairSegments)
     assert(isFilterPushedDownToSI(df2))
+    sql("drop index if exists indextable1 on test.maintable")
     sql("drop table if exists test.maintable")
     sql("drop database if exists test cascade")
   }
@@ -190,12 +190,12 @@ class TestIndexRepair extends QueryTest with BeforeAndAfterAll {
   }
 
 
-    test("reindex command on database") {
+  test("reindex command on database") {
     sql("drop database if exists test cascade")
     sql("create database test")
     sql("drop table if exists maintable1")
 
-    //table 1
+    // table 1
     sql("CREATE TABLE test.maintable1(a INT, b STRING, c STRING, d STRING) stored as carbondata")
     sql("CREATE INDEX indextable1 on table test.maintable1(c) as 'carbondata'")
     sql("CREATE INDEX indextable2 on table test.maintable1(d) as 'carbondata'")
@@ -238,16 +238,20 @@ class TestIndexRepair extends QueryTest with BeforeAndAfterAll {
     assert(preDeleteSegmentsTableOne == postRepairSegmentsIndexTwo)
     assert(preDeleteSegmentsTableTwo == postRepairSegmentsIndexThree)
     assert(preDeleteSegmentsTableTwo == postRepairSegmentsIndexFour)
+    sql("drop index if exists indextable1 on test.maintable1")
+    sql("drop index if exists indextable2 on test.maintable1")
     sql("drop table if exists test.maintable1")
+    sql("drop index if exists indextable3 on test.maintable2")
+    sql("drop index if exists indextable4 on test.maintable2")
     sql("drop table if exists test.maintable2")
     sql("drop database if exists test cascade")
   }
 
 
   override def afterAll {
+    sql("drop index if exists indextable1 on maintable")
+    sql("drop index if exists indextable2 on maintable")
     sql("drop table if exists maintable")
-    sql("drop table if exists indextable1")
-    sql("drop table if exists indextable2")
   }
 
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexWithIndexModelOnFirstColumnAndSortColumns.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexWithIndexModelOnFirstColumnAndSortColumns.scala
index e85d4a1..caa05e3 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexWithIndexModelOnFirstColumnAndSortColumns.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexWithIndexModelOnFirstColumnAndSortColumns.scala
@@ -20,9 +20,6 @@ import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
 class TestIndexWithIndexModelOnFirstColumnAndSortColumns extends QueryTest with BeforeAndAfterAll {
 
   var count1BeforeIndex : Array[Row] = null
@@ -30,19 +27,22 @@ class TestIndexWithIndexModelOnFirstColumnAndSortColumns extends QueryTest with
   override def beforeAll {
 
     sql("drop table if exists seccust")
+    // scalastyle:off lineLength
     sql("create table seccust (id string, c_custkey string, c_name string, c_address string, c_nationkey string, c_phone string,c_acctbal decimal, c_mktsegment string, c_comment string) " +
         "STORED AS carbondata TBLPROPERTIES ('table_blocksize'='128','SORT_COLUMNS'='c_custkey,c_name','NO_INVERTED_INDEX'='c_nationkey')")
     sql(s"""load data  inpath '${resourcesPath}/secindex/firstunique.csv' into table seccust options('DELIMITER'='|','QUOTECHAR'='"','FILEHEADER'='id,c_custkey,c_name,c_address,c_nationkey,c_phone,c_acctbal,c_mktsegment,c_comment')""")
     sql(s"""load data  inpath '${resourcesPath}/secindex/secondunique.csv' into table seccust options('DELIMITER'='|','QUOTECHAR'='\"','FILEHEADER'='id,c_custkey,c_name,c_address,c_nationkey,c_phone,c_acctbal,c_mktsegment,c_comment')""")
+    // scalastyle:on lineLength
     count1BeforeIndex = sql("select * from seccust where id = '1' limit 1").collect()
     sql("create index sc_indx1 on table seccust(id) AS 'carbondata'")
   }
 
   test("Test secondry index on 1st column and with sort columns") {
-    checkAnswer(sql("select count(*) from seccust where id = '1'"),Row(2))
+    checkAnswer(sql("select count(*) from seccust where id = '1'"), Row(2))
   }
 
   override def afterAll {
-    sql("drop table if exists orders")
+    sql("drop index if exists sc_indx1 on seccust")
+    sql("drop table if exists seccust")
   }
-}
\ No newline at end of file
+}
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestLikeQueryWithIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestLikeQueryWithIndex.scala
index 71df59a..33a60e0 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestLikeQueryWithIndex.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestLikeQueryWithIndex.scala
@@ -21,17 +21,15 @@ import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 /**
-  * Test Class for filter expression query on String datatypes
-  */
+ * Test Class for filter expression query on String datatypes
+ */
 class TestLikeQueryWithIndex extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
     sql("drop table if exists TCarbon")
 
-    sql("CREATE TABLE IF NOT EXISTS TCarbon(ID Int, country String, "+
-          "name String, phonetype String, serialname String) "+
-        "STORED AS carbondata"
-    )
+    sql("CREATE TABLE IF NOT EXISTS TCarbon(ID Int, country String, " +
+        "name String, phonetype String, serialname String) STORED AS carbondata")
     var csvFilePath = s"$resourcesPath/secindex/secondaryIndexLikeTest.csv"
 
     sql(
@@ -47,7 +45,7 @@ class TestLikeQueryWithIndex extends QueryTest with BeforeAndAfterAll {
 
   test("select secondary index like query Contains") {
     val df = sql("select * from TCarbon where name like '%aaa1%'")
-    secondaryIndexTableCheck(df,_.equalsIgnoreCase("TCarbon"))
+    secondaryIndexTableCheck(df, _.equalsIgnoreCase("TCarbon"))
 
     checkAnswer(
       sql("select * from TCarbon where name like '%aaa1%'"),
@@ -58,7 +56,7 @@ class TestLikeQueryWithIndex extends QueryTest with BeforeAndAfterAll {
 
     test("select secondary index like query ends with") {
       val df = sql("select * from TCarbon where name like '%aaa1'")
-      secondaryIndexTableCheck(df,_.equalsIgnoreCase("TCarbon"))
+      secondaryIndexTableCheck(df, _.equalsIgnoreCase("TCarbon"))
 
       checkAnswer(
         sql("select * from TCarbon where name like '%aaa1'"),
@@ -69,7 +67,7 @@ class TestLikeQueryWithIndex extends QueryTest with BeforeAndAfterAll {
 
       test("select secondary index like query starts with") {
         val df = sql("select * from TCarbon where name like 'aaa1%'")
-        secondaryIndexTableCheck(df, Set("insert_index","TCarbon").contains(_))
+        secondaryIndexTableCheck(df, Set("insert_index", "TCarbon").contains(_))
 
         checkAnswer(
           sql("select * from TCarbon where name like 'aaa1%'"),
@@ -78,11 +76,11 @@ class TestLikeQueryWithIndex extends QueryTest with BeforeAndAfterAll {
         )
       }
 
-  def secondaryIndexTableCheck(dataFrame:DataFrame,
-      tableNameMatchCondition :(String) => Boolean): Unit ={
+  def secondaryIndexTableCheck(dataFrame: DataFrame,
+      tableNameMatchCondition: (String) => Boolean): Unit = {
     dataFrame.queryExecution.sparkPlan.collect {
       case bcf: CarbonDatasourceHadoopRelation =>
-        if(!tableNameMatchCondition(bcf.carbonTable.getTableUniqueName)){
+        if (!tableNameMatchCondition(bcf.carbonTable.getTableUniqueName)) {
           assert(true)
         }
     }
@@ -92,4 +90,4 @@ class TestLikeQueryWithIndex extends QueryTest with BeforeAndAfterAll {
     sql("DROP INDEX if exists insert_index ON TCarbon")
     sql("drop table if exists TCarbon")
   }
-}
\ No newline at end of file
+}
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestNIQueryWithIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestNIQueryWithIndex.scala
index 62e96b5..df457f5 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestNIQueryWithIndex.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestNIQueryWithIndex.scala
@@ -16,8 +16,8 @@
  */
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression}
 import org.apache.spark.sql.{CarbonDatasourceHadoopRelation, DataFrame, Row}
+import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression}
 import org.apache.spark.sql.catalyst.planning.PhysicalOperation
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 import org.apache.spark.sql.test.util.QueryTest
@@ -26,7 +26,7 @@ import org.scalatest.BeforeAndAfterAll
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 
 class TestNIQueryWithIndex extends QueryTest with BeforeAndAfterAll{
-
+  // scalastyle:off lineLength
   var count1BeforeIndex : Array[Row] = null
   var count2BeforeIndex : Array[Row] = null
 
@@ -147,7 +147,7 @@ class TestNIQueryWithIndex extends QueryTest with BeforeAndAfterAll{
     try {
       sql("set carbon.si.lookup.partialstring=true")
       val ch21 = sql("select * from seccust where c_phone like '25%989-741-2988'")
-      //startsWith & endsWith so SI -yes
+      // startsWith & endsWith so SI -yes
       assert(checkSIColumnsSize(ch21, 3)) // size = length, startsWith and EndsWith
 
       val ch22 = sql("select count(*) from seccust where c_phone like '%989-741-2988'")
@@ -163,8 +163,10 @@ class TestNIQueryWithIndex extends QueryTest with BeforeAndAfterAll{
       // Query has EqualTo - So SI = Yes
       assert(checkSIColumnsSize(ch24, 1)) // EqualTo
 
-    }finally{
-      sql(s"set carbon.si.lookup.partialstring=${CarbonCommonConstants.ENABLE_SI_LOOKUP_PARTIALSTRING_DEFAULT}")
+    } finally {
+      sql(s"set carbon.si.lookup.partialstring=${
+        CarbonCommonConstants.ENABLE_SI_LOOKUP_PARTIALSTRING_DEFAULT
+      }")
     }
   }
 
@@ -188,14 +190,16 @@ class TestNIQueryWithIndex extends QueryTest with BeforeAndAfterAll{
 
       val ch15 = sql("select count(*) from seccust where c_phone='25-989-741-2988' and c_mktsegment like 'BUI%LDING'")
       // equals on c_phone of I1, I2 & (length & startsWith & endswith) on c_mktsegment of I2 so SI - Yes
-      assert(checkSIColumnsSize(ch15, 3)) //size = EqualTo on c_phone, length, StartsWith
+      assert(checkSIColumnsSize(ch15, 3)) // size = EqualTo on c_phone, length, StartsWith
 
       val ch16 = sql("select * from seccust where c_phone='25-989-741-2988'")
       // Query has EqualTo so SI - Yes
       assert(checkSIColumnsSize(ch16, 1)) // size = EqualTo
 
-    } finally{
-      sql(s"set carbon.si.lookup.partialstring=${CarbonCommonConstants.ENABLE_SI_LOOKUP_PARTIALSTRING_DEFAULT}")
+    } finally {
+      sql(s"set carbon.si.lookup.partialstring=${
+        CarbonCommonConstants.ENABLE_SI_LOOKUP_PARTIALSTRING_DEFAULT
+      }")
     }
   }
 
@@ -219,7 +223,7 @@ class TestNIQueryWithIndex extends QueryTest with BeforeAndAfterAll{
     sql(
       "select designation from testOrderBy where deptname IN ('network', " +
       "'protocol','security') OR workgroupcategoryname IN ('developer','tester','manager') " +
-      "order by designation desc limit 1").show(false)
+      "order by designation desc limit 1").collect()
     sql("drop table if exists testOrderBy")
   }
 
@@ -239,8 +243,9 @@ class TestNIQueryWithIndex extends QueryTest with BeforeAndAfterAll{
       case PhysicalOperation(projects, filters, l: LogicalRelation)
         if l.relation.isInstanceOf[CarbonDatasourceHadoopRelation] =>
         val relation = l.relation.asInstanceOf[CarbonDatasourceHadoopRelation]
-        lazy val ll = filters.map( _.collect {
-          case atr:AttributeReference => atr }
+        lazy val ll = filters.map(_.collect {
+          case atr: AttributeReference => atr
+        }
         ).foldLeft(Seq[Expression]())((cs, s) => cs ++ s)
         relation.carbonTable.isIndexTable && ll.size == size
       case _ => false
@@ -250,4 +255,5 @@ class TestNIQueryWithIndex extends QueryTest with BeforeAndAfterAll{
   override def afterAll: Unit = {
     sql("drop table if exists seccust")
   }
+  // scalastyle:on lineLength
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestRegisterIndexCarbonTable.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestRegisterIndexCarbonTable.scala
index 295c42c..8cec074 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestRegisterIndexCarbonTable.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestRegisterIndexCarbonTable.scala
@@ -35,9 +35,9 @@ class TestRegisterIndexCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("drop database if exists carbon cascade")
   }
 
-  def restoreData(dblocation: String, tableName: String) = {
+  private def restoreData(dblocation: String, tableName: String) = {
     val destination = dblocation + CarbonCommonConstants.FILE_SEPARATOR + tableName
-    val source = dblocation+ "_back" + CarbonCommonConstants.FILE_SEPARATOR + tableName
+    val source = dblocation + "_back" + CarbonCommonConstants.FILE_SEPARATOR + tableName
     try {
       FileUtils.copyDirectory(new File(source), new File(destination))
       FileUtils.deleteDirectory(new File(source))
@@ -48,9 +48,9 @@ class TestRegisterIndexCarbonTable extends QueryTest with BeforeAndAfterAll {
 
     }
   }
-  def backUpData(dblocation: String, tableName: String) = {
+  private def backUpData(dblocation: String, tableName: String) = {
     val source = dblocation + CarbonCommonConstants.FILE_SEPARATOR + tableName
-    val destination = dblocation+ "_back" + CarbonCommonConstants.FILE_SEPARATOR + tableName
+    val destination = dblocation + "_back" + CarbonCommonConstants.FILE_SEPARATOR + tableName
     try {
       FileUtils.copyDirectory(new File(source), new File(destination))
     } catch {
@@ -64,7 +64,8 @@ class TestRegisterIndexCarbonTable extends QueryTest with BeforeAndAfterAll {
     sql("drop database if exists carbon cascade")
     sql(s"create database carbon location '${location}'")
     sql("use carbon")
-    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED AS carbondata""")
+    sql("create table carbon.carbontable (" +
+        "c1 string,c2 int,c3 string,c5 string) STORED AS carbondata")
     sql("insert into carbontable select 'a',1,'aa','aaa'")
     sql("create index index_on_c3 on table carbontable (c3, c5) AS 'carbondata'")
     backUpData(location, "carbontable")
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithAddSegment.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithAddSegment.scala
index 59fbbc2..e2bc7a1 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithAddSegment.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithAddSegment.scala
@@ -19,7 +19,7 @@ package org.apache.carbondata.spark.testsuite.secondaryindex
 import org.apache.spark.sql.CarbonEnv
 import org.apache.spark.sql.secondaryindex.joins.BroadCastSIFilterPushJoin
 import org.apache.spark.sql.test.util.QueryTest
-import org.scalatest.{BeforeAndAfterAll, Ignore}
+import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.metadata.datatype.Field
@@ -54,7 +54,9 @@ class TestSIWithAddSegment extends QueryTest with BeforeAndAfterAll {
   }
 
   private def dropTables(): Unit = {
+    sql("drop index if exists maintable_si on maintable")
     sql("drop table if exists maintable")
+    sql("drop index if exists maintable1_si on maintable1")
     sql("drop table if exists maintable1")
   }
 
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithComplexArrayType.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithComplexArrayType.scala
index 859979c..53ac7a2 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithComplexArrayType.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithComplexArrayType.scala
@@ -23,7 +23,7 @@ import org.scalatest.BeforeAndAfterEach
 import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils.isFilterPushedDownToSI
 
 class TestSIWithComplexArrayType extends QueryTest with BeforeAndAfterEach {
-
+  // scalastyle:off lineLength
   override def beforeEach(): Unit = {
     sql("drop table if exists complextable")
   }
@@ -71,10 +71,10 @@ class TestSIWithComplexArrayType extends QueryTest with BeforeAndAfterEach {
     sql("insert into complextable select 2, array('pak'), 'v'")
     sql("insert into complextable select 3, array('china'), 'f'")
     sql("insert into complextable select 4, array('india'),'g'")
-    val result =  sql(" select * from complextable where array_contains(country,'china') and name='f'")
+    val result = sql(" select * from complextable where array_contains(country,'china') and name='f'")
     sql("drop index if exists index_1 on complextable")
     sql("create index index_1 on table complextable(country, name) as 'carbondata'")
-    val df =  sql(" select * from complextable where array_contains(country,'china') and name='f'")
+    val df = sql(" select * from complextable where array_contains(country,'china') and name='f'")
     if (!isFilterPushedDownToSI(df.queryExecution.sparkPlan)) {
       assert(false)
     } else {
@@ -88,10 +88,10 @@ class TestSIWithComplexArrayType extends QueryTest with BeforeAndAfterEach {
     sql(
       s"load data inpath '$resourcesPath/secindex/array.csv' into table complextable options('delimiter'=','," +
       "'quotechar'='\"','fileheader'='id,name,country','complex_delimiter_level_1'='$')")
-    val result =  sql(" select * from complextable where array_contains(country,'china')")
+    val result = sql(" select * from complextable where array_contains(country,'china')")
     sql("drop index if exists index_1 on complextable")
     sql("create index index_1 on table complextable(country) as 'carbondata'")
-    val df =  sql(" select * from complextable where array_contains(country,'china')")
+    val df = sql(" select * from complextable where array_contains(country,'china')")
     if (!isFilterPushedDownToSI(df.queryExecution.sparkPlan)) {
       assert(false)
     } else {
@@ -139,7 +139,7 @@ class TestSIWithComplexArrayType extends QueryTest with BeforeAndAfterEach {
     val result = sql(" select * from complextable where array_contains(projectdate,cast('2017-02-01' as date))")
     sql("create index index_1 on table complextable(projectdate) as 'carbondata'")
     checkAnswer(sql("select count(*) from index_1"), Seq(Row(2)))
-    val df =  sql(" select * from complextable where array_contains(projectdate,cast('2017-02-01' as date))")
+    val df = sql(" select * from complextable where array_contains(projectdate,cast('2017-02-01' as date))")
     if (!isFilterPushedDownToSI(df.queryExecution.sparkPlan)) {
       assert(false)
     } else {
@@ -156,7 +156,7 @@ class TestSIWithComplexArrayType extends QueryTest with BeforeAndAfterEach {
     val result = sql(" select * from complextable where array_contains(projectdate,cast('2017-02-01 00:01:00' as timestamp))")
     sql("create index index_1 on table complextable(projectdate) as 'carbondata'")
     checkAnswer(sql("select count(*) from index_1"), Seq(Row(2)))
-    val df =  sql(" select * from complextable where array_contains(projectdate,cast('2017-02-01 00:01:00' as timestamp))")
+    val df = sql(" select * from complextable where array_contains(projectdate,cast('2017-02-01 00:01:00' as timestamp))")
     if (!isFilterPushedDownToSI(df.queryExecution.sparkPlan)) {
       assert(false)
     } else {
@@ -171,10 +171,10 @@ class TestSIWithComplexArrayType extends QueryTest with BeforeAndAfterEach {
     sql("insert into complextable select 2, array('pak'), 'v'")
     sql("insert into complextable select 3, array('china'), 'f'")
     sql("insert into complextable select 4, array('india'),'g'")
-    val result =  sql(" select * from complextable where array_contains(country,'china') and name='f'")
+    val result = sql(" select * from complextable where array_contains(country,'china') and name='f'")
     sql("drop index if exists index_1 on complextable")
     sql("create index index_1 on table complextable(country, name) as 'carbondata'")
-    val df =  sql(" select * from complextable where array_contains(country,'china') and name='f'")
+    val df = sql(" select * from complextable where array_contains(country,'china') and name='f'")
     if (!isFilterPushedDownToSI(df.queryExecution.sparkPlan)) {
       assert(false)
     } else {
@@ -188,13 +188,13 @@ class TestSIWithComplexArrayType extends QueryTest with BeforeAndAfterEach {
     sql("insert into complextable select 1, array('china', 'us'), 'b', 'b1'")
     sql("insert into complextable select 2, array('pak', 'india'), 'v', 'v'")
     val result1 = sql("select * from complextable where addr='v' and array_contains(country,'pak')")
-    val result2 =  sql("select * from complextable where array_contains(country,'pak') and addr='v'")
+    val result2 = sql("select * from complextable where array_contains(country,'pak') and addr='v'")
     sql("drop index if exists index_1 on complextable")
     sql("create index index_1 on table complextable(country, name) as 'carbondata'")
     sql("drop index if exists index_2 on complextable")
     sql("create index index_2 on table complextable(addr) as 'carbondata'")
-    val df1 =  sql("select * from complextable where addr='v' and array_contains(country,'pak')")
-    val df2 =  sql("select * from complextable where array_contains(country,'pak') and addr='v'")
+    val df1 = sql("select * from complextable where addr='v' and array_contains(country,'pak')")
+    val df2 = sql("select * from complextable where array_contains(country,'pak') and addr='v'")
     if (!isFilterPushedDownToSI(df1.queryExecution.sparkPlan)) {
       assert(false)
     } else {
@@ -216,7 +216,7 @@ class TestSIWithComplexArrayType extends QueryTest with BeforeAndAfterEach {
     val result1 = sql("select * from complextable where array_contains(country,'india') and array_contains(country,'pak')")
     sql("drop index if exists index_1 on complextable")
     sql("create index index_1 on table complextable(country, name) as 'carbondata'")
-    val df1 =  sql("select * from complextable where array_contains(country,'india') and array_contains(country,'pak')")
+    val df1 = sql("select * from complextable where array_contains(country,'india') and array_contains(country,'pak')")
     if (isFilterPushedDownToSI(df1.queryExecution.sparkPlan)) {
       assert(false)
     } else {
@@ -224,5 +224,5 @@ class TestSIWithComplexArrayType extends QueryTest with BeforeAndAfterEach {
     }
     checkAnswer(result1, df1)
   }
-
+  // scalastyle:on lineLength
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithPartition.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithPartition.scala
index 31bd4a2..dbc758b 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithPartition.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithPartition.scala
@@ -16,11 +16,11 @@
  */
 package org.apache.carbondata.spark.testsuite.secondaryindex
 
-import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils
-.isFilterPushedDownToSI;
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.util.QueryTest
-import org.scalatest.{BeforeAndAfterAll, Ignore}
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils.isFilterPushedDownToSI
 
 class TestSIWithPartition extends QueryTest with BeforeAndAfterAll {
 
@@ -299,7 +299,7 @@ class TestSIWithPartition extends QueryTest with BeforeAndAfterAll {
       " 'abc'"),
       Seq(Row(4)))
 
-    sql("delete from uniqdata1 where CUST_NAME='CUST_NAME_00108'").show()
+    sql("delete from uniqdata1 where CUST_NAME='CUST_NAME_00108'").collect()
 
     checkAnswer(sql(
       "select count(*) from uniqdata1 where CUST_NAME='CUST_NAME_00108' and ACTIVE_EMUI_VERSION =" +
@@ -325,7 +325,7 @@ class TestSIWithPartition extends QueryTest with BeforeAndAfterAll {
       "select count(*) from uniqdata1 where CUST_ID='9000' and ACTIVE_EMUI_VERSION = 'abc'"),
       Seq(Row(4)))
     intercept[RuntimeException] {
-      sql("update uniqdata1 d set (d.CUST_ID) = ('8000')  where d.CUST_ID = '9000'").show()
+      sql("update uniqdata1 d set (d.CUST_ID) = ('8000')  where d.CUST_ID = '9000'").collect()
     }
   }
 
@@ -359,7 +359,8 @@ class TestSIWithPartition extends QueryTest with BeforeAndAfterAll {
   test("test secondary index with partition table having mutiple partition columns") {
     sql("drop table if exists partition_table")
     sql(s"""
-         | CREATE TABLE partition_table (stringField string, intField int, shortField short, stringField1 string)
+         | CREATE TABLE partition_table (
+         | stringField string, intField int, shortField short, stringField1 string)
          | STORED AS carbondata
          | PARTITIONED BY (hour_ string, date_ string, sec_ string)
          | TBLPROPERTIES ('SORT_COLUMNS'='hour_,date_,stringField', 'SORT_SCOPE'='GLOBAL_SORT')
@@ -368,8 +369,9 @@ class TestSIWithPartition extends QueryTest with BeforeAndAfterAll {
     sql(s"create index si_on_multi_part on partition_table(stringField1) as 'carbondata'")
     sql("insert into partition_table select 'abc', 1,123,'abc1',2,'mon','ten'")
     checkAnswer(sql(s"select count(*) from si_on_multi_part"), Seq(Row(1)))
-    val dataFrame = sql(s"select stringField,date_,sec_ from partition_table where stringField1='abc1'")
-    checkAnswer(dataFrame, Seq(Row("abc","mon","ten")))
+    val dataFrame =
+      sql(s"select stringField,date_,sec_ from partition_table where stringField1='abc1'")
+    checkAnswer(dataFrame, Seq(Row("abc", "mon", "ten")))
     if (!isFilterPushedDownToSI(dataFrame.queryExecution.sparkPlan)) {
       assert(false)
     } else {
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondryIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondaryIndex.scala
similarity index 67%
rename from index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondryIndex.scala
rename to index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondaryIndex.scala
index ea66e75..ebc98dd 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondryIndex.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondaryIndex.scala
@@ -19,26 +19,22 @@ package org.apache.carbondata.spark.testsuite.secondaryindex
 import scala.collection.JavaConverters._
 
 import org.apache.commons.lang3.StringUtils
-
-import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
-import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils.isFilterPushedDownToSI
 import org.apache.spark.sql.{CarbonEnv, Row}
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatus, SegmentStatusManager}
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.core.util.path.CarbonTablePath
 import org.apache.carbondata.spark.exception.ProcessMetaDataException
-import org.apache.spark.sql.test.util.QueryTest
+import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils.isFilterPushedDownToSI
 
-class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
+class TestSIWithSecondaryIndex extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
-    sql("drop index if exists si_altercolumn on table_WithSIAndAlter")
-    sql("drop table if exists table_WithSIAndAlter")
-    sql("drop table if exists table_drop_columns")
-    sql("drop table if exists table_drop_columns_fail")
+    dropIndexAndTable()
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
         CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
@@ -46,8 +42,8 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
     sql("create table table_WithSIAndAlter(c1 string, c2 date,c3 timestamp) STORED AS carbondata")
     sql("insert into table_WithSIAndAlter select 'xx',current_date, current_timestamp")
     sql("alter table table_WithSIAndAlter add columns(date1 date, time timestamp)")
-    sql("update table_WithSIAndAlter set(date1) = (c2)").show
-    sql("update table_WithSIAndAlter set(time) = (c3)").show
+    sql("update table_WithSIAndAlter set(date1) = (c2)").collect()
+    sql("update table_WithSIAndAlter set(time) = (c3)").collect()
     sql("create index si_altercolumn on table table_WithSIAndAlter(date1,time) AS 'carbondata'")
   }
 
@@ -66,9 +62,10 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
   }
 
   test ("test alter drop all columns of the SI table") {
-    sql("create table table_drop_columns (name string, id string, country string) stored as carbondata")
+    sql("create table table_drop_columns (" +
+        "name string, id string, country string) stored as carbondata")
     sql("insert into table_drop_columns select 'xx', '1', 'china'")
-    sql("create index index_1 on table table_drop_columns(id, country) as 'carbondata'")
+    sql("create index tdc_index_1 on table table_drop_columns(id, country) as 'carbondata'")
     // alter table to drop all the columns used in index
     sql("alter table table_drop_columns drop columns(id, country)")
     sql("insert into table_drop_columns select 'xy'")
@@ -76,9 +73,10 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
   }
 
   test ("test alter drop few columns of the SI table") {
-    sql("create table table_drop_columns_fail (name string, id string, country string) stored as carbondata")
+    sql("create table table_drop_columns_fail (" +
+        "name string, id string, country string) stored as carbondata")
     sql("insert into table_drop_columns_fail select 'xx', '1', 'china'")
-    sql("create index index_1 on table table_drop_columns_fail(id, country) as 'carbondata'")
+    sql("create index tdcf_index_1 on table table_drop_columns_fail(id, country) as 'carbondata'")
     // alter table to drop few columns used in index. This should fail as we are not dropping all
     // the index columns
     assert(intercept[ProcessMetaDataException](sql(
@@ -87,7 +85,6 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test create secondary index global sort after insert") {
-    sql("drop table if exists table1")
     sql("create table table1 (name string, id string, country string) stored as carbondata")
     sql("insert into table1 select 'xx', '2', 'china' union all select 'xx', '1', 'india'")
     sql("create index table1_index on table table1(id, country) as 'carbondata' properties" +
@@ -108,179 +105,172 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
       "('sort_scope'='global_sort', 'Global_sort_partitions'='-1')"))
       .getMessage
       .contains("Table property global_sort_partitions : -1 is invalid"))
-    sql("drop index table1_index on table1")
-    sql("drop table table1")
   }
 
   test("test create secondary index global sort before insert") {
-    sql("drop table if exists table1")
-    sql("create table table1 (name string, id string, country string) stored as carbondata")
-    sql("create index table1_index on table table1(id, country) as 'carbondata' properties" +
+    sql("create table table11 (name string, id string, country string) stored as carbondata")
+    sql("create index table11_index on table table11(id, country) as 'carbondata' properties" +
         "('sort_scope'='global_sort', 'Global_sort_partitions'='3')")
-    sql("insert into table1 select 'xx', '2', 'china' union all select 'xx', '1', 'india'")
-    checkAnswerWithoutSort(sql("select id, country from table1_index"),
+    sql("insert into table11 select 'xx', '2', 'china' union all select 'xx', '1', 'india'")
+    checkAnswerWithoutSort(sql("select id, country from table11_index"),
       Seq(Row("1", "india"), Row("2", "china")))
     // check for valid sort_scope
-    checkExistence(sql("describe formatted table1_index"), true, "Sort Scope global_sort")
-    sql("drop index table1_index on table1")
-    sql("drop table table1")
+    checkExistence(sql("describe formatted table11_index"), true, "Sort Scope global_sort")
   }
 
   test("test create secondary index global sort on partition table") {
-    sql("drop table if exists partition_carbon_table")
-    sql("create table partition_carbon_table (name string, id string, country string) PARTITIONED BY(dateofjoin " +
+    sql("create table partition_carbon_table (" +
+        "name string, id string, country string) PARTITIONED BY(dateofjoin " +
       "string) stored as carbondata")
     // create SI before the inserting the data
-    sql("create index partition_carbon_table_index on table partition_carbon_table(id, country) as 'carbondata' properties" +
+    sql("create index partition_carbon_table_index on table partition_carbon_table(" +
+        "id, country) as 'carbondata' properties" +
         "('sort_scope'='global_sort', 'Global_sort_partitions'='3')")
     sql("insert into partition_carbon_table select 'xx', '2', 'china', '2020' " +
         "union all select 'xx', '1', 'india', '2021'")
     checkAnswerWithoutSort(sql("select id, country from partition_carbon_table_index"),
       Seq(Row("1", "india"), Row("2", "china")))
     // check for valid sort_scope
-    checkExistence(sql("describe formatted partition_carbon_table_index"), true, "Sort Scope global_sort")
+    checkExistence(sql("describe formatted partition_carbon_table_index"),
+      true, "Sort Scope global_sort")
     sql("drop index partition_carbon_table_index on partition_carbon_table")
     // create SI after the inserting the data
-    sql("create index partition_carbon_table_index on table partition_carbon_table(id, country) as 'carbondata' properties" +
+    sql("create index partition_carbon_table_index on table partition_carbon_table(" +
+        "id, country) as 'carbondata' properties" +
         "('sort_scope'='global_sort', 'Global_sort_partitions'='3')")
     checkAnswerWithoutSort(sql("select id, country from partition_carbon_table_index"),
       Seq(Row("1", "india"), Row("2", "china")))
     // check for valid sort_scope
-    checkExistence(sql("describe formatted partition_carbon_table_index"), true, "Sort Scope global_sort")
-    sql("drop table partition_carbon_table")
+    checkExistence(sql("describe formatted partition_carbon_table_index"),
+      true,
+      "Sort Scope global_sort")
   }
 
   test("test array<string> and string as index columns on secondary index with global sort") {
-    sql("drop table if exists complextable")
     sql(
       "create table complextable (id string, country array<string>, name string) stored as " +
       "carbondata")
     sql("insert into complextable select 1, array('china', 'us'), 'b' union all select 2, array" +
         "('pak', 'india', 'china'), 'v' ")
     sql("drop index if exists complextable_index_1 on complextable")
-    sql("create index complextable_index_1 on table complextable(country, name) as 'carbondata' properties" +
-        "('sort_scope'='global_sort', 'Global_sort_partitions'='3')")
+    sql("create index complextable_index_1 on table complextable(country, name) " +
+        "as 'carbondata' properties('sort_scope'='global_sort', 'Global_sort_partitions'='3')")
     checkAnswerWithoutSort(sql("select country,name from complextable_index_1"),
       Seq(Row("china", "b"), Row("china", "v"), Row("india", "v"), Row("pak", "v"), Row("us", "b")))
     // check for valid sort_scope
     checkExistence(sql("describe formatted complextable_index_1"), true, "Sort Scope global_sort")
-    sql("drop index complextable_index_1 on complextable")
-    sql("drop table complextable")
   }
 
   test("Test secondry index data count") {
-    checkAnswer(sql("select count(*) from si_altercolumn")
-      ,Seq(Row(1)))
+    checkAnswer(sql("select count(*) from si_altercolumn"), Seq(Row(1)))
   }
 
   test("test create secondary index when all records are deleted from table") {
-    sql("drop table if exists delete_records")
     sql("create table delete_records (a string,b string) STORED AS carbondata")
     sql("insert into delete_records values('k','r')")
     sql("insert into delete_records values('k','r')")
     sql("insert into delete_records values('k','r')")
-    sql("delete from delete_records where a='k'").show()
+    sql("delete from delete_records where a='k'").collect()
     sql("alter table delete_records compact 'minor'")
-    sql("create index index1 on table delete_records(b) AS 'carbondata'")
-    checkAnswer(sql("select count(*) from index1"), Row(0))
-    sql("drop table if exists delete_records")
+    sql("create index dr_index1 on table delete_records(b) AS 'carbondata'")
+    checkAnswer(sql("select count(*) from dr_index1"), Row(0))
   }
 
   test("test secondary index data after parent table rename") {
+    sql("drop index if exists m_index21 on maintable")
     sql("drop table if exists maintable")
-    sql("drop table if exists maintableeee")
     sql("create table maintable (a string,b string, c int) STORED AS carbondata")
     sql("insert into maintable values('k','x',2)")
     sql("insert into maintable values('k','r',1)")
-    sql("create index index21 on table maintable(b) AS 'carbondata'")
-    checkAnswer(sql("select * from maintable where c>1"), Seq(Row("k","x",2)))
+    sql("create index m_index21 on table maintable(b) AS 'carbondata'")
+    checkAnswer(sql("select * from maintable where c>1"), Seq(Row("k", "x", 2)))
     sql("ALTER TABLE maintable RENAME TO maintableeee")
-    checkAnswer(sql("select * from maintableeee where c>1"), Seq(Row("k","x",2)))
+    checkAnswer(sql("select * from maintableeee where c>1"), Seq(Row("k", "x", 2)))
   }
 
   test("test secondary index with cache_level as blocklet") {
-    sql("drop table if exists maintable")
-    sql("create table maintable (a string,b string,c int) STORED AS carbondata")
-    sql("insert into maintable values('k','x',2)")
-    sql("create index indextable on table maintable(b) AS 'carbondata'")
-    sql("ALTER TABLE maintable SET TBLPROPERTIES('CACHE_LEVEL'='BLOCKLET')")
-    checkAnswer(sql("select * from maintable where b='x'"), Seq(Row("k","x",2)))
-    sql("drop table maintable")
+    sql("create table maintable2 (a string,b string,c int) STORED AS carbondata")
+    sql("insert into maintable2 values('k','x',2)")
+    sql("create index m_indextable2 on table maintable2(b) AS 'carbondata'")
+    sql("ALTER TABLE maintable2 SET TBLPROPERTIES('CACHE_LEVEL'='BLOCKLET')")
+    checkAnswer(sql("select * from maintable2 where b='x'"), Seq(Row("k", "x", 2)))
   }
 
   test("test secondary index with cache_level as blocklet on partitioned table") {
-    sql("drop table if exists partitionTable")
-    sql("create table partitionTable (a string,b string) partitioned by (c int) STORED AS carbondata")
+    sql("create table partitionTable (" +
+        "a string,b string) partitioned by (c int) STORED AS carbondata")
     sql("insert into partitionTable values('k','x',2)")
-    sql("create index indextable on table partitionTable(b) AS 'carbondata'")
+    sql("create index p_indextable on table partitionTable(b) AS 'carbondata'")
     sql("ALTER TABLE partitionTable SET TBLPROPERTIES('CACHE_LEVEL'='BLOCKLET')")
-    checkAnswer(sql("select * from partitionTable where b='x'"), Seq(Row("k","x",2)))
+    checkAnswer(sql("select * from partitionTable where b='x'"), Seq(Row("k", "x", 2)))
     sql("drop table partitionTable")
   }
 
   test("validate column_meta_cache and cache_level on SI table") {
-    sql("drop table if exists column_meta_cache")
-    sql("create table column_meta_cache(c1 String, c2 String, c3 int, c4 double) STORED AS carbondata")
-    sql("create index indexCache on table column_meta_cache(c2,c1) AS 'carbondata' PROPERTIES('COLUMN_meta_CachE'='c2','cache_level'='BLOCK')")
-    assert(isExpectedValueValid("default", "indexCache", "column_meta_cache", "c2"))
-    assert(isExpectedValueValid("default", "indexCache", "cache_level", "BLOCK"))
+    sql("create table column_meta_cache(" +
+        "c1 String, c2 String, c3 int, c4 double) STORED AS carbondata")
+    sql("create index cmc_indexCache on table column_meta_cache(c2,c1) " +
+        "AS 'carbondata' PROPERTIES('COLUMN_meta_CachE'='c2','cache_level'='BLOCK')")
+    assert(isExpectedValueValid("default", "cmc_indexCache", "column_meta_cache", "c2"))
+    assert(isExpectedValueValid("default", "cmc_indexCache", "cache_level", "BLOCK"))
     // set invalid values for SI table for column_meta_cache and cache_level and verify
     intercept[MalformedCarbonCommandException] {
-      sql("create index indexCache1 on table column_meta_cache(c2) AS 'carbondata' PROPERTIES('COLUMN_meta_CachE'='abc')")
+      sql("create index cmc_indexCache1 on table column_meta_cache(c2) " +
+          "AS 'carbondata' PROPERTIES('COLUMN_meta_CachE'='abc')")
     }
     intercept[MalformedCarbonCommandException] {
-      sql("create index indexCache1 on table column_meta_cache(c2) AS 'carbondata' PROPERTIES('cache_level'='abc')")
+      sql("create index cmc_indexCache1 on table column_meta_cache(c2) " +
+          "AS 'carbondata' PROPERTIES('cache_level'='abc')")
     }
     intercept[Exception] {
-      sql("Alter table indexCache SET TBLPROPERTIES('column_meta_cache'='abc')")
+      sql("Alter table cmc_indexCache SET TBLPROPERTIES('column_meta_cache'='abc')")
     }
     intercept[Exception] {
-      sql("Alter table indexCache SET TBLPROPERTIES('CACHE_LEVEL'='abc')")
+      sql("Alter table cmc_indexCache SET TBLPROPERTIES('CACHE_LEVEL'='abc')")
     }
     // alter table to unset these properties on SI table
-    sql("Alter table indexCache UNSET TBLPROPERTIES('column_meta_cache')")
-    var descResult = sql("describe formatted indexCache")
+    sql("Alter table cmc_indexCache UNSET TBLPROPERTIES('column_meta_cache')")
+    var descResult = sql("describe formatted cmc_indexCache")
     checkExistence(descResult, false, "COLUMN_META_CACHE")
-    sql("Alter table indexCache UNSET TBLPROPERTIES('cache_level')")
-    descResult = sql("describe formatted indexCache")
+    sql("Alter table cmc_indexCache UNSET TBLPROPERTIES('cache_level')")
+    descResult = sql("describe formatted cmc_indexCache")
     checkExistence(descResult, true, "Min/Max Index Cache Level")
-    //alter SI table to set the properties again
-    sql("Alter table indexCache SET TBLPROPERTIES('column_meta_cache'='c1')")
-    assert(isExpectedValueValid("default", "indexCache", "column_meta_cache", "c1"))
+    // alter SI table to set the properties again
+    sql("Alter table cmc_indexCache SET TBLPROPERTIES('column_meta_cache'='c1')")
+    assert(isExpectedValueValid("default", "cmc_indexCache", "column_meta_cache", "c1"))
     // set empty value for column_meta_cache
-    sql("Alter table indexCache SET TBLPROPERTIES('column_meta_cache'='')")
-    assert(isExpectedValueValid("default", "indexCache", "column_meta_cache", ""))
+    sql("Alter table cmc_indexCache SET TBLPROPERTIES('column_meta_cache'='')")
+    assert(isExpectedValueValid("default", "cmc_indexCache", "column_meta_cache", ""))
     // set cache_level to blocklet
-    sql("Alter table indexCache SET TBLPROPERTIES('cache_level'='BLOCKLET')")
-    assert(isExpectedValueValid("default", "indexCache", "cache_level", "BLOCKLET"))
+    sql("Alter table cmc_indexCache SET TBLPROPERTIES('cache_level'='BLOCKLET')")
+    assert(isExpectedValueValid("default", "cmc_indexCache", "cache_level", "BLOCKLET"))
   }
 
   test("test parallel load of SI to main table") {
-    sql("drop table if exists uniqdata")
     sql("CREATE table uniqdata (empno int, empname String, " +
         "designation String, doj Timestamp, workgroupcategory int, " +
         "workgroupcategoryname String, deptno int, deptname String, projectcode int, " +
         "projectjoindate Timestamp, projectenddate Timestamp, attendance int, " +
         "utilization int,salary int) STORED AS carbondata")
-    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
-        "TABLE uniqdata OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
-    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
-        "TABLE uniqdata OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
-    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
-        "TABLE uniqdata OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
-    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
-        "TABLE uniqdata OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
-    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
-        "TABLE uniqdata OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
-    sql("create index index1 on table uniqdata (workgroupcategoryname) AS 'carbondata'")
-    val indexTable = CarbonEnv.getCarbonTable(Some("default"), "index1")(sqlContext.sparkSession)
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE uniqdata " +
+        "OPTIONS('DELIMITER'=',','BAD_RECORDS_LOGGER_ENABLE'='FALSE','BAD_RECORDS_ACTION'='FORCE')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE uniqdata " +
+        "OPTIONS('DELIMITER'=',','BAD_RECORDS_LOGGER_ENABLE'='FALSE','BAD_RECORDS_ACTION'='FORCE')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE uniqdata " +
+        "OPTIONS('DELIMITER'=',','BAD_RECORDS_LOGGER_ENABLE'='FALSE','BAD_RECORDS_ACTION'='FORCE')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE uniqdata " +
+        "OPTIONS('DELIMITER'=',','BAD_RECORDS_LOGGER_ENABLE'='FALSE','BAD_RECORDS_ACTION'='FORCE')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE uniqdata " +
+        "OPTIONS('DELIMITER'=',','BAD_RECORDS_LOGGER_ENABLE'='FALSE','BAD_RECORDS_ACTION'='FORCE')")
+    sql("create index ud_index1 on table uniqdata (workgroupcategoryname) AS 'carbondata'")
+    val indexTable = CarbonEnv.getCarbonTable(Some("default"), "ud_index1")(sqlContext.sparkSession)
     val carbontable = CarbonEnv.getCarbonTable(Some("default"), "uniqdata")(sqlContext.sparkSession)
     val details = SegmentStatusManager.readLoadMetadata(indexTable.getMetadataPath)
-    val failSegments = List("3","4")
+    val failSegments = List("3", "4")
     sql(s"""set carbon.si.repair.limit = 2""")
     var loadMetadataDetailsList = Array[LoadMetadataDetails]()
     details.foreach{detail =>
-      if(failSegments.contains(detail.getLoadName)){
+      if (failSegments.contains(detail.getLoadName)) {
         val loadmetadatadetail = detail
         loadmetadatadetail.setSegmentStatus(SegmentStatus.MARKED_FOR_DELETE)
         loadMetadataDetailsList +:= loadmetadatadetail
@@ -294,16 +284,18 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
       CarbonTablePath.TABLE_STATUS_FILE,
       loadMetadataDetailsList)
 
-    sql(s"""ALTER TABLE default.index1 SET
+    sql(s"""ALTER TABLE default.ud_index1 SET
            |SERDEPROPERTIES ('isSITableEnabled' = 'false')""".stripMargin)
-    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
-        "TABLE uniqdata OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE uniqdata " +
+        "OPTIONS('DELIMITER'=',','BAD_RECORDS_LOGGER_ENABLE'='FALSE','BAD_RECORDS_ACTION'='FORCE')")
     val count1 = sql("select * from uniqdata where workgroupcategoryname = 'developer'").count()
-    val df1 = sql("select * from uniqdata where workgroupcategoryname = 'developer'").queryExecution.sparkPlan
-    sql(s"""ALTER TABLE default.index1 SET
+    val df1 = sql("select * from uniqdata where workgroupcategoryname = 'developer'")
+      .queryExecution.sparkPlan
+    sql(s"""ALTER TABLE default.ud_index1 SET
            |SERDEPROPERTIES ('isSITableEnabled' = 'false')""".stripMargin)
     val count2 = sql("select * from uniqdata where workgroupcategoryname = 'developer'").count()
-    val df2 = sql("select * from uniqdata where workgroupcategoryname = 'developer'").queryExecution.sparkPlan
+    val df2 = sql("select * from uniqdata where workgroupcategoryname = 'developer'")
+      .queryExecution.sparkPlan
     sql(s"""set carbon.si.repair.limit = 1""")
     assert(count1 == count2)
     assert(isFilterPushedDownToSI(df1))
@@ -311,7 +303,6 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test drop table on index table") {
-    sql("drop table if exists uniqdataTable")
     sql("CREATE table uniqdataTable (empno int, empname String, " +
         "designation String, doj Timestamp, workgroupcategory int, " +
         "workgroupcategoryname String, deptno int, deptname String, projectcode int, " +
@@ -319,8 +310,8 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
         "utilization int,salary int) STORED AS carbondata")
     sql(
       "create index uniqdataindex1 on table uniqdataTable (workgroupcategoryname) AS 'carbondata'")
-    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO " +
-        "TABLE uniqdataTable OPTIONS('DELIMITER'=',', 'BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE uniqdataTable " +
+        "OPTIONS('DELIMITER'=',','BAD_RECORDS_LOGGER_ENABLE'='FALSE','BAD_RECORDS_ACTION'='FORCE')")
     val errorMessage = intercept[Exception] {
       sql("drop table uniqdataindex1")
     }.getMessage
@@ -328,15 +319,13 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test SI creation on two tables with the same name") {
-    sql("drop table if exists uniqdataTable1")
-    sql("drop table if exists uniqdataTable2")
     sql("CREATE table uniqdataTable1 (empno int, empname String, " +
         "designation String, doj Timestamp, workgroupcategory int, " +
         "workgroupcategoryname String, deptno int, deptname String, projectcode int, " +
         "projectjoindate Timestamp, projectenddate Timestamp, attendance int, " +
         "utilization int,salary int) STORED AS carbondata")
-    sql(
-      "create index uniqdataidxtable on table uniqdataTable1 (workgroupcategoryname) AS 'carbondata'")
+    sql("create index uniqdataidxtable on table uniqdataTable1 (" +
+        "workgroupcategoryname) AS 'carbondata'")
 
     sql("CREATE table uniqdataTable2 (empno int, empname String, " +
         "designation String, doj Timestamp, workgroupcategory int, " +
@@ -344,26 +333,30 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
         "projectjoindate Timestamp, projectenddate Timestamp, attendance int, " +
         "utilization int,salary int) STORED AS carbondata")
     val errorMessage = intercept[Exception] {
-      sql(
-        "create index uniqdataidxtable on table uniqdataTable2 (workgroupcategoryname) AS 'carbondata'")
+      sql("create index uniqdataidxtable on table uniqdataTable2 (" +
+          "workgroupcategoryname) AS 'carbondata'")
     }.getMessage
-    assert(errorMessage.contains("Index [uniqdataidxtable] already exists under database [default]"))
+    assert(errorMessage.contains(
+      "Index [uniqdataidxtable] already exists under database [default]"))
   }
 
   test("test date type with SI table") {
-    sql("drop table if exists maintable")
-    sql("CREATE TABLE maintable (id int,name string,salary float,dob date,address string) STORED AS carbondata")
-    sql("insert into maintable values(1,'aa',23423.334,'2009-09-06','df'),(1,'aa',23423.334,'2009-09-07','df')")
-    sql("insert into maintable select 2,'bb',4454.454,'2009-09-09','bang'")
-    sql("drop index if exists index_date on maintable")
-    sql("create index index_date on table maintable(dob) AS 'carbondata'")
-    val df = sql("select id,name,dob from maintable where dob = '2009-09-07'")
+    sql("drop index if exists m_index_date on maintable3")
+    sql("drop table if exists maintable3")
+    sql("CREATE TABLE maintable3 (" +
+        "id int,name string,salary float,dob date,address string) STORED AS carbondata")
+    sql("insert into maintable3 values(" +
+        "1,'aa',23423.334,'2009-09-06','df'),(1,'aa',23423.334,'2009-09-07','df')")
+    sql("insert into maintable3 select 2,'bb',4454.454,'2009-09-09','bang'")
+    sql("create index m_index_date on table maintable3(dob) AS 'carbondata'")
+    val df = sql("select id,name,dob from maintable3 where dob = '2009-09-07'")
     assert(isFilterPushedDownToSI(df.queryExecution.sparkPlan))
-    checkAnswer(df, Seq(Row(1,"aa", java.sql.Date.valueOf("2009-09-07"))))
-    sql("drop table if exists maintable")
+    checkAnswer(df, Seq(Row(1, "aa", java.sql.Date.valueOf("2009-09-07"))))
   }
 
   test("test SI order by limit push down") {
+    sql("drop index if exists table2_index1 on table2")
+    sql("drop index if exists table2_index2 on table2")
     sql("drop table if exists table2")
     sql("CREATE TABLE `table2` (`imsi` STRING, `carno` STRING, `longitude` STRING, `city` " +
       "STRING, `starttime` BIGINT, `endtime` BIGINT) STORED AS carbondata TBLPROPERTIES" +
@@ -424,26 +417,60 @@ class TestSIWithSecondryIndex extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test SI creation with special char column") {
-    sql("drop table if exists special_char")
-    sql("create table special_char(`i#d` string, `nam(e` string,`ci)&#@!ty` string,`a\be` int, `ag!e` float, `na^me1` Decimal(8,4)) stored as carbondata")
+    sql("create table special_char(`i#d` string, `nam(e` string,`ci)&#@!ty` string," +
+        "`a\be` int, `ag!e` float, `na^me1` Decimal(8,4)) stored as carbondata")
     sql("create index special_char_index on table special_char(`nam(e`) as 'carbondata'")
     sql("insert into special_char values('1','joey','hud', 2, 2.2, 2.3456)")
-    val plan = sql("explain select * from special_char where `nam(e` = 'joey'").collect()(0).toString()
+    val plan =
+      sql("explain select * from special_char where `nam(e` = 'joey'").collect()(0).toString()
     assert(plan.contains("special_char_index"))
     val df = sql("describe formatted special_char_index").collect()
     assert(df.exists(_.get(0).toString.contains("nam(e")))
   }
 
   override def afterAll {
-    sql("drop index si_altercolumn on table_WithSIAndAlter")
+    dropIndexAndTable()
+  }
+
+  private def dropIndexAndTable(): Unit = {
+    sql("drop index if exists si_altercolumn on table_WithSIAndAlter")
     sql("drop table if exists table_WithSIAndAlter")
-    sql("drop table if exists maintable")
+    sql("drop index if exists tdc_index_1 on table_drop_columns")
+    sql("drop table if exists table_drop_columns")
+    sql("drop index if exists tdcf_index_1 on table_drop_columns_fail")
+    sql("drop table if exists table_drop_columns_fail")
+    sql("drop index if exists table1_index on table1")
+    sql("drop table if exists table1")
+    sql("drop index if exists table11_index on table11")
+    sql("drop table if exists table11")
+    sql("drop index if exists partition_carbon_table_index on partition_carbon_table")
+    sql("drop table if exists partition_carbon_table")
+    sql("drop index if exists complextable_index_1 on complextable")
+    sql("drop table if exists complextable")
+    sql("drop index if exists dr_index1 on delete_records")
+    sql("drop table if exists delete_records")
+    sql("drop index if exists m_index21 on maintable")
     sql("drop table if exists maintableeee")
+    sql("drop index if exists m_indextable2 on maintable2")
+    sql("drop table if exists maintable2")
+    sql("drop index if exists p_indextable on partitionTable")
+    sql("drop table if exists partitionTable")
+    sql("drop index if exists cmc_indexCache on column_meta_cache")
+    sql("drop index if exists cmc_indexCache1 on column_meta_cache")
     sql("drop table if exists column_meta_cache")
+    sql("drop index if exists ud_index1 on uniqdata")
     sql("drop table if exists uniqdata")
+    sql("drop index if exists uniqdataindex1 on uniqdataTable")
     sql("drop table if exists uniqdataTable")
-    sql("drop table if exists table_drop_columns")
-    sql("drop table if exists table_drop_columns_fail")
+    sql("drop index if exists uniqdataidxtable on uniqdataTable1")
+    sql("drop table if exists uniqdataTable1")
+    sql("drop table if exists uniqdataTable2")
+    sql("drop index if exists m_index_date on maintable3")
+    sql("drop table if exists maintable3")
+    sql("drop index if exists table2_index1 on table2")
+    sql("drop index if exists table2_index2 on table2")
+    sql("drop table if exists table2")
+    sql("drop index if exists special_char_index on special_char")
     sql("drop table if exists special_char")
   }
 }
diff --git a/index/secondary-index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala b/index/secondary-index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
index 5e81256..5a030b2 100644
--- a/index/secondary-index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
+++ b/index/secondary-index/src/test/scala/org/apache/spark/util/TestCarbonSegmentUtil.scala
@@ -18,7 +18,6 @@ package org.apache.spark.util
 
 import java.util
 
-import org.junit.Test
 import scala.collection.JavaConverters._
 
 import org.apache.spark.sql.CarbonEnv
@@ -28,6 +27,7 @@ import org.apache.spark.sql.secondaryindex.joins.BroadCastSIFilterPushJoin
 import org.apache.spark.sql.secondaryindex.util.SecondaryIndexUtil
 import org.apache.spark.sql.test.{SparkTestQueryExecutor, TestQueryExecutor}
 import org.apache.spark.sql.test.util.QueryTest
+import org.junit.Test
 
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatusManager}
 import org.apache.carbondata.spark.rdd.CarbonScanRDD
@@ -113,7 +113,7 @@ class TestCarbonSegmentUtil extends QueryTest {
     sql(s"delete from table $tableName where SEGMENT.ID in (3)")
     sql(s"delete from table $tableName where SEGMENT.ID in (2)")
     sql(s"delete from table $tableName where SEGMENT.ID in (1)")
-    sql(s"show segments for table $tableName").show(false)
+    sql(s"show segments for table $tableName").collect()
     val expected = SecondaryIndexUtil
       .identifySegmentsToBeMerged(SparkTestQueryExecutor.spark,
         tableName,
@@ -191,7 +191,9 @@ class TestCarbonSegmentUtil extends QueryTest {
     segments.add(load)
     val expected = SecondaryIndexUtil
       .getMergedLoadName(segments)
+    // scalastyle:off println
     println(expected)
+    // scalastyle:on println
     assert(expected.equalsIgnoreCase("Segment_0.1"))
     dropTables(tableName)
   }
diff --git a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala
index 3b18931..8bcc3b4 100644
--- a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala
+++ b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala
@@ -18,8 +18,8 @@
 package org.apache.carbon.flink
 
 import java.text.SimpleDateFormat
-import java.util.concurrent.{Executors, TimeUnit}
 import java.util.{Base64, Properties}
+import java.util.concurrent.Executors
 
 import org.apache.flink.api.common.restartstrategy.RestartStrategies
 import org.apache.flink.api.java.functions.KeySelector
@@ -27,19 +27,18 @@ import org.apache.flink.core.fs.Path
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
 import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.test.util.QueryTest
 import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 import org.apache.spark.sql.secondaryindex.joins.BroadCastSIFilterPushJoin
+import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.api.CarbonStore
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.path.CarbonTablePath
 
-import org.scalatest.BeforeAndAfterAll
-
-class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
+class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll {
 
   val tableName = "test_flink_partition"
   val dataTempPath = targetTestClass + "/data/temp/"
@@ -58,7 +57,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
 
       val dataCount = 1000
       val source = getTestSource(dataCount)
-      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment, source)
+      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment,
+        source)
 
       sql(s"INSERT INTO $tableName STAGE")
 
@@ -78,7 +78,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
       environment.enableCheckpointing(2000L)
       val dataCount = 1000
       val source = getTestSource(dataCount)
-      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment, source)
+      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment,
+        source)
 
       // 1. Test "SHOW SEGMENT ON $tableanme WITH STAGE"
       var rows = sql(s"SHOW SEGMENTS ON $tableName WITH STAGE").collect()
@@ -190,7 +191,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
 
       val dataCount = 1000
       val source = getTestSource(dataCount)
-      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment, source)
+      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment,
+        source)
 
       Thread.sleep(5000)
       val executorService = Executors.newFixedThreadPool(10)
@@ -211,7 +213,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
     sql(
       s"""
          | CREATE TABLE $tableName (stringField string, intField int, shortField short,
-         | structField struct<value1:string,value2:int,value3:int>, binaryField struct<value1:binary>)
+         | structField struct<value1:string,value2:int,value3:int>,
+         | binaryField struct<value1:binary>)
          | STORED AS carbondata
          | PARTITIONED BY (hour_ string, date_ string)
          | TBLPROPERTIES ('SORT_COLUMNS'='hour_,date_,stringField', 'SORT_SCOPE'='GLOBAL_SORT')
@@ -247,7 +250,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
           Thread.sleep(5000L)
         }
       }
-      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment, source)
+      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment,
+        source)
 
       sql(s"INSERT INTO $tableName STAGE")
 
@@ -258,7 +262,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
 
       val rows = sql(s"SELECT * FROM $tableName limit 1").collect()
       assertResult(1)(rows.length)
-      assertResult(Array[Byte](2, 3, 4))(rows(0).get(rows(0).fieldIndex("binaryfield")).asInstanceOf[GenericRowWithSchema](0))
+      assertResult(Array[Byte](2, 3, 4))(rows(0).get(rows(0).fieldIndex("binaryfield"))
+        .asInstanceOf[GenericRowWithSchema](0))
 
     }
   }
@@ -280,7 +285,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
 
       val dataCount = 10
       val source = getTestSource(dataCount)
-      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment, source)
+      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment,
+        source)
 
       sql(s"INSERT INTO $tableName STAGE")
 
@@ -303,7 +309,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
     createPartitionTable
     // create materialized view
     sql(s"drop materialized view if exists mv_1")
-    sql(s"create materialized view mv_1 as select stringField, shortField from $tableName where intField=9")
+    sql("create materialized view mv_1 " +
+        s"as select stringField, shortField from $tableName where intField=9")
 
     try {
       val tablePath = storeLocation + "/" + tableName + "/"
@@ -317,7 +324,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
 
       val dataCount = 10
       val source = getTestSource(dataCount)
-      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment, source)
+      executeStreamingEnvironment(tablePath, writerProperties, carbonProperties, environment,
+        source)
 
       sql(s"INSERT INTO $tableName STAGE")
 
@@ -327,7 +335,7 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
         case l: LogicalRelation => l.catalogTable.get
       }
       assert(tables.exists(_.identifier.table.equalsIgnoreCase("mv_1")))
-      checkAnswer(df, Seq(Row("test9",12345)))
+      checkAnswer(df, Seq(Row("test9", 12345)))
 
     }
   }
@@ -356,7 +364,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
     sql(s"DROP TABLE IF EXISTS $tableName")
     sql(
       s"""
-         | CREATE TABLE $tableName (stringField string, intField int, shortField short, stringField1 string)
+         | CREATE TABLE $tableName (
+         | stringField string, intField int, shortField short, stringField1 string)
          | STORED AS carbondata
          | PARTITIONED BY (hour_ string)
          | TBLPROPERTIES ('SORT_COLUMNS'='hour_,stringField', 'SORT_SCOPE'='GLOBAL_SORT')
@@ -380,7 +389,9 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll{
       new Properties,
       writerProperties,
       carbonProperties)
-    val streamSink = StreamingFileSink.forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory).build
+    val streamSink = StreamingFileSink
+      .forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory)
+      .build
 
     stream.keyBy(new KeySelector[Array[AnyRef], AnyRef] {
       override def getKey(value: Array[AnyRef]): AnyRef = value(3) // return hour_
diff --git a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala
index 235421f..aa6d440 100644
--- a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala
+++ b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala
@@ -27,9 +27,10 @@ import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
 import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink
 import org.apache.spark.sql.{CarbonEnv, Row}
 import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.QueryTest
 import org.apache.spark.sql.execution.exchange.Exchange
 import org.apache.spark.sql.secondaryindex.joins.BroadCastSIFilterPushJoin
+import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.api.CarbonStore
 import org.apache.carbondata.core.constants.CarbonCommonConstants
@@ -38,8 +39,6 @@ import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.core.util.path.CarbonTablePath
 
-import org.scalatest.BeforeAndAfterAll
-
 class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
 
   val tableName = "test_flink"
@@ -60,17 +59,21 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
       checkAnswer(sql(s"select count(1) from $tableName"), Seq(Row(0)))
 
       // query with stage input
-      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_QUERY_STAGE_INPUT, "true")
+      CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.CARBON_QUERY_STAGE_INPUT, "true")
       checkAnswer(sql(s"select count(*) from $tableName"), Seq(Row(1000)))
-      sql(s"select * from $tableName limit 10").show
+      sql(s"select * from $tableName limit 10").collect()
       checkAnswer(sql(s"select max(intField) from $tableName"), Seq(Row(999)))
-      checkAnswer(sql(s"select count(intField) from $tableName where intField >= 900"), Seq(Row(100)))
-      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_QUERY_STAGE_INPUT, "false")
+      checkAnswer(sql(s"select count(intField) from $tableName where intField >= 900"),
+        Seq(Row(100)))
+      CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.CARBON_QUERY_STAGE_INPUT, "false")
 
       sql(s"INSERT INTO $tableName STAGE")
 
       checkAnswer(sql(s"SELECT count(*) FROM $tableName"), Seq(Row(1000)))
-      checkAnswer(sql(s"select count(intField) from $tableName where intField >= 900"), Seq(Row(100)))
+      checkAnswer(sql(s"select count(intField) from $tableName where intField >= 900"),
+        Seq(Row(100)))
       checkIfStageFilesAreDeleted(tablePath)
     }
   }
@@ -100,13 +103,15 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
     sql(s"DROP TABLE IF EXISTS $bucketTableName").collect()
     sql(
       s"""
-         | CREATE TABLE $tableName (stringField string, intField int, shortField short, stringField1 string)
+         | CREATE TABLE $tableName (
+         | stringField string, intField int, shortField short, stringField1 string)
          | STORED AS carbondata TBLPROPERTIES ('BUCKET_NUMBER'='10', 'BUCKET_COLUMNS'='stringField')
       """.stripMargin
     ).collect()
     sql(
       s"""
-         | CREATE TABLE $bucketTableName (stringField string, intField int, shortField short, stringField1 string)
+         | CREATE TABLE $bucketTableName (
+         | stringField string, intField int, shortField short, stringField1 string)
          | STORED AS carbondata TBLPROPERTIES ('BUCKET_NUMBER'='10', 'BUCKET_COLUMNS'='stringField')
       """.stripMargin
     ).collect()
@@ -119,7 +124,8 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
       executeFlinkStreamingEnvironment(environment, writerProperties, carbonProperties)
 
       sql(s"INSERT INTO $tableName STAGE OPTIONS ('batch_file_count' = '5')")
-      val table = CarbonEnv.getCarbonTable(Option("default"), s"$tableName")(sqlContext.sparkSession)
+      val table = CarbonEnv.getCarbonTable(
+        Option("default"), s"$tableName")(sqlContext.sparkSession)
       val segmentDir = FileFactory.getCarbonFile(table.getTablePath + "/Fact/Part0/Segment_0")
       val dataFiles = segmentDir.listFiles(new CarbonFileFilter {
         override def accept(file: CarbonFile): Boolean = file.getName.endsWith(".carbondata")
@@ -132,7 +138,8 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
         override def accept(file: CarbonFile): Boolean = file.getName.endsWith(".carbondata")
       })
       assert(dataFiles2.length == 10)
-      checkAnswer(sql(s"SELECT count(*) FROM $tableName where stringField != 'AAA'"), Seq(Row(1000)))
+      checkAnswer(sql(s"SELECT count(*) FROM $tableName where stringField != 'AAA'"),
+        Seq(Row(1000)))
       sql(s"insert into $bucketTableName select * from $tableName").collect()
 
       val plan = sql(
@@ -196,9 +203,12 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
       assert(isFilterHitSecondaryIndex)
 
       // check if query hits bloom filter
-      checkAnswer(sql(s"select intField,stringField1 from $tableName where intField = 99"), Seq(Row(99, "si99")))
-      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
-      val explainBloom = sql(s"explain select intField,stringField1 from $tableName where intField = 99").collect()
+      checkAnswer(sql(s"select intField,stringField1 from $tableName where intField = 99"),
+        Seq(Row(99, "si99")))
+      CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
+      val explainBloom =
+        sql(s"explain select intField,stringField1 from $tableName where intField = 99").collect()
       assert(explainBloom(0).getString(0).contains(
         """
           |Table Scan on test_flink
@@ -218,7 +228,8 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
     createTable
     // create materialized view
     sql(s"drop materialized view if exists mv_1")
-    sql(s"create materialized view mv_1 as select stringField, shortField from $tableName where intField=99 ")
+    sql(s"create materialized view mv_1 " +
+        s"as select stringField, shortField from $tableName where intField=99 ")
     try {
       val tablePath = storeLocation + "/" + tableName + "/"
       val writerProperties = newWriterProperties(dataTempPath)
@@ -238,7 +249,7 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
         case l: LogicalRelation => l.catalogTable.get
       }
       assert(tables.exists(_.identifier.table.equalsIgnoreCase("mv_1")))
-      checkAnswer(df, Seq(Row("test99",12345)))
+      checkAnswer(df, Seq(Row("test99", 12345)))
       checkIfStageFilesAreDeleted(tablePath)
     }
   }
@@ -370,7 +381,9 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
       new Properties,
       writerProperties,
       carbonProperties)
-    val streamSink = StreamingFileSink.forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory).build
+    val streamSink = StreamingFileSink
+      .forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory)
+      .build
     stream.addSink(streamSink)
 
     try environment.execute
@@ -384,13 +397,17 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
   private def checkIfStageFilesAreDeleted(tablePath: String): Unit = {
     // ensure the stage snapshot file and all stage files are deleted
     assertResult(false)(FileFactory.isFileExist(CarbonTablePath.getStageSnapshotFile(tablePath)))
-    assertResult(true)(FileFactory.getCarbonFile(CarbonTablePath.getStageDir(tablePath)).listFiles().isEmpty)
+    assertResult(true)(FileFactory
+      .getCarbonFile(CarbonTablePath.getStageDir(tablePath))
+      .listFiles()
+      .isEmpty)
   }
 
   private def createTable = {
     sql(s"DROP TABLE IF EXISTS $tableName")
     sql(s"""
-           | CREATE TABLE $tableName (stringField string, intField int, shortField short, stringField1 string)
+           | CREATE TABLE $tableName (
+           | stringField string, intField int, shortField short, stringField1 string)
            | STORED AS carbondata
       """.stripMargin)
   }
diff --git a/integration/flink/src/test/scala/org/apache/carbon/flink/TestDeleteStageFiles.scala b/integration/flink/src/test/scala/org/apache/carbon/flink/TestDeleteStageFiles.scala
index a84ede4..5880951 100644
--- a/integration/flink/src/test/scala/org/apache/carbon/flink/TestDeleteStageFiles.scala
+++ b/integration/flink/src/test/scala/org/apache/carbon/flink/TestDeleteStageFiles.scala
@@ -85,7 +85,9 @@ class TestDeleteStageFiles extends QueryTest {
         writerProperties,
         carbonProperties
       )
-      val streamSink = StreamingFileSink.forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory).build
+      val streamSink = StreamingFileSink
+        .forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory)
+        .build
 
       stream.addSink(streamSink)
 
@@ -102,7 +104,10 @@ class TestDeleteStageFiles extends QueryTest {
 
       // ensure the stage snapshot file and all stage files are deleted
       assertResult(false)(FileFactory.isFileExist(CarbonTablePath.getStageSnapshotFile(tablePath)))
-      assertResult(true)(FileFactory.getCarbonFile(CarbonTablePath.getStageDir(tablePath)).listFiles().isEmpty)
+      assertResult(true)(FileFactory
+        .getCarbonFile(CarbonTablePath.getStageDir(tablePath))
+        .listFiles()
+        .isEmpty)
 
       sql(s"DELETE FROM TABLE $tableName STAGE OPTIONS('retain_hour'='0')")
       val dataLocation = new File(CarbonTablePath.getStageDataDir(tablePath))
@@ -163,7 +168,9 @@ class TestDeleteStageFiles extends QueryTest {
         writerProperties,
         carbonProperties
       )
-      val streamSink = StreamingFileSink.forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory).build
+      val streamSink = StreamingFileSink
+        .forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory)
+        .build
 
       stream.addSink(streamSink)
 
@@ -180,7 +187,10 @@ class TestDeleteStageFiles extends QueryTest {
 
       // ensure the stage snapshot file and all stage files are deleted
       assertResult(false)(FileFactory.isFileExist(CarbonTablePath.getStageSnapshotFile(tablePath)))
-      assertResult(true)(FileFactory.getCarbonFile(CarbonTablePath.getStageDir(tablePath)).listFiles().isEmpty)
+      assertResult(true)(FileFactory
+        .getCarbonFile(CarbonTablePath.getStageDir(tablePath))
+        .listFiles()
+        .isEmpty)
 
       sql(s"DELETE FROM TABLE $tableName STAGE OPTIONS('retain_hour'='0')")
       val dataLocation = new File(CarbonTablePath.getStageDataDir(tablePath))
@@ -240,7 +250,9 @@ class TestDeleteStageFiles extends QueryTest {
         writerProperties,
         carbonProperties
       )
-      val streamSink = StreamingFileSink.forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory).build
+      val streamSink = StreamingFileSink
+        .forBulkFormat(new Path(ProxyFileSystem.DEFAULT_URI), factory)
+        .build
 
       stream.addSink(streamSink)
 
diff --git a/integration/flink/src/test/scala/org/apache/carbon/flink/TestSource.scala b/integration/flink/src/test/scala/org/apache/carbon/flink/TestSource.scala
index d257ce1..dec575a 100644
--- a/integration/flink/src/test/scala/org/apache/carbon/flink/TestSource.scala
+++ b/integration/flink/src/test/scala/org/apache/carbon/flink/TestSource.scala
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.carbon.flink
 
 import java.util.Random
@@ -7,7 +24,8 @@ import org.apache.flink.runtime.state.{FunctionInitializationContext, FunctionSn
 import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction
 import org.apache.flink.streaming.api.functions.source.SourceFunction
 
-abstract class TestSource(val dataCount: Int) extends SourceFunction[Array[AnyRef]] with CheckpointedFunction {
+abstract class TestSource(val dataCount: Int)
+  extends SourceFunction[Array[AnyRef]] with CheckpointedFunction {
   private var dataIndex = 0
   private var dataIndexState: ListState[Integer] = _
   private var running = false
@@ -44,10 +62,13 @@ abstract class TestSource(val dataCount: Int) extends SourceFunction[Array[AnyRe
 
   @throws[Exception]
   override def initializeState(context: FunctionInitializationContext): Unit = {
-    this.dataIndexState = context.getOperatorStateStore.getListState(new ListStateDescriptor[Integer]("dataIndex", classOf[Integer]))
-    if (!context.isRestored) return
-    import scala.collection.JavaConversions._
-    for (dataIndex <- this.dataIndexState.get) {
+    this.dataIndexState = context.getOperatorStateStore
+      .getListState(new ListStateDescriptor[Integer]("dataIndex", classOf[Integer]))
+    if (!context.isRestored) {
+      return
+    }
+    import scala.collection.JavaConverters._
+    for (dataIndex <- this.dataIndexState.get().asScala) {
       this.dataIndex = dataIndex
     }
   }
@@ -59,4 +80,4 @@ object TestSource {
     override def initialValue(): Random = new Random()
   }
 
-}
\ No newline at end of file
+}
diff --git a/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoAllDataTypeLocalDictTest.scala b/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoAllDataTypeLocalDictTest.scala
index 9652a92..050ae4b 100644
--- a/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoAllDataTypeLocalDictTest.scala
+++ b/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoAllDataTypeLocalDictTest.scala
@@ -79,7 +79,11 @@ class PrestoAllDataTypeLocalDictTest extends FunSuiteLike with BeforeAndAfterAll
     prestoServer.execute("drop table if exists testdb.testtable")
     prestoServer.execute("drop schema if exists testdb")
     prestoServer.execute("create schema testdb")
-    prestoServer.execute("create table testdb.testtable(ID int, date date, country varchar, name varchar, phonetype varchar, serialname varchar,salary double, bonus decimal(10,4), monthlyBonus decimal(18,4), dob timestamp, shortField smallint, iscurrentemployee boolean) with(format='CARBON') ")
+    prestoServer.execute(
+      "create table testdb.testtable(ID int, date date, country varchar, name varchar, " +
+      "phonetype varchar, serialname varchar,salary double, bonus decimal(10,4), " +
+      "monthlyBonus decimal(18,4), dob timestamp, shortField smallint, " +
+      "iscurrentemployee boolean) with(format='CARBON') ")
     CarbonDataStoreCreator
       .createCarbonStore(storePath,
         s"$rootPath/integration/presto/src/test/resources/alldatatype.csv", true)
@@ -218,7 +222,9 @@ class PrestoAllDataTypeLocalDictTest extends FunSuiteLike with BeforeAndAfterAll
         "SELECT ID,DATE,COUNTRY,NAME,PHONETYPE,SERIALNAME,SALARY,BONUS FROM TESTDB.TESTTABLE " +
         "WHERE BONUS>1234 AND ID<2 GROUP BY ID,DATE,COUNTRY,NAME,PHONETYPE,SERIALNAME,SALARY," +
         "BONUS ORDER BY ID")
+    // scalastyle:off println
     actualResult.foreach(println)
+    // scalastyle:on println
     val expectedResult: List[Map[String, Any]] = List(Map("ID" -> 1,
       "NAME" -> "anubhav",
       "BONUS" -> java.math.BigDecimal.valueOf(1234.4440).setScale(4),
@@ -256,7 +262,7 @@ class PrestoAllDataTypeLocalDictTest extends FunSuiteLike with BeforeAndAfterAll
   test("test for null operator on date data type") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT ID FROM TESTDB.TESTTABLE WHERE DATE IS NULL")
-    val expectedResult: List[Map[String, Any]] = List(Map("ID" -> 9),Map("ID" -> null))
+    val expectedResult: List[Map[String, Any]] = List(Map("ID" -> 9), Map("ID" -> null))
     assert(actualResult.equals(expectedResult))
 
   }
@@ -278,7 +284,9 @@ class PrestoAllDataTypeLocalDictTest extends FunSuiteLike with BeforeAndAfterAll
 
   test("test timestamp datatype using cast operator") {
     val actualResult: List[Map[String, Any]] = prestoServer
-      .executeQuery("SELECT NAME AS RESULT FROM TESTDB.TESTTABLE WHERE DOB = CAST('2016-04-14 15:00:09' AS TIMESTAMP)")
+      .executeQuery(
+        "SELECT NAME AS RESULT FROM TESTDB.TESTTABLE " +
+        "WHERE DOB = CAST('2016-04-14 15:00:09' AS TIMESTAMP)")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> "jatin"))
     assert(actualResult.equals(expectedResult))
   }
@@ -294,4 +302,4 @@ class PrestoAllDataTypeLocalDictTest extends FunSuiteLike with BeforeAndAfterAll
     FileFactory.createNewFile(s"$storePath/testdb/.DS_STORE")
   }
 
-}
\ No newline at end of file
+}
diff --git a/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoAllDataTypeTest.scala b/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoAllDataTypeTest.scala
index b854363..d93c474 100644
--- a/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoAllDataTypeTest.scala
+++ b/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoAllDataTypeTest.scala
@@ -30,7 +30,6 @@ import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
 import org.apache.carbondata.presto.server.PrestoServer
 
-
 class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
 
   private val logger = LogServiceFactory
@@ -80,7 +79,10 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     prestoServer.execute("drop table if exists testdb.testtable")
     prestoServer.execute("drop schema if exists testdb")
     prestoServer.execute("create schema testdb")
-    prestoServer.execute("create table testdb.testtable(ID int, date date, country varchar, name varchar, phonetype varchar, serialname varchar,salary double, bonus decimal(10,4), monthlyBonus decimal(18,4), dob timestamp, shortField smallint, iscurrentemployee boolean) with(format='CARBON') ")
+    prestoServer.execute(
+      "create table testdb.testtable(ID int, date date, country varchar, name varchar, phonetype " +
+      "varchar, serialname varchar,salary double, bonus decimal(10,4), monthlyBonus decimal(18,4)" +
+      ", dob timestamp, shortField smallint, iscurrentemployee boolean) with(format='CARBON') ")
     CarbonDataStoreCreator
       .createCarbonStore(storePath,
         s"$rootPath/integration/presto/src/test/resources/alldatatype.csv")
@@ -99,6 +101,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> 11))
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for count() clause with distinct operator in presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT COUNT(DISTINCT ID) AS RESULT FROM TESTDB.TESTTABLE ")
@@ -106,42 +109,49 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     assert(actualResult.equals(expectedResult))
 
   }
+
   test("test the result for sum()in presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT SUM(ID) AS RESULT FROM TESTDB.TESTTABLE ")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> 54))
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for sum() wiTh distinct operator in presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT SUM(DISTINCT ID) AS RESULT FROM TESTDB.TESTTABLE ")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> 45))
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for avg() with distinct operator in presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT AVG(DISTINCT ID) AS RESULT FROM TESTDB.TESTTABLE ")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> 5))
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for min() with distinct operator in presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT MIN(DISTINCT ID) AS RESULT FROM TESTDB.TESTTABLE ")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> 1))
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for max() with distinct operator in presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT MAX(DISTINCT ID) AS RESULT FROM TESTDB.TESTTABLE ")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> 9))
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for count()clause with distinct operator on decimal column in presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT COUNT(DISTINCT BONUS) AS RESULT FROM TESTDB.TESTTABLE ")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> 10))
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for count()clause with out  distinct operator on decimal column in presto")
   {
     val actualResult: List[Map[String, Any]] = prestoServer
@@ -149,12 +159,14 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> 10))
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for sum()with out distinct operator for decimal column in presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT SUM(DISTINCT BONUS) AS RESULT FROM TESTDB.TESTTABLE ")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> 20774.6475))
     assert(actualResult.toString().equals(expectedResult.toString()))
   }
+
   test("test the result for sum() with distinct operator for decimal column in presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT SUM(DISTINCT BONUS) AS RESULT FROM TESTDB.TESTTABLE ")
@@ -163,6 +175,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
       actualResult.head("RESULT").toString.toDouble ==
       expectedResult.head("RESULT").toString.toDouble)
   }
+
   test("test the result for avg() with distinct operator on decimal on presto") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT AVG(DISTINCT BONUS) AS RESULT FROM TESTDB.TESTTABLE ")
@@ -185,6 +198,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
       "RESULT" -> java.math.BigDecimal.valueOf(9999.999).setScale(4)))
     assert(actualResult.equals(expectedResult))
   }
+
   test("select decimal data type with ORDER BY  clause") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT DISTINCT BONUS FROM TESTDB.TESTTABLE ORDER BY BONUS limit 3 ")
@@ -194,6 +208,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
       Map("BONUS" -> java.math.BigDecimal.valueOf(500.88).setScale(4)))
     assert(actualResult.equals(expectedResult))
   }
+
   test("select string type with order by clause") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT NAME FROM TESTDB.TESTTABLE ORDER BY NAME")
@@ -211,6 +226,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     )
     assert(actualResult.equals(expectedResult))
   }
+
   test("select DATE type with order by clause") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT DATE FROM TESTDB.TESTTABLE ORDER BY DATE")
@@ -233,6 +249,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     })
     assert(actualResult.reverse.head("DATE") == null)
   }
+
   test("select int type with order by clause") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT DISTINCT ID FROM TESTDB.TESTTABLE ORDER BY ID")
@@ -247,9 +264,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
       Map("ID" -> 9),
       Map("ID" -> null)
     )
-
     assert(actualResult.equals(expectedResult))
-
   }
 
   test("test and filter clause with greater than expression") {
@@ -275,8 +290,6 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
         "COUNTRY" -> "china",
         "PHONETYPE" -> "phone2441"))
     assert(actualResult.toString() equals expectedResult.toString())
-
-
   }
 
   test("test and filter clause with greater than equal to expression") {
@@ -320,6 +333,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
         "PHONETYPE" -> "phone2441"))
     assert(actualResult.toString() equals expectedResult.toString())
   }
+
   test("test and filter clause with less than equal to expression") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery(
@@ -345,6 +359,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
         "PHONETYPE" -> "phone1904"))
     assert(actualResult.toString() equals expectedResult.toString())
   }
+
   test("test equal to expression on decimal value") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery(
@@ -354,6 +369,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
 
     assert(actualResult equals expectedResult)
   }
+
   test("test less than expression with and operator") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery(
@@ -370,6 +386,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
       "PHONETYPE" -> "phone197"))
     assert(actualResult.toString().equals(expectedResult.toString()))
   }
+
   test("test the result for in clause") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT NAME from testdb.testtable WHERE PHONETYPE IN('phone1848','phone706')")
@@ -380,6 +397,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
 
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for not in clause") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery(
@@ -394,13 +412,15 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
 
     assert(actualResult.equals(expectedResult))
   }
+
   test("test for null operator on date data type") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT ID FROM TESTDB.TESTTABLE WHERE DATE IS NULL")
-    val expectedResult: List[Map[String, Any]] = List(Map("ID" -> 9),Map("ID" -> null))
+    val expectedResult: List[Map[String, Any]] = List(Map("ID" -> 9), Map("ID" -> null))
     assert(actualResult.equals(expectedResult))
 
   }
+
   test("test for not null operator on date data type") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT NAME FROM TESTDB.TESTTABLE WHERE DATE IS NOT NULL AND ID=9")
@@ -408,6 +428,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     assert(actualResult.equals(expectedResult))
 
   }
+
   test("test for not null operator on timestamp type") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT NAME FROM TESTDB.TESTTABLE WHERE DOB IS NOT NULL AND ID=9")
@@ -416,6 +437,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     assert(actualResult.equals(expectedResult))
 
   }
+
   test("test for null operator on timestamp type") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT NAME FROM TESTDB.TESTTABLE WHERE DOB IS NULL AND ID=1")
@@ -423,6 +445,7 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     assert(actualResult.equals(expectedResult))
 
   }
+
   test("test the result for short datatype with order by clause") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery(
@@ -438,14 +461,16 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
 
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for short datatype in clause where field is null") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery(
         "SELECT ID from testdb.testtable WHERE SHORTFIELD IS NULL ORDER BY SHORTFIELD ")
-    val expectedResult: List[Map[String, Any]] = List(Map("ID" -> 7),Map("ID" -> null))
+    val expectedResult: List[Map[String, Any]] = List(Map("ID" -> 7), Map("ID" -> null))
 
     assert(actualResult.equals(expectedResult))
   }
+
   test("test the result for short datatype with greater than operator") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery(
@@ -474,43 +499,49 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
   }
 
   test("test timestamp datatype using cast operator") {
-    val actualResult: List[Map[String, Any]] = prestoServer
-      .executeQuery("SELECT NAME AS RESULT FROM TESTDB.TESTTABLE WHERE DOB = CAST('2016-04-14 15:00:09' AS TIMESTAMP)")
+    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery(
+      "SELECT NAME AS RESULT FROM TESTDB.TESTTABLE " +
+      "WHERE DOB = CAST('2016-04-14 15:00:09' AS TIMESTAMP)")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> "jatin"))
     assert(actualResult.equals(expectedResult))
   }
 
   test("test timestamp datatype using cast and in operator") {
-    val actualResult: List[Map[String, Any]] = prestoServer
-      .executeQuery("SELECT ID AS RESULT FROM TESTDB.TESTTABLE WHERE DOB in (cast('2016-04-14 " +
-                    "15:00:09' as timestamp),cast('2015-10-07' as timestamp),cast('2015-10-07 01:00:03' as timestamp))")
+    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery(
+      "SELECT ID AS RESULT FROM TESTDB.TESTTABLE WHERE DOB in (cast('2016-04-14 15:00:09' as " +
+      "timestamp),cast('2015-10-07' as timestamp),cast('2015-10-07 01:00:03' as timestamp))")
     val expectedResult: List[Map[String, Any]] = List(Map("RESULT" -> "2"))
     assert(actualResult.toString() equals expectedResult.toString())
   }
+
   test("test the boolean data type") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT isCurrentEmployee AS RESULT FROM TESTDB.TESTTABLE WHERE ID=1")
     assert(actualResult.head("RESULT").toString.toBoolean)
   }
+
   test("test the boolean data type for null value") {
     val actualResult: List[Map[String, Any]] = prestoServer
       .executeQuery("SELECT id AS RESULT FROM TESTDB.TESTTABLE WHERE isCurrentEmployee is null")
     assert(actualResult.head("RESULT").toString.toInt==2)
   }
+
   test("test the boolean data type for not null value with filter ") {
-    val actualResult: List[Map[String, Any]] = prestoServer
-      .executeQuery("SELECT id AS RESULT FROM TESTDB.TESTTABLE WHERE isCurrentEmployee is NOT null AND ID>8")
+    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery(
+      "SELECT id AS RESULT FROM TESTDB.TESTTABLE WHERE isCurrentEmployee is NOT null AND ID>8")
     assert(actualResult.head("RESULT").toString.toInt==9)
   }
 
-  test("test the show schemas result"){
+  test("test the show schemas result") {
    val actualResult = prestoServer.executeQuery("SHOW SCHEMAS")
-    assert(actualResult.equals(List(Map("Schema" -> "information_schema"), Map("Schema" -> "testdb"))))
+    assert(actualResult.equals(List(Map("Schema" -> "information_schema"),
+      Map("Schema" -> "testdb"))))
+  }
+
+  test("test the show tables") {
+    val actualResult = prestoServer.executeQuery("SHOW TABLES")
+    assert(actualResult.equals(List(Map("Table" -> "testtable"))))
   }
-  test("test the show tables"){
-  val actualResult = prestoServer.executeQuery("SHOW TABLES")
-  assert(actualResult.equals(List(Map("Table" -> "testtable"))))
- }
 
   private def cleanUp(): Unit = {
     FileFactory.deleteFile(s"$storePath/Fact")
@@ -523,9 +554,9 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     FileFactory.createNewFile(s"$storePath/testdb/.DS_STORE")
   }
 
-  test("test the OR operator on same column"){
-    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery("SELECT BONUS FROM TESTDB.TESTTABLE WHERE" +
-      " BONUS < 600 OR BONUS > 5000 ORDER BY BONUS")
+  test("test the OR operator on same column") {
+    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery(
+      "SELECT BONUS FROM TESTDB.TESTTABLE WHERE BONUS < 600 OR BONUS > 5000 ORDER BY BONUS")
     val expectedResult: List[Map[String, Any]] = List(
       Map("BONUS" -> java.math.BigDecimal.valueOf(500.4140).setScale(4)),
       Map("BONUS" -> java.math.BigDecimal.valueOf(500.5900).setScale(4)),
@@ -536,8 +567,9 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     assert(actualResult.equals(expectedResult))
   }
 
-  test("test the AND, OR operator on same column"){
-    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery("SELECT SHORTFIELD FROM TESTDB.TESTTABLE WHERE" +
+  test("test the AND, OR operator on same column") {
+    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery(
+      "SELECT SHORTFIELD FROM TESTDB.TESTTABLE WHERE" +
       " SHORTFIELD > 4 AND (SHORTFIELD < 10 or SHORTFIELD > 15) ORDER BY SHORTFIELD")
     val expectedResult: List[Map[String, Any]] = List(
       Map("SHORTFIELD" -> 8),
@@ -545,9 +577,10 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     assert(actualResult.equals(expectedResult))
   }
 
-  test("test the OR operator with multiple AND on same column"){
-    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery("SELECT SHORTFIELD FROM TESTDB.TESTTABLE WHERE" +
-      " (SHORTFIELD > 1 AND SHORTFIELD < 5) OR (SHORTFIELD > 10 AND SHORTFIELD < 15) ORDER BY SHORTFIELD")
+  test("test the OR operator with multiple AND on same column") {
+    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery(
+      "SELECT SHORTFIELD FROM TESTDB.TESTTABLE WHERE (SHORTFIELD > 1 AND SHORTFIELD < 5) " +
+      "OR (SHORTFIELD > 10 AND SHORTFIELD < 15) ORDER BY SHORTFIELD")
     val expectedResult: List[Map[String, Any]] = List(
       Map("SHORTFIELD" -> 4),
       Map("SHORTFIELD" -> 11),
@@ -555,8 +588,9 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     assert(actualResult.equals(expectedResult))
   }
 
-  test("test the OR, AND operator with on Different column"){
-    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery("SELECT SHORTFIELD FROM TESTDB.TESTTABLE WHERE" +
+  test("test the OR, AND operator with on Different column") {
+    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery(
+      "SELECT SHORTFIELD FROM TESTDB.TESTTABLE WHERE" +
       " ID < 7 AND (SHORTFIELD < 5 OR SHORTFIELD > 15) ORDER BY SHORTFIELD")
     val expectedResult: List[Map[String, Any]] = List(
       Map("SHORTFIELD" -> 4),
@@ -564,14 +598,13 @@ class PrestoAllDataTypeTest extends FunSuiteLike with BeforeAndAfterAll {
     assert(actualResult.equals(expectedResult))
   }
 
-  test("test the Timestamp greaterthan expression"){
-    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery("SELECT DOB FROM TESTDB.TESTTABLE" +
-                                                                         " WHERE DOB > timestamp '2016-01-01 00:00:00.0' order by DOB")
+  test("test the Timestamp greaterthan expression") {
+    val actualResult: List[Map[String, Any]] = prestoServer.executeQuery(
+      "SELECT DOB FROM TESTDB.TESTTABLE WHERE DOB > timestamp '2016-01-01 00:00:00.0' order by DOB")
     val expectedResult: List[Map[String, Any]] = List(
-      Map("DOB" -> new Timestamp(new java.util.Date(2016-1900,1-1,14,15,7,9).getTime)),
-      Map("DOB" -> new Timestamp(new java.util.Date(2016-1900,4-1,14,15,0,9).getTime)))
+      Map("DOB" -> new Timestamp(new java.util.Date(2016 - 1900, 1 - 1, 14, 15, 7, 9).getTime)),
+      Map("DOB" -> new Timestamp(new java.util.Date(2016 - 1900, 4 - 1, 14, 15, 0, 9).getTime)))
     assert(actualResult.equals(expectedResult))
   }
 
-
-}
\ No newline at end of file
+}
diff --git a/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoTestNonTransactionalTableFiles.scala b/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoTestNonTransactionalTableFiles.scala
index 33e5fed..6de00d8 100644
--- a/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoTestNonTransactionalTableFiles.scala
+++ b/integration/presto/src/test/scala/org/apache/carbondata/presto/integrationtest/PrestoTestNonTransactionalTableFiles.scala
@@ -35,8 +35,8 @@ import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
 import org.apache.carbondata.presto.server.{PrestoServer, PrestoTestUtil}
 import org.apache.carbondata.sdk.file.{CarbonWriter, Schema}
 
-
-class PrestoTestNonTransactionalTableFiles extends FunSuiteLike with BeforeAndAfterAll with BeforeAndAfterEach {
+class PrestoTestNonTransactionalTableFiles
+  extends FunSuiteLike with BeforeAndAfterAll with BeforeAndAfterEach {
 
   private val logger = LogServiceFactory
     .getLogService(classOf[PrestoTestNonTransactionalTableFiles].getCanonicalName)
@@ -95,11 +95,9 @@ class PrestoTestNonTransactionalTableFiles extends FunSuiteLike with BeforeAndAf
 
   private def createTableBinary = {
     prestoServer.execute("drop table if exists sdk_output.files1")
-    prestoServer
-      .execute(
-        "create table sdk_output.files1(name boolean, age int, id varbinary, height double, salary " +
-        "real) with" +
-        "(format='CARBON') ")
+    prestoServer.execute(
+      "create table sdk_output.files1(name boolean, age int, id varbinary, height double, salary " +
+        "real) with(format='CARBON') ")
   }
 
   def buildTestData(rows: Int, options: util.Map[String, String], varcharDataGen: Boolean): Any = {
@@ -173,7 +171,7 @@ class PrestoTestNonTransactionalTableFiles extends FunSuiteLike with BeforeAndAf
         i += 1
       }
       if (options != null) {
-        //Keep one valid record. else carbon data file will not generate
+        // Keep one valid record. else carbon data file will not generate
         writer
           .write(Array[String]("robot" + i,
             String.valueOf(i),
@@ -383,11 +381,12 @@ class PrestoTestNonTransactionalTableFiles extends FunSuiteLike with BeforeAndAf
     val writerPathComplex = storePath + "/sdk_output/files4"
     FileUtils.deleteDirectory(new File(writerPathComplex))
     prestoServer.execute("drop table if exists sdk_output.files4")
-    prestoServer
-      .execute(
-        "create table sdk_output.files4(stringField varchar, structField ROW(byteField tinyint, shortField SMALLINT, intField Integer, " +
-        "longField BIGINT, floatField real, doubleField DOUBLE, binaryField varbinary, dateField date, timeStampField timestamp, " +
-        "booleanField boolean, longStringField varchar, decimalField decimal(8,2), stringChildField varchar)) with(format='CARBON') ")
+    prestoServer.execute(
+      "create table sdk_output.files4(stringField varchar, structField ROW(byteField tinyint, " +
+      "shortField SMALLINT, intField Integer, longField BIGINT, floatField real, doubleField " +
+      "DOUBLE, binaryField varbinary, dateField date, timeStampField timestamp, booleanField " +
+      "boolean, longStringField varchar, decimalField decimal(8,2), stringChildField varchar)) " +
+      "with(format='CARBON') ")
 
     val imagePath = rootPath + "/sdk/sdk/src/test/resources/image/carbondatalogo.jpg"
     val bis = new BufferedInputStream(new FileInputStream(imagePath))
@@ -418,41 +417,25 @@ class PrestoTestNonTransactionalTableFiles extends FunSuiteLike with BeforeAndAf
     ("structField", "struct", fields.asJava))
 
     try {
-      val options: util.Map[String, String] = Map("bAd_RECords_action" -> "FORCE", "quotechar" -> "\"").asJava
+      val options: util.Map[String, String] =
+        Map("bAd_RECords_action" -> "FORCE", "quotechar" -> "\"").asJava
       val builder = CarbonWriter.builder()
       val writer =
         builder.outputPath(writerPathComplex)
-          .uniqueIdentifier(System.nanoTime()).withLoadOptions(options).withBlockSize(2).enableLocalDictionary(false)
+          .uniqueIdentifier(System.nanoTime())
+          .withLoadOptions(options)
+          .withBlockSize(2)
+          .enableLocalDictionary(false)
           .withCsvInput(new Schema(structType)).writtenBy("presto").build()
 
-      val array1 = Array[String]("row1",
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null)
-
-      val array2 = Array[String]("row2", "5"
-                                           + "\001" + "5"
-                                           + "\001" + "5"
-                                           + "\001" + "5"
-                                           + "\001" + "5.512"
-                                           + "\001" + "5.512"
-                                           + "\001" + binaryValue
-                                           + "\001" + "2019-03-02"
-                                           + "\001" + "2019-02-12 03:03:34"
-                                           + "\001" + "true"
-                                           + "\001" + longChar
-                                           + "\001" + "-2.2"
-                                           + "\001" + "stringName")
+      val array1 = Array[String]("row1", null, null, null, null, null, null, null, null, null,
+        null, null, null, null)
+
+      val array2 = Array[String]("row2", "5" + "\001" + "5" + "\001" + "5" + "\001" + "5" +
+                                         "\001" + "5.512" + "\001" + "5.512" + "\001" +
+                                         binaryValue + "\001" + "2019-03-02" + "\001" +
+                                         "2019-02-12 03:03:34" + "\001" + "true" + "\001" +
+                                         longChar + "\001" + "-2.2" + "\001" + "stringName")
       writer.write(array1)
       writer.write(array2)
       writer.close()
@@ -467,7 +450,7 @@ class PrestoTestNonTransactionalTableFiles extends FunSuiteLike with BeforeAndAf
 
     for(i <- 0 to 1) {
       val row = actualResult(i)("stringfield")
-      val result = actualResult(i)("structfield").asInstanceOf[java.util.Map[String,Any]]
+      val result = actualResult(i)("structfield").asInstanceOf[java.util.Map[String, Any]]
       if(row == "row1") { assert(result.get("bytefield") == null)
         assert(result.get("shortfield") == null)
         assert(result.get("intfield") == null)
@@ -539,12 +522,12 @@ class PrestoTestNonTransactionalTableFiles extends FunSuiteLike with BeforeAndAf
     import scala.collection.JavaConverters._
     FileUtils.deleteDirectory(new File(writerPathComplex))
     prestoServer.execute("drop table if exists sdk_output.files5")
-    prestoServer
-      .execute(
-        "create table sdk_output.files5(arrayByte ARRAY(tinyint), arrayShort ARRAY(smallint), arrayInt ARRAY(int), " +
-        "arrayLong ARRAY(bigint), arrayFloat ARRAY(real), arrayDouble ARRAY(double), " +
-        "arrayBinary ARRAY(varbinary), arrayDate ARRAY(date), arrayTimestamp ARRAY(timestamp), arrayBoolean ARRAY(boolean), " +
-        "arrayVarchar ARRAY(varchar), arrayDecimal ARRAY(decimal(8,2)), arrayString ARRAY(varchar), stringField varchar ) with(format='CARBON') ")
+    prestoServer.execute(
+      "create table sdk_output.files5(arrayByte ARRAY(tinyint), arrayShort ARRAY(smallint), " +
+      "arrayInt ARRAY(int), arrayLong ARRAY(bigint), arrayFloat ARRAY(real), arrayDouble ARRAY" +
+      "(double), arrayBinary ARRAY(varbinary), arrayDate ARRAY(date), arrayTimestamp ARRAY" +
+      "(timestamp), arrayBoolean ARRAY(boolean), arrayVarchar ARRAY(varchar), arrayDecimal ARRAY" +
+      "(decimal(8,2)), arrayString ARRAY(varchar), stringField varchar ) with(format='CARBON') ")
 
     val imagePath = rootPath + "/sdk/sdk/src/test/resources/image/carbondatalogo.jpg"
     val bis = new BufferedInputStream(new FileInputStream(imagePath))
@@ -587,33 +570,24 @@ class PrestoTestNonTransactionalTableFiles extends FunSuiteLike with BeforeAndAf
     val structType14 = new Field("stringField", DataTypes.STRING)
 
     try {
-      val options: util.Map[String, String] = Map("bAd_RECords_action" -> "FORCE", "quotechar" -> "\"").asJava
+      val options: util.Map[String, String] =
+        Map("bAd_RECords_action" -> "FORCE", "quotechar" -> "\"").asJava
       val builder = CarbonWriter.builder()
       val writer =
         builder.outputPath(writerPathComplex).withLoadOptions(options)
           .uniqueIdentifier(System.nanoTime()).withBlockSize(2).enableLocalDictionary(false)
-          .withCsvInput(new Schema(Array[Field](structType1,structType2,structType3,structType4,structType5,structType6,
-            structType7,structType8,structType9,structType10,structType11,structType12,structType13,structType14))).writtenBy("presto").build()
-
-      var array = Array[String](null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        null,
-        "row1")
+          .withCsvInput(new Schema(Array[Field](
+            structType1, structType2, structType3, structType4, structType5, structType6,
+            structType7, structType8, structType9, structType10, structType11, structType12,
+            structType13, structType14))).writtenBy("presto").build()
+
+      var array = Array[String](null, null, null, null, null, null, null, null, null, null,
+        null, null, null, "row1")
       writer.write(array)
       array = Array[String]("3" + "\001" + "5" + "\001" + "4",
         "4" + "\001" + "5" + "\001" + "6",
         "4",
-        "2" + "\001" + "59999999" + "\001" + "99999999999" ,
+        "2" + "\001" + "59999999" + "\001" + "99999999999",
         "5.4646" + "\001" + "5.55" + "\001" + "0.055",
         "5.46464646464" + "\001" + "5.55" + "\001" + "0.055",
         binaryValue,
diff --git a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
index 05eed08..4b91c8f 100644
--- a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
+++ b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
@@ -21,10 +21,10 @@ import java.io._
 import java.nio.charset.Charset
 import java.text.SimpleDateFormat
 import java.util
-import java.util.concurrent.atomic.AtomicInteger
 import java.util.{ArrayList, Date, UUID}
+import java.util.concurrent.atomic.AtomicInteger
 
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
 import scala.collection.mutable
 
 import com.google.gson.Gson
@@ -32,23 +32,23 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.io.NullWritable
 import org.apache.hadoop.mapred.TaskAttemptID
-import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
 import org.apache.hadoop.mapreduce.{RecordReader, TaskType}
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
 
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.compression.CompressorFactory
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.fileoperations.{AtomicFileOperationFactory, AtomicFileOperations, FileWriteOperation}
+import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonMetadata, CarbonTableIdentifier}
 import org.apache.carbondata.core.metadata.converter.{SchemaConverter, ThriftWrapperSchemaConverterImpl}
 import org.apache.carbondata.core.metadata.datatype.{DataTypes, StructField}
 import org.apache.carbondata.core.metadata.encoder.Encoding
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension
 import org.apache.carbondata.core.metadata.schema.table.{CarbonTable, CarbonTableBuilder, TableSchemaBuilder}
-import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonMetadata, CarbonTableIdentifier}
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentStatus}
-import org.apache.carbondata.core.util.path.CarbonTablePath
 import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
+import org.apache.carbondata.core.util.path.CarbonTablePath
 import org.apache.carbondata.core.writer.ThriftWriter
 import org.apache.carbondata.processing.loading.DataLoadExecutor
 import org.apache.carbondata.processing.loading.constants.DataLoadProcessorConstants
@@ -121,8 +121,8 @@ object CarbonDataStoreCreator {
         "," +
         "true")
       loadModel.setMaxColumns("15")
-      loadModel.setCsvHeader(
-        "ID,date,country,name,phonetype,serialname,salary,bonus,monthlyBonus,dob,shortField,isCurrentEmployee")
+      loadModel.setCsvHeader("ID,date,country,name,phonetype,serialname,salary,bonus," +
+                             "monthlyBonus,dob,shortField,isCurrentEmployee")
       loadModel.setCsvHeaderColumns(loadModel.getCsvHeader.split(","))
       loadModel.setTaskNo("0")
       loadModel.setSegmentId("0")
@@ -147,11 +147,14 @@ object CarbonDataStoreCreator {
     schemaBuilder.addColumn(new StructField("phonetype", DataTypes.STRING), integer, false, false)
     schemaBuilder.addColumn(new StructField("serialname", DataTypes.STRING), integer, false, false)
     schemaBuilder.addColumn(new StructField("salary", DataTypes.DOUBLE), integer, false, false)
-    schemaBuilder.addColumn(new StructField("bonus", DataTypes.createDecimalType(10, 4)), integer, false, true)
-    schemaBuilder.addColumn(new StructField("monthlyBonus", DataTypes.createDecimalType(18, 4)), integer, false, true)
+    schemaBuilder.addColumn(new StructField("bonus", DataTypes.createDecimalType(10, 4)),
+      integer, false, true)
+    schemaBuilder.addColumn(new StructField("monthlyBonus", DataTypes.createDecimalType(18, 4)),
+      integer, false, true)
     schemaBuilder.addColumn(new StructField("dob", DataTypes.TIMESTAMP), integer, false, true)
     schemaBuilder.addColumn(new StructField("shortField", DataTypes.SHORT), integer, false, false)
-    schemaBuilder.addColumn(new StructField("isCurrentEmployee", DataTypes.BOOLEAN), integer, false, true)
+    schemaBuilder.addColumn(new StructField("isCurrentEmployee", DataTypes.BOOLEAN),
+      integer, false, true)
     schemaBuilder.tableName(absoluteTableIdentifier.getTableName)
     val schema = schemaBuilder.build()
 
@@ -212,8 +215,9 @@ object CarbonDataStoreCreator {
   private def isDictionaryDefaultMember(dims: util.List[CarbonDimension],
       dimensionSet: Array[util.List[String]],
       index: Int) = {
-    dimensionSet(index).isEmpty && dims(index).hasEncoding(Encoding.DICTIONARY) &&
-    !dims(index).hasEncoding(Encoding.DIRECT_DICTIONARY)
+    val dimensions = dims.asScala
+    dimensionSet(index).isEmpty && dimensions(index).hasEncoding(Encoding.DICTIONARY) &&
+    !dimensions(index).hasEncoding(Encoding.DIRECT_DICTIONARY)
   }
 
   /**
diff --git a/integration/spark-common-cluster-test/pom.xml b/integration/spark-common-cluster-test/pom.xml
index 12426ff..91d1a70 100644
--- a/integration/spark-common-cluster-test/pom.xml
+++ b/integration/spark-common-cluster-test/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>2.0.0-SNAPSHOT</version>
+    <version>2.1.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
@@ -37,7 +37,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark</artifactId>
+      <artifactId>carbondata-spark_${spark.binary.version}</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
@@ -152,7 +152,7 @@
         <!-- Note config is repeated in scalatest config -->
         <configuration>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx6g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx6g -XX:ReservedCodeCacheSize=512m</argLine>
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
             <spark.carbon.hive.schema.store>${carbon.hive.based.metastore}</spark.carbon.hive.schema.store>
@@ -170,7 +170,7 @@
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
           <junitxml>.</junitxml>
           <filereports>CarbonTestSuite.txt</filereports>
-          <argLine> ${argLine} -ea -Xmx5g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
+          <argLine> ${argLine} -ea -Xmx5g -XX:ReservedCodeCacheSize=512m
           </argLine>
           <stderr />
           <environmentVariables>
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
index 10e7b02..598cb7d 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
@@ -1,4 +1,3 @@
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -23,22 +22,19 @@ import org.apache.spark.sql.Row
 import org.apache.spark.sql.common.util._
 import org.apache.spark.util.SparkUtil
 import org.scalatest.BeforeAndAfterAll
+
 import org.apache.carbondata.common.constants.LoggerAction
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
-
 import org.apache.carbondata.spark.exception.ProcessMetaDataException
 
 /**
  * Test Class for AlterTableTestCase to verify all scenerios
  */
-
 class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
-         
-
-  //Check alter table using with alter command in lower case
+  // scalastyle:off lineLength
+  // Check alter table using with alter command in lower case
   test("RenameTable_001_01", Include) {
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
@@ -48,8 +44,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test2""").collect
   }
 
-
-  //Check alter table using with alter command in upper & lower case
+  // Check alter table using with alter command in upper & lower case
   test("RenameTable_001_02", Include) {
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
@@ -60,8 +55,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test2""").collect
   }
 
-
-  //Check alter table using with alter command in upper case
+  // Check alter table using with alter command in upper case
   test("RenameTable_001_03", Include) {
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
@@ -71,8 +65,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test2""").collect
   }
 
-
-  //Check alter table where target table speficifed with database name
+  // Check alter table where target table speficifed with database name
   test("RenameTable_001_04", Include) {
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
@@ -82,8 +75,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
   }
 
-
-  //Check alter table run multiple times, revert back the name to original
+  // Check alter table run multiple times, revert back the name to original
   test("RenameTable_001_06", Include) {
     sql(s"""drop table if exists test2""").collect
     sql(s"""drop table if exists test1""").collect
@@ -98,8 +90,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check data load after table rename
+  // Check data load after table rename
   test("RenameTable_001_07_1", Include) {
     sql(s"""drop table if exists test2""").collect
     sql(s"""drop table if exists test1""").collect
@@ -112,8 +103,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
   }
 
-
-  //Check data load after table rename
+  // Check data load after table rename
   test("RenameTable_001_07_2", Include) {
 
     checkAnswer(s"""select name from test2 where name = 'yy'""",
@@ -121,8 +111,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test2""").collect
   }
 
-
-  //Check alter table when the altered name is already present in the database
+  // Check alter table when the altered name is already present in the database
   test("RenameTable_001_08", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -135,8 +124,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test2""").collect
   }
 
-
-  //Check alter table when the altered name is given multiple times
+  // Check alter table when the altered name is given multiple times
   test("RenameTable_001_09", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -146,8 +134,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check delete column for dimension column
+  // Check delete column for dimension column
   test("DeleteCol_001_01", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (name string, id int) STORED AS carbondata  """).collect
@@ -158,8 +145,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check delete column for measure column
+  // Check delete column for measure column
   test("DeleteCol_001_02", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -170,8 +156,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check delete column for measure and dimension column
+  // Check delete column for measure and dimension column
   test("DeleteCol_001_03", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
@@ -182,8 +167,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check delete column for multiple column
+  // Check delete column for multiple column
   test("DeleteCol_001_04", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata """).collect
@@ -194,8 +178,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check delete column for all columns
+  // Check delete column for all columns
   test("DeleteCol_001_05", Include) {
     sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
     sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
@@ -203,8 +186,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check delete column for include dictionary column
+  // Check delete column for include dictionary column
   test("DeleteCol_001_06", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (name string, id int) STORED AS carbondata """).collect
@@ -215,8 +197,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check delete column for timestamp column
+  // Check delete column for timestamp column
   test("DeleteCol_001_08", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
@@ -227,8 +208,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check the drop of added column will remove the column from table
+  // Check the drop of added column will remove the column from table
   test("DeleteCol_001_09_1", Include) {
      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
@@ -239,8 +219,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check the drop of added column will remove the column from table
+  // Check the drop of added column will remove the column from table
   test("DeleteCol_001_09_2", Include) {
     intercept[Exception] {
      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
@@ -253,8 +232,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Drop a column and add it again with a default value
+  // Drop a column and add it again with a default value
   test("DeleteCol_001_10", Include) {
      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
@@ -265,8 +243,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Drop a column and add it again with a default value
+  // Drop a column and add it again with a default value
   test("DeleteCol_001_11", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
@@ -279,20 +256,18 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check add column for multiple column adds
+  // Check add column for multiple column adds
   test("AddColumn_001_01", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (upd_time timestamp, country string)""").collect
     checkAnswer(s"""select upd_time, country from test1""",
-      Seq(Row(null,null)), "AlterTableTestCase_AddColumn_001_01")
+      Seq(Row(null, null)), "AlterTableTestCase_AddColumn_001_01")
      sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check add column for dimension column and add table property to set default value
+  // Check add column for dimension column and add table property to set default value
   test("AddColumn_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -304,8 +279,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test1""").collect
   }
 
-
-  //Check add column to add a measure column
+  // Check add column to add a measure column
   test("AddColumn_001_03", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -317,7 +291,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check add column to add a measure column added with dictionary include
+  // Check add column to add a measure column added with dictionary include
   test("AddColumn_001_04", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -330,7 +304,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check add column to add a measure column initialized with default value
+  // Check add column to add a measure column initialized with default value
   ignore("AddColumn_001_05", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -343,7 +317,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check add column to add a measure column initialized with default value which does not suite the data type
+  // Check add column to add a measure column initialized with default value which does not suit the data type
   test("AddColumn_001_06", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -355,7 +329,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check add column to add a measure column initialized with default value on a empty table
+  // Check add column to add a measure column initialized with default value on a empty table
   test("AddColumn_001_07", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -367,19 +341,19 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check add column to add a dim and measure column
+  // Check add column to add a dim and measure column
   test("AddColumn_001_08", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (id1 int, country string) """).collect
     checkAnswer(s"""select id1, country from test1""",
-      Seq(Row(null,null)), "AlterTableTestCase_AddColumn_001_08")
+      Seq(Row(null, null)), "AlterTableTestCase_AddColumn_001_08")
      sql(s"""drop table if exists test1""").collect
   }
 
 
-  //Check add column for measure and make it dictionary column
+  // Check add column for measure and make it dictionary column
   test("AddColumn_001_09", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""create table test1 (name string) STORED AS carbondata""").collect
@@ -391,19 +365,19 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check add column to add columns and exclude the dim col from dictionary
+  // Check add column to add columns and exclude the dim col from dictionary
   test("AddColumn_001_10", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx'""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (upd_time timestamp, country string) """).collect
     checkAnswer(s"""select country, upd_time from test1""",
-      Seq(Row(null,null)), "AlterTableTestCase_AddColumn_001_10")
+      Seq(Row(null, null)), "AlterTableTestCase_AddColumn_001_10")
      sql(s"""drop table if exists test1""").collect
   }
 
 
-  //Check add column to add a timestamp column
+  // Check add column to add a timestamp column
   test("AddColumn_001_11", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -415,7 +389,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check add column with option default value is given for an existing column
+  // Check add column with option default value is given for an existing column
   test("AddColumn_001_14", Include) {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
@@ -427,7 +401,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //check alter column for small decimal to big decimal
+  // Check alter column for small decimal to big decimal
   test("AlterData_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, price decimal(3,2)) STORED AS carbondata""").collect
@@ -440,7 +414,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //check drop table after table rename using new name
+  // Check drop table after table rename using new name
   test("DropTable_001_01", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, price decimal(3,2)) STORED AS carbondata""").collect
@@ -450,7 +424,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //check drop table after table rename using old name
+  // Check drop table after table rename using old name
   test("DropTable_001_02", Include) {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
@@ -463,7 +437,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //check drop table after table rename using new name, after table load
+  // Check drop table after table rename using new name, after table load
   test("DropTable_001_03", Include) {
      sql(s"""create table test1 (name string, price decimal(3,2)) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1.2""").collect
@@ -474,7 +448,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //check drop table after alter table name, using new name when table is empty
+  // Check drop table after alter table name, using new name when table is empty
   test("DropTable_001_04", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, price decimal(3,2)) STORED AS carbondata""").collect
@@ -484,7 +458,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //check drop table when table is altered by adding columns
+  // Check drop table when table is altered by adding columns
   test("DropTable_001_05", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""create table test1 (name string, id int) STORED AS carbondata  """).collect
@@ -496,7 +470,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check schema changes and carbon dictionary additions for alter table when new column added
+  // Check schema changes and carbon dictionary additions for alter table when new column added
   test("StorageFi_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (country string, name string) STORED AS carbondata """).collect
@@ -506,43 +480,43 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check dictionary cache is loaded with new added column when query is run
+  // Check dictionary cache is loaded with new added column when query is run
   ignore("Dictionary_001_01", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id decimal(3,2),country string) STORED AS carbondata """).collect
    sql(s"""insert into test1 select 'xx',1.22,'china'""").collect
    sql(s"""alter table test1 add columns (price decimal(10,4)) tblproperties('DEFAULT.VALUE.price'='11.111')""").collect
     checkAnswer(s"""select * from test1""",
-      Seq(Row("xx",1.22,"china",11.1110)), "AlterTableTestCase_Dictionary_001_01")
+      Seq(Row("xx", 1.22, "china", 11.1110)), "AlterTableTestCase_Dictionary_001_01")
      sql(s"""drop table if exists test1""").collect
   }
 
 
-  //Check if dropped column is removed from driver side LRU cache
+  // Check if dropped column is removed from driver side LRU cache
   test("Dictionary_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id decimal(3,2),country string) STORED AS carbondata """).collect
    sql(s"""insert into test1 select 'xx',1.22,'china'""").collect
    sql(s"""alter table test1 drop columns (country)""").collect
     checkAnswer(s"""select * from test1""",
-      Seq(Row("xx",1.22)), "AlterTableTestCase_Dictionary_001_02")
+      Seq(Row("xx", 1.22)), "AlterTableTestCase_Dictionary_001_02")
      sql(s"""drop table if exists test1""").collect
   }
 
 
-  //Check if dropped column is removed from driver side LRU cache at driver side
+  // Check if dropped column is removed from driver side LRU cache at driver side
   test("Dictionary_001_03", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id decimal(3,2),country string) STORED AS carbondata """).collect
    sql(s"""insert into test1 select 'xx',1.22,'china'""").collect
    sql(s"""alter table test1 drop columns(country)""").collect
     checkAnswer(s"""select * from test1""",
-      Seq(Row("xx",1.22)), "AlterTableTestCase_Dictionary_001_03")
+      Seq(Row("xx", 1.22)), "AlterTableTestCase_Dictionary_001_03")
      sql(s"""drop table if exists test1""").collect
   }
 
 
-  //Check table load works fine after alter table name
+  // Check table load works fine after alter table name
   test("Dataload_001_01", Include) {
      sql(s"""drop table if exists t_carbn01t""").collect
    sql(s"""drop table if exists t_carbn01""").collect
@@ -554,7 +528,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check table load into old table after alter table name
+  // Check table load into old table after alter table name
   test("Dataload_001_02", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -567,7 +541,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check table load works fine after alter table name
+  // Check table load works fine after alter table name
   test("Dataload_001_03", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -578,7 +552,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check table load works fine after alter table name
+  // Check table load works fine after alter table name
   test("Dataload_001_04", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -591,7 +565,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check table load works fine after alter table name
+  // Check table load works fine after alter table name
   test("Dataload_001_05", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -604,7 +578,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check if alter table(add column) is supported when data load is happening
+  // Check if alter table(add column) is supported when data load is happening
   test("Concurrent_alter_001_01", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -615,7 +589,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check if alter table(delete column) is supported when data load is happening
+  // Check if alter table(delete column) is supported when data load is happening
   test("Concurrent_alter_001_02", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -625,7 +599,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check if alter table(change column) is supported when data load is happening
+  // Check if alter table(change column) is supported when data load is happening
   test("Concurrent_alter_001_03", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -635,7 +609,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check if alter table(rename) is supported when data load is happening
+  // Check if alter table(rename) is supported when data load is happening
   test("Concurrent_alter_001_04", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -645,7 +619,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //check table insert works fine after alter table to add a column
+  // Check table insert works fine after alter table to add a column
   test("Insertint_001_03", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
     sql(s"""drop table if exists default.t_carbn02""").collect
@@ -661,7 +635,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //check table insert works fine after alter table to add a column
+  // Check table insert works fine after alter table to add a column
   test("Insertint_001_04", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
     sql(s"""drop table if exists default.t_carbn02""").collect
@@ -677,7 +651,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //check table insert works fine after alter table to drop columns
+  // Check table insert works fine after alter table to drop columns
   test("Insertint_001_05", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test2 (country string, name string, state_id int,id int) STORED AS carbondata """).collect
@@ -692,7 +666,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check show segments on old table After altering the Table name.
+  // Check show segments on old table After altering the Table name.
   test("Showsegme_001_01", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (country string, id int) STORED AS carbondata""").collect
@@ -703,7 +677,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check vertical compaction on old table after altering the table name
+  // Check vertical compaction on old table after altering the table name
   test("Compaction_001_01", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
@@ -717,7 +691,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check vertical compaction on new table when all segments are created before alter table name.
+  // Check vertical compaction on new table when all segments are created before alter table name.
   test("Compaction_001_02", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
@@ -733,7 +707,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check vertical compaction on new table when some of the segments are created after altering the table name
+  // Check vertical compaction on new table when some of the segments are created after altering the table name
   test("Compaction_001_03", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
@@ -749,7 +723,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check vertical compaction on new table after altering the table name multiple times and and segments created after alter
+  // Check vertical compaction on new table after altering the table name multiple times and segments created after alter
   test("Compaction_001_04", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
@@ -767,7 +741,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check vertical compaction(major) on new table name when part of the segments are created before altering the table name
+  // Check vertical compaction(major) on new table name when part of the segments are created before altering the table name
   test("Compaction_001_05", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
@@ -783,7 +757,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
+  // Check vertical compaction when all segments are created before drop column, check dropped column is not used in the compaction
+  // Check vertical compaction when all segments are created before drop column, check dropped column is not used in the compation
   test("Compaction_001_06", Include) {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
@@ -800,7 +774,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
+  // Check vertical compaction when some of the segments are created before drop column, check dropped column is not used in the compaction
+  // Check vertical compaction when some of the segments are created before drop column, check dropped column is not used in the compation
   test("Compaction_001_07", Include) {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
@@ -817,7 +791,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
+  // Check vertical compaction for multiple drop column, check dropped column is not used in the compaction
+  // Check vertical compaction for multiple drop column, check dropped column is not used in the compation
   test("Compaction_001_08", Include) {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
@@ -835,7 +809,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check vertical compaction on altered table for column add, when all segments crreated before table alter. Ensure added column in the compacted segment
+  // Check vertical compaction on altered table for column add, when all segments created before table alter. Ensure added column in the compacted segment
   test("Compaction_001_09", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
@@ -851,7 +825,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check vertical compaction on altered table for column add, when some of the segments crreated before table alter. Ensure added column in the compacted segment
+  // Check vertical compaction on altered table for column add, when some of the segments created before table alter. Ensure added column in the compacted segment
   test("Compaction_001_10", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1(name string) STORED AS carbondata""").collect
@@ -866,7 +840,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check vertical compaction on multiple altered table for column add, when some of the segments crreated after table alter. Ensure added column in the compacted segment
+  // Check vertical compaction on multiple altered table for column add, when some of the segments created after table alter. Ensure added column in the compacted segment
   test("Compaction_001_11", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1(name string) STORED AS carbondata""").collect
@@ -883,7 +857,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check vertical compaction on altered table for change column datatype, when some of the segments crreated after table alter. Ensure added column in the compacted segment
+  // Check vertical compaction on altered table for change column datatype, when some of the segments created after table alter. Ensure added column in the compacted segment
   test("Compaction_001_12", Include) {
     sql(s"""drop table if exists default.test1""").collect
      sql(s"""create table test1(name string, id int) STORED AS carbondata""").collect
@@ -893,7 +867,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
    sql(s"""insert into test1 select 'xx2',2999999999""").collect
    sql(s"""alter table test1 compact 'minor'""").collect
     checkAnswer(s"""select id from test1""",
-      Seq(Row(1),Row(2), Row(2999999999L)), "AlterTableTestCase_Compaction_001_12")
+      Seq(Row(1), Row(2), Row(2999999999L)), "AlterTableTestCase_Compaction_001_12")
      sql(s"""drop table if exists test1""").collect
   }
 
@@ -906,7 +880,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check bad record locaion isnot changed when table name is altered
+  // Check bad record locaion isnot changed when table name is altered
   test("BadRecords_001_01", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -917,7 +891,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check bad record locaion isnot changed when table name is altered
+  // Check bad record locaion isnot changed when table name is altered
   test("BadRecords_001_02", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
      sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
@@ -927,7 +901,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check for bad record handling while latering the table if added column is set with default value which is a bad record
+  // Check for bad record handling while latering the table if added column is set with default value which is a bad record
   test("BadRecords_001_03", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -935,12 +909,12 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
    sql(s"""insert into test1 select 'xx',12""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (id2 int) TBLPROPERTIES('include_dictionary'='id2','DEFAULT.VALUE.id2'='China')""").collect
     checkAnswer(s"""select * from test1 where id = 1""",
-      Seq(Row("xx",1,null)), "AlterTableTestCase_BadRecords_001_03")
+      Seq(Row("xx", 1, null)), "AlterTableTestCase_BadRecords_001_03")
      sql(s"""drop table if exists test1""").collect
   }
 
 
-  //Check delete segment is not allowed on old table name when table name is altered
+  // Check delete segment is not allowed on old table name when table name is altered
   test("DeleteSeg_001_01", Include) {
     intercept[Exception] {
       sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -953,7 +927,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check delete segment is allowed on new table name when table name is altered
+  // Check delete segment is allowed on new table name when table name is altered
   test("DeleteSeg_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
@@ -962,12 +936,12 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
    sql(s"""alter table test1 rename to test2""").collect
    sql(s"""delete from table test2 where segment.id in (0)""").collect
     checkAnswer(s"""Select * from test2""",
-      Seq(Row("xx",12)), "AlterTableTestCase_DeleteSeg_001_02")
+      Seq(Row("xx", 12)), "AlterTableTestCase_DeleteSeg_001_02")
      sql(s"""drop table if exists test2""").collect
   }
 
 
-  //Check alter the table name,alter the table name again with first name and fire Select query
+  // Check alter the table name,alter the table name again with first name and fire Select query
   test("AlterTable-001-AltersameTablename-001-TC001", Include) {
      sql(s"""drop table  if exists uniqdata""").collect
    sql(s"""drop table  if exists uniqdata1""").collect
@@ -982,7 +956,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check select query after alter the int to Bigint and decimal Lower Precision to higher precision
+  // Check select query after alter the int to Bigint and decimal Lower Precision to higher precision
   test("AlterTable-007-selectquery-001-TC002", Include) {
      sql(s"""CREATE TABLE uniqdata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata """).collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
@@ -993,7 +967,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check select query after alter from lower to higher precision
+  // Check select query after alter from lower to higher precision
   test("AlterTable-008-selectquery-001-TC003", Include) {
      sql(s"""CREATE TABLE uniqdata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata """).collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
@@ -1004,7 +978,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check add column on Decimal,Timestamp,int,string,Bigint
+  // Check add column on Decimal,Timestamp,int,string,Bigint
   test("AlterTable-002-001-TC-004", Include) {
      sql(s"""drop table if exists uniqdata59""").collect
    sql(s"""CREATE TABLE uniqdata59 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata """).collect
@@ -1020,7 +994,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
         sql("alter table alter_hive add columns(add string)")
       }
       assert(exception.getMessage.contains("Unsupported alter operation on hive table"))
-    } else if (SparkUtil.isSparkVersionXandAbove("2.2")) {
+    } else if (SparkUtil.isSparkVersionXAndAbove("2.2")) {
       sql("alter table alter_hive add columns(add string)")
       sql("alter table alter_hive add columns (var map<string, string>)")
       sql("insert into alter_hive select 'abc','banglore',map('age','10','birth','2020')")
@@ -1034,7 +1008,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Alter table add column for hive partitioned table for spark version above 2.1") {
     sql("drop table if exists alter_hive")
     sql("create table alter_hive(name string) stored as rcfile partitioned by (dt string)")
-    if (SparkUtil.isSparkVersionXandAbove("2.2")) {
+    if (SparkUtil.isSparkVersionXAndAbove("2.2")) {
       sql("alter table alter_hive add columns(add string)")
       sql("alter table alter_hive add columns (var map<string, string>)")
       sql("alter table alter_hive add columns (loves array<string>)")
@@ -1054,7 +1028,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Alter table add complex column for hive table for spark version above 2.1") {
     sql("drop table if exists alter_hive")
     sql("create table alter_hive(name string) stored as rcfile")
-    if (SparkUtil.isSparkVersionXandAbove("2.2")) {
+    if (SparkUtil.isSparkVersionXAndAbove("2.2")) {
       sql("alter table alter_hive add columns (add1 string comment 'comment1')")
       sql("alter table alter_hive add columns (add2 decimal)")
       sql("alter table alter_hive add columns (add3 decimal(20,2))")
@@ -1117,7 +1091,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll: Unit = {
-    //Reverting to old
+    // Reverting to old
     prop.addProperty("carbon.horizontal.compaction.enable", p1)
     prop.addProperty("carbon.horizontal.update.compaction.threshold", p2)
     prop.addProperty("carbon.horizontal.delete.compaction.threshold", p3)
@@ -1127,4 +1101,5 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists test2")
     sql("drop table if exists test1")
   }
-}
\ No newline at end of file
+  // scalastyle:on lineLength
+}
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala
index 90bb7a5..3e81d85 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala
@@ -1,4 +1,3 @@
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -27,9 +26,9 @@ import org.scalatest.BeforeAndAfterAll
  */
 
 class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
-         
-  
-  //Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV without header and specify headers in command
+
+  // scalastyle:off lineLength
+  // Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV without header and specify headers in command
   test("BadRecords-001_PTS001_TC001", Include) {
      sql(s"""CREATE TABLE badrecordtest1 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2.csv' into table badrecordtest1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='ID,CUST_ID,cust_name')""").collect
@@ -40,7 +39,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV with  header and specify header in command
+  // Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV with  header and specify header in command
   test("BadRecords-001_PTS002_TC001", Include) {
      sql(s"""CREATE TABLE badrecordtest2 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordtest2 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='ID,CUST_ID,cust_name')""").collect
@@ -51,7 +50,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV with  header and without specify header in command
+  // Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV with  header and without specify header in command
   test("BadRecords-001_PTS003_TC001", Include) {
      sql(s"""CREATE TABLE badrecordtest3 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordtest3 OPTIONS('FILEHEADER'='ID,CUST_ID,cust_name','DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
@@ -62,7 +61,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Create table and load the data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true with CSV has incomplete/wrong data
+  // Create table and load the data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true with CSV has incomplete/wrong data
   test("BadRecords-001_PTS004_TC001", Include) {
      sql(s"""CREATE TABLE badrecordtest4 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test4.csv' into table badrecordtest4 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
@@ -73,7 +72,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Create table and load data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true for data types with boundary values of data type
+  // Create table and load data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true for data types with boundary values of data type
   test("BadRecords-001_PTS005_TC001", Include) {
      sql(s"""CREATE TABLE badrecordtest5 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test5.csv' into table badrecordtest5 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
@@ -84,7 +83,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true  from CSV with' Delimiters , Quote characters '
+  // Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true  from CSV with' Delimiters , Quote characters '
   test("BadRecords-001_PTS006_TC001", Include) {
     sql(s"""drop table if exists abadrecordtest1""").collect
     sql(s"""CREATE TABLE abadrecordtest1 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
@@ -96,7 +95,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //create the table and load the data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true column value with separator (/ , \ ,!,\001)
+  // Create the table and load the data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true column value with separator (/ , \ ,!,\001)
   test("BadRecords-001_PTS007_TC001", Include) {
      sql(s"""CREATE TABLE badrecordtest6 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
     intercept[Exception] {
@@ -111,7 +110,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Create the table and Load from Hive table
+  // Create the table and Load from Hive table
   test("BadRecords-001_PTS008_TC001", Include) {
     sql(s"""drop table if exists badrecordTest7""").collect
     sql(s"""drop table if exists hivetable7""").collect
@@ -126,7 +125,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Create table and Insert into Select for destination carbon table from source carbon/hive/parquet table
+  // Create table and Insert into Select for destination carbon table from source carbon/hive/parquet table
   test("BadRecords-001_PTS015_TC001", Include) {
     sql(s"""drop table if exists badrecordTest9""").collect
     sql(s"""drop table if exists hivetable9""").collect
@@ -141,7 +140,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Show segments for table when data loading having parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true
+  // Show segments for table when data loading having parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true
   test("BadRecords-001_PTS020_TC001", Include) {
      sql(s"""CREATE TABLE badrecordTest13 (ID int,CUST_ID int,cust_name string) STORED AS carbondata """).collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordTest13 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='ID,CUST_ID,cust_name')""").collect
@@ -150,7 +149,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Create table and Load data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true  for date and char types using vectorized reader parameters
+  // Create table and Load data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true  for date and char types using vectorized reader parameters
   test("BadRecords-001_PTS012_TC001", Include) {
      sql(s"""CREATE TABLE badrecordtest14 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordtest14 OPTIONS('FILEHEADER'='ID,CUST_ID,cust_name','DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE')""").collect
@@ -160,7 +159,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having ""(empty in double quote)
+  // Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having ""(empty in double quote)
   test("BadRecords-001_PTS021_TC001", Include) {
      sql(s"""CREATE TABLE badrecordtest15 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/doubleqoute.csv' into table badrecordtest15 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
@@ -170,7 +169,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
 
-  //Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having  a,  insufficient column
+  // Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having  a,  insufficient column
   test("BadRecords-001_PTS022_TC001", Include) {
     sql(s"""drop table if exists badrecordTest16""").collect
      sql(s"""CREATE TABLE badrecordtest16 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
@@ -180,18 +179,17 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists badrecordTest16""").collect
   }
 
-
-  //Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having ‘’ (empty in single quote)
+  // Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having '' (empty in single quote)
   test("BadRecords-001_PTS023_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest17 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test6.csv' into table badrecordtest17 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'="'",'is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
+    sql(s"""CREATE TABLE badrecordtest17 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
+    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test6.csv' into table badrecordtest17 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'="'",'is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
     checkAnswer(s"""select count(*) from badrecordTest17""",
       Seq(Row(3)), "BadRecordTestCase-BadRecords-001_PTS023_TC001")
-     sql(s"""drop table if exists badrecordTest17""").collect
+    sql(s"""drop table if exists badrecordTest17""").collect
   }
 
 
-  //Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having ,(empty comma)
+  // Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having ,(empty comma)
   test("BadRecords-001_PTS024_TC001", Include) {
      sql(s"""CREATE TABLE badrecordtest18 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/emptyComma.csv' into table badrecordtest18 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
@@ -199,5 +197,5 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
       Seq(Row(1)), "BadRecordTestCase-BadRecords-001_PTS024_TC001")
      sql(s"""drop table if exists badrecordTest18""").collect
   }
-
-}
\ No newline at end of file
+  // scalastyle:on lineLength
+}
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BloomFilterIndexTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BloomFilterIndexTestCase.scala
index b85dd47..2b1fe9e 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BloomFilterIndexTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BloomFilterIndexTestCase.scala
@@ -25,7 +25,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 
 class BloomFilterIndexTestCase extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
-
+  // scalastyle:off lineLength
   override protected def beforeAll(): Unit = {
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
       "yyyy-MM-dd")
@@ -336,4 +336,5 @@ class BloomFilterIndexTestCase extends QueryTest with BeforeAndAfterEach with Be
     sql(s"DROP TABLE IF EXISTS $tableName1")
     sql(s"DROP TABLE IF EXISTS $tableName2")
   }
+  // scalastyle:on lineLength
 }
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
index 05e5560..ee18fb7 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
@@ -1,4 +1,3 @@
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -25,7 +24,7 @@ import scala.collection.mutable
 
 import org.apache.avro.file.DataFileWriter
 import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
-import org.apache.avro.io.{DecoderFactory, Encoder}
+import org.apache.avro.io.DecoderFactory
 import org.apache.commons.io.FileUtils
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.common.util.QueryTest
@@ -45,7 +44,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
   val filePath = TestQueryExecutor.integrationPath + "/spark/src/test/resources"
   val writerPath =
     s"${ resourcesPath }" + "/SparkCarbonFileFormat/WriterOutputComplex/"
-
+  // scalastyle:off lineLength
   override def beforeAll(): Unit = {
     FileUtils.deleteDirectory(new File(writerPath))
     sql("DROP TABLE IF EXISTS complexcarbontable")
@@ -191,7 +190,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql("select count(*) from test"), Seq(Row(100)))
   }
 
-  //check projection pushdown with complex- STRUCT data type
+  // check projection pushdown with complex- STRUCT data type
   test("test Complex_DataType-005") {
     sql("DROP TABLE IF EXISTS complexcarbontable")
     sql(
@@ -291,7 +290,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
       Seq(Row(1, Row(1, "abc", mutable.WrappedArray.make(Array(1, null, null))))))
   }
 
-  //check create table with complex double and insert bigger value and check
+  // check create table with complex double and insert bigger value and check
   test("test Complex_DataType-009") {
     sql("Drop table if exists complexcarbontable")
     sql(
@@ -314,31 +313,31 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
   test("test Complex_DataType-010") {
     val mySchema =
       """ {
-        |	"name": "address",
-        |	"type": "record",
-        |	"fields": [
-        |		{
-        |			"name": "name",
-        |			"type": "string"
-        |		},
-        |		{
-        |			"name": "age",
-        |			"type": "int"
-        |		},
-        |		{
+        | "name": "address",
+        | "type": "record",
+        | "fields": [
+        |  {
+        |   "name": "name",
+        |   "type": "string"
+        |  },
+        |  {
+        |   "name": "age",
+        |   "type": "int"
+        |  },
+        |  {
         |   "name" :"my_address",
         |   "type" :{
-        |							"name": "my_address",
-        |							"type": "record",
-        |							"fields": [
+        |       "name": "my_address",
+        |       "type": "record",
+        |       "fields": [
         |               {
-        |									"name": "Temperaturetest",
-        |									"type": "double"
-        |								}
-        |							]
+        |         "name": "Temperaturetest",
+        |         "type": "double"
+        |        }
+        |       ]
         |       }
-        |			}
-        |	]
+        |   }
+        | ]
         |} """.stripMargin
 
     val jsonvalue =
@@ -382,4 +381,5 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
       writer.close()
     }
   }
+  // scalastyle:on lineLength
 }
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableAsSelectTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableAsSelectTestCase.scala
index 659205a..a0cb707 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableAsSelectTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableAsSelectTestCase.scala
@@ -1,4 +1,3 @@
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -18,10 +17,9 @@
 
 package org.apache.carbondata.cluster.sdv.generated
 
-import org.apache.spark.sql.common.util._
 import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util._
 import org.apache.spark.sql.test.util.QueryTest
-
 import org.scalatest.BeforeAndAfterAll
 
 /**
@@ -29,9 +27,9 @@ import org.scalatest.BeforeAndAfterAll
  */
 
 class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
-         
 
-  //Check create table as select with select from same table name when table exists
+  // scalastyle:off lineLength
+  // Check create table as select with select from same table name when table exists
   test("CreateTableAsSelect_001_01", Include) {
    sql("drop table if exists ctas_same_table_name").collect
    sql("CREATE TABLE ctas_same_table_name(key INT, value STRING) STORED AS carbondata").collect
@@ -40,7 +38,7 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
    }
   }
 
-  //Check create table as select with select from same table name when table does not exists
+  // Check create table as select with select from same table name when table does not exists
   test("CreateTableAsSelect_001_02", Include) {
     sql("drop table if exists ctas_same_table_name").collect
     intercept[Exception] {
@@ -48,7 +46,7 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
     }
   }
 
-  //Check create table as select with select from same table name with if not exists clause
+  // Check create table as select with select from same table name with if not exists clause
   test("CreateTableAsSelect_001_03", Include) {
     sql("drop table if exists ctas_same_table_name").collect
     sql("CREATE TABLE ctas_same_table_name(key INT, value STRING) STORED AS carbondata").collect
@@ -56,42 +54,42 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
     assert(true)
   }
 
-  //Check create table as select with select from another carbon table
+  // Check create table as select with select from another carbon table
   test("CreateTableAsSelect_001_04", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_carbon").collect
     sql("create table ctas_select_carbon STORED AS carbondata as select * from carbon_ctas_test").collect
     checkAnswer(sql("select * from ctas_select_carbon"), sql("select * from carbon_ctas_test"))
   }
 
-  //Check create table as select with select from another parquet table
+  // Check create table as select with select from another parquet table
   test("CreateTableAsSelect_001_05", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_parquet").collect
     sql("create table ctas_select_parquet STORED AS carbondata as select * from parquet_ctas_test").collect
     checkAnswer(sql("select * from ctas_select_parquet"), sql("select * from parquet_ctas_test"))
   }
 
-  //Check test create table as select with select from another hive/orc table
+  // Check test create table as select with select from another hive/orc table
   test("CreateTableAsSelect_001_06", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_orc").collect
     sql("create table ctas_select_orc STORED AS carbondata as select * from orc_ctas_test").collect
     checkAnswer(sql("select * from ctas_select_orc"), sql("select * from orc_ctas_test"))
   }
 
-  //Check create table as select with where clause in select from carbon table that returns data
+  // Check create table as select with where clause in select from carbon table that returns data
   test("CreateTableAsSelect_001_07", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_carbon").collect
     sql("create table ctas_select_where_carbon STORED AS carbondata as select * from carbon_ctas_test where key=100").collect
     checkAnswer(sql("select * from ctas_select_where_carbon"), sql("select * from carbon_ctas_test where key=100"))
   }
 
-  //Check create table as select with where clause in select from carbon table that does not return data
+  // Check create table as select with where clause in select from carbon table that does not return data
   test("CreateTableAsSelect_001_08", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_carbon").collect
     sql("create table ctas_select_where_carbon STORED AS carbondata as select * from carbon_ctas_test where key=300").collect
     checkAnswer(sql("select * from ctas_select_where_carbon"), sql("select * from carbon_ctas_test where key=300"))
   }
 
-  //Check create table as select with where clause in select from carbon table and load again
+  // Check create table as select with where clause in select from carbon table and load again
   test("CreateTableAsSelect_001_09", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_carbon").collect
     sql("create table ctas_select_where_carbon STORED AS carbondata as select * from carbon_ctas_test where key=100").collect
@@ -99,28 +97,28 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql("select * from ctas_select_where_carbon"), sql("select * from carbon_ctas_test"))
   }
 
-  //Check create table as select with where clause in select from parquet table
+  // Check create table as select with where clause in select from parquet table
   test("CreateTableAsSelect_001_10", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_parquet").collect
     sql("create table ctas_select_where_parquet STORED AS carbondata as select * from parquet_ctas_test where key=100").collect
     checkAnswer(sql("select * from ctas_select_where_parquet"), sql("select * from parquet_ctas_test where key=100"))
   }
 
-  //Check create table as select with where clause in select from hive/orc table
+  // Check create table as select with where clause in select from hive/orc table
   test("CreateTableAsSelect_001_11", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_orc").collect
     sql("create table ctas_select_where_orc STORED AS carbondata as select * from orc_ctas_test where key=100").collect
     checkAnswer(sql("select * from ctas_select_where_orc"), sql("select * from orc_ctas_test where key=100"))
   }
 
-  //Check create table as select with select directly having the data
+  // Check create table as select with select directly having the data
   test("CreateTableAsSelect_001_12", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_direct_data").collect
     sql("create table ctas_select_direct_data STORED AS carbondata as select 300,'carbondata'").collect
-    checkAnswer(sql("select * from ctas_select_direct_data"), Seq(Row(300,"carbondata")))
+    checkAnswer(sql("select * from ctas_select_direct_data"), Seq(Row(300, "carbondata")))
   }
 
-  //Check create table as select with select from another carbon table with more data
+  // Check create table as select with select from another carbon table with more data
   test("CreateTableAsSelect_001_13", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_hugedata1").collect
     sql("DROP TABLE IF EXISTS ctas_select_hugedata2").collect
@@ -132,7 +130,7 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS ctas_select_hugedata2").collect
   }
 
-  //Check create table as select with where clause in select from parquet table that does not return data
+  // Check create table as select with where clause in select from parquet table that does not return data
   test("CreateTableAsSelect_001_14", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_parquet").collect
     sql(
@@ -146,7 +144,7 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
       sql("SELECT * FROM parquet_ctas_test where key=300"))
   }
 
-  //Check create table as select with where clause in select from hive/orc table that does not return data
+  // Check create table as select with where clause in select from hive/orc table that does not return data
   test("CreateTableAsSelect_001_15", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_orc").collect
     sql(
@@ -198,4 +196,5 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS ctas_select_hugedata1")
     sql("DROP TABLE IF EXISTS ctas_select_hugedata2")
   }
-}
\ No newline at end of file
+  // scalastyle:on lineLength
+}
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala
index 5186699..735c6b6 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala
@@ -23,7 +23,7 @@ import org.scalatest.BeforeAndAfterAll
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
 
 class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAfterAll {
-
+  // scalastyle:off lineLength
   override protected def beforeAll(): Unit = {
     sql("DROP TABLE IF EXISTS LOCAL1")
   }
@@ -2085,4 +2085,5 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
   override protected def afterAll(): Unit = {
     sql("DROP TABLE IF EXISTS LOCAL1")
   }
+  // scalastyle:on lineLength
 }
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala
index d6efd46..9f4760c 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala
@@ -1,4 +1,3 @@
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -20,19 +19,20 @@ package org.apache.carbondata.cluster.sdv.generated
 
 import java.sql.Timestamp
 
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.common.util._
 import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach}
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
 /**
  * Test Class for DataLoadingIUDTestCase to verify all scenerios
  */
-
-class DataLoadingIUDTestCase extends QueryTest with BeforeAndAfterAll with BeforeAndAfter with BeforeAndAfterEach {
-
-  override def beforeAll {
+class DataLoadingIUDTestCase
+  extends QueryTest with BeforeAndAfterAll with BeforeAndAfter with BeforeAndAfterEach {
+  // scalastyle:off lineLength
+  override def beforeAll: Unit = {
     sql("use default").collect
     sql("drop table if exists t_carbn02").collect
     sql("drop table if exists t_carbn01").collect
@@ -78,3605 +78,3608 @@ class DataLoadingIUDTestCase extends QueryTest with BeforeAndAfterAll with Befor
   }
 
 
-//NA
-test("IUD-01-01-01_001-001", Include) {
-   sql("create table T_Hive1(Active_status BOOLEAN, Item_type_cd TINYINT, Qty_day_avg SMALLINT, Qty_total INT, Sell_price BIGINT, Sell_pricep FLOAT, Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name VARCHAR(50), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date DATE) row format delimited fields terminated by ',' collection items terminated by '$'")
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1_hive10.csv' overwrite into table T_Hive1""").collect
- sql("create table T_Hive6(Item_code STRING, Sub_item_cd ARRAY<string>)row format delimited fields terminated by ',' collection items terminated by '$'")
- sql(s"""load data inpath '$resourcesPath/Data/InsertData/T_Hive1_hive11.csv' overwrite into table T_Hive6""").collect
- sql(s"""create table t_carbn02(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into t_carbn02 select * from default.t_carbn01b limit 4""").collect
-  checkAnswer(s"""select count(*) from t_carbn01b""",
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-001")
+  // NA
+  test("IUD-01-01-01_001-001", Include) {
+     sql("create table T_Hive1(Active_status BOOLEAN, Item_type_cd TINYINT, Qty_day_avg SMALLINT, Qty_total INT, Sell_price BIGINT, Sell_pricep FLOAT, Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name VARCHAR(50), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date DATE) row format delimited fields terminated by ',' collection items terminated by '$'")
+   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1_hive10.csv' overwrite into table T_Hive1""").collect
+   sql("create table T_Hive6(Item_code STRING, Sub_item_cd ARRAY<string>)row format delimited fields terminated by ',' collection items terminated by '$'")
+   sql(s"""load data inpath '$resourcesPath/Data/InsertData/T_Hive1_hive11.csv' overwrite into table T_Hive6""").collect
+   sql(s"""create table t_carbn02(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into t_carbn02 select * from default.t_carbn01b limit 4""").collect
+    checkAnswer(s"""select count(*) from t_carbn01b""",
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-001")
 
-}
-       
-
-//Check for update Carbon table using a data value
-test("IUD-01-01-01_001-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status, profit) = ('YES',1) where active_status = 'TRUE'""").collect
-  checkAnswer(s"""select active_status,profit from default.t_carbn01  where active_status='YES' group by active_status,profit""",
-    Seq(Row("YES",1.00)), "DataLoadingIUDTestCase_IUD-01-01-01_001-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using a data value on a string column where it was udpated before
-test("IUD-01-01-01_001-02", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status) = ('YES') where active_status = 'TRUE'""").collect
- sql(s"""update default.t_carbn01  set (active_status) = ('NO') where active_status = 'YES'""").collect
-  checkAnswer(s"""select active_status,profit from default.t_carbn01  where active_status='NO' group by active_status,profit""",
-    Seq(Row("NO",2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_001-02")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using a data value on a string column without giving values in semi quote
-test("IUD-01-01-01_001-03", Include) {
-  intercept[Exception] {
-   sql(s"""drop table IF EXISTS default.t_carbn01""").collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status) = (NO) """).collect
-    sql(s"""NA""").collect
-    
-  }
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using a data value on a string column using numeric value
-test("IUD-01-01-01_001-04", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status) = (234530508098098098080)""").collect
-  checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
-    Seq(Row("234530508098098098080")), "DataLoadingIUDTestCase_IUD -01-01-01_001-04")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using a data value on a string column using numeric value in single quote
-test("IUD-01-01-01_001-05", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status) = ('234530508098098098080')""").collect
-  checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
-    Seq(Row("234530508098098098080")), "DataLoadingIUDTestCase_IUD -01-01-01_001-05")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using a data value on a string column using decimal value
-test("IUD-01-01-01_001-06", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status) = (2.55860986095689088)""").collect
-  checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
-    Seq(Row("2.55860986095689088")), "DataLoadingIUDTestCase_IUD-01 -01-01_001-06")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using a data value on a string column using decimal value
-test("IUD-01-01-01_001-07", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status) = ('2.55860986095689088')""").collect
-  checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
-    Seq(Row("2.55860986095689088")), "DataLoadingIUDTestCase_IUD-01 -01-01_001-07")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using a data value on a string column using string value which is having special characters
-test("IUD-01-01-01_001-11", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status) = ('fdfdskflksdf#?…..fdffs')""").collect
-  checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
-    Seq(Row("fdfdskflksdf#?…..fdffs")), "DataLoadingIUDTestCase_IUD-01-01-01_001-11")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using a data value on a string column using array value having ')'
-//test("IUD-01-01-01_001-12", Include) {
-//   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-// sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-// sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-// sql(s"""update default.t_carbn01  set (active_status) = ('abd$asjdh$adasj$l;sdf$*)$*)(&^)')""").collect
-//  checkAnswer(s"""select count(*) from t_carbn01b""",
-//    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-12")
-//   sql(s"""drop table default.t_carbn01  """).collect
-//}
-       
-
-//Check for update Carbon table for a column where column  name is mentioned incorrectly
-test("IUD-01-01-01_001-14", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set (item_status_cd)  = ('10')""").collect
-    sql(s"""NA""").collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a numeric value column
-test("IUD-01-01-01_001-15", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd)  = (10)""").collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-15")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a numeric value column in single quote
-test("IUD-01-01-01_001-16", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd)  = ('10')""").collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-16")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table for a numeric value column using string value
-test("IUD-01-01-01_001-17", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set (item_type_cd)  = ('Orange')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table using a data value
+  test("IUD-01-01-01_001-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (active_status, profit) = ('YES',1) where active_status = 'TRUE'""").collect
+    checkAnswer(s"""select active_status,profit from default.t_carbn01  where active_status='YES' group by active_status,profit""",
+      Seq(Row("YES", 1.00)), "DataLoadingIUDTestCase_IUD-01-01-01_001-01")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table for a numeric value column using decimal value
-test("IUD-01-01-01_001-18", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set (item_type_cd)  = ('10.11')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table using a data value on a string column where it was udpated before
+  test("IUD-01-01-01_001-02", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (active_status) = ('YES') where active_status = 'TRUE'""").collect
+   sql(s"""update default.t_carbn01  set (active_status) = ('NO') where active_status = 'YES'""").collect
+    checkAnswer(s"""select active_status,profit from default.t_carbn01  where active_status='NO' group by active_status,profit""",
+      Seq(Row("NO", 2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_001-02")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a numeric Int value column using large numeric value
-test("IUD-01-01-01_001-19", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd)  = (2147483647)""").collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
-    Seq(Row(2147483647)), "DataLoadingIUDTestCase_IUD-01-01-01_001-19")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a numeric Int value column using large numeric negative value
-test("IUD-01-01-01_001-20", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd)  = (-2147483648)""").collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
-    Seq(Row(-2147483648)), "DataLoadingIUDTestCase_IUD-01-01-01_001-20")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table for a numeric Int value column using large numeric value which is beyond 32 bit
-test("IUD-01-01-01_001-21", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set (item_type_cd)  = (-2147483649)""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table using a data value on a string column without giving values in semi quote
+  test("IUD-01-01-01_001-03", Include) {
+    intercept[Exception] {
+     sql(s"""drop table IF EXISTS default.t_carbn01""").collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (active_status) = (NO) """).collect
+      sql(s"""NA""").collect
+
+    }
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a numeric BigInt value column using large numeric value which is at the boundary of 64 bit
-test("IUD-01-01-01_001-22", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_price)  = (9223372036854775807)""").collect
-  checkAnswer(s"""select sell_price from default.t_carbn01  group by sell_price""",
-    Seq(Row(9223372036854775807L)), "DataLoadingIUDTestCase_IUD-01-01-01_001-22")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a decimal value column using decimal value
-test("IUD-01-01-01_001-23", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (profit) = (1.11)""").collect
-  checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
-    Seq(Row(1.11)), "DataLoadingIUDTestCase_IUD-01-01-01_001-23")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a decimal value column using decimal value in quote
-test("IUD-01-01-01_001-24", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (profit)  = ('1.11')""").collect
-  checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
-    Seq(Row(1.11)), "DataLoadingIUDTestCase_IUD-01-01-01_001-24")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a decimal value column using numeric value
-test("IUD-01-01-01_001-25", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (profit)  = (1)""").collect
-  checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
-    Seq(Row(1.00)), "DataLoadingIUDTestCase_IUD-01-01-01_001-25")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a decimal value column (3,2) using numeric value which is greater than the allowed
-test("IUD-01-01-01_001-26", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (profit)  = (10)""").collect
-  checkAnswer(s"""select count(Active_status) from default.t_carbn01 where profit = 10 """,
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_001-26")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table for a decimal value column using String value
-test("IUD-01-01-01_001-27", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set (profit)  = ('hakshk')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table using a data value on a string column using numeric value
+  test("IUD-01-01-01_001-04", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (active_status) = (234530508098098098080)""").collect
+    checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
+      Seq(Row("234530508098098098080")), "DataLoadingIUDTestCase_IUD-01-01-01_001-04")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a decimal value(3,2) column using a decimal value which is having 1 decimal
-test("IUD-01-01-01_001-28", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (profit)  = ('1.1')""").collect
-  checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
-    Seq(Row(1.10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-28")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a decimal value(3,2) column using a decimal value which is having 3 decimal
-test("IUD-01-01-01_001-29", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (profit)  = ('1.118')""").collect
-  checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
-    Seq(Row(1.12)), "DataLoadingIUDTestCase_IUD-01-01-01_001-29")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a double column using a decimal value which is having 3 decimal
-test("IUD-01-01-01_001-30", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)  = ('10.1116756')""").collect
-  checkAnswer(s"""select sell_pricep from default.t_carbn01  group by sell_pricep""",
-    Seq(Row(10.1116756)), "DataLoadingIUDTestCase_IUD-01-01-01_001-30")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a time stamp  value column using date timestamp
-test("IUD-01-01-01_001-31", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04 18:13:59.113')""").collect
-  checkAnswer(s"""select update_time from default.t_carbn01  group by update_time""",
-    Seq(Row(Timestamp.valueOf("2016-11-04 18:13:59.0"))), "DataLoadingIUDTestCase_IUD-01-01-01_001-31")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table for a time stamp  value column using date timestamp all formats.
-test("IUD-01-01-01_001-35", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set(update_time) = ('04-11-20004 18:13:59.113')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table using a data value on a string column using numeric value in single quote
+  test("IUD-01-01-01_001-05", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (active_status) = ('234530508098098098080')""").collect
+    checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
+      Seq(Row("234530508098098098080")), "DataLoadingIUDTestCase_IUD-01-01-01_001-05")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table for a time stamp  value column using string value
-test("IUD-01-01-01_001-32", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set(update_time) = ('fhjfhjfdshf')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table using a data value on a string column using decimal value
+  test("IUD-01-01-01_001-06", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (active_status) = (2.55860986095689088)""").collect
+    checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
+      Seq(Row("2.55860986095689088")), "DataLoadingIUDTestCase_IUD-01-01-01_001-06")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table for a time stamp  value column using numeric
-test("IUD-01-01-01_001-33", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set(update_time) = (56546)""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table using a data value on a string column using decimal value
+  test("IUD-01-01-01_001-07", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (active_status) = ('2.55860986095689088')""").collect
+    checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
+      Seq(Row("2.55860986095689088")), "DataLoadingIUDTestCase_IUD-01-01-01_001-07")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table for a time stamp  value column using date 
-test("IUD-01-01-01_001-34", Include) {
-  intercept[Exception] {
+
+  // Check for update Carbon table using a data value on a string column using string value which is having special characters
+  test("IUD-01-01-01_001-11", Include) {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
     sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04')""").collect
-    sql(s"""NA""").collect
+    // scalastyle:on lineLength
+    // scalastyle:off
+    sql("update default.t_carbn01  set (active_status) = ('fdfdskflksdf#?…..fdffs')").collect
+    checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
+      Seq(Row("fdfdskflksdf#?…..fdffs")), "DataLoadingIUDTestCase_IUD-01-01-01_001-11")
+    // scalastyle:on
+    // scalastyle:off lineLength
+    sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table for a time stamp  value column using date timestamp
-test("IUD-01-01-01_001-36", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04 18:63:59.113')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table using a data value on a string column using array value having ')'
+  // Test("IUD-01-01-01_001-12", Include) {
+  //   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+  // sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+  // sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+  // sql(s"""update default.t_carbn01  set (active_status) = ('abd$asjdh$adasj$l;sdf$*)$*)(&^)')""").collect
+  //  checkAnswer(s"""select count(*) from t_carbn01b""",
+  //    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-12")
+  //   sql(s"""drop table default.t_carbn01  """).collect
+  // }
+
+
+  // Check for update Carbon table for a column where column  name is mentioned incorrectly
+  test("IUD-01-01-01_001-14", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set (item_status_cd)  = ('10')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for a time stamp  value column using date timestamp
-test("IUD-01-01-01_001-37", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04 18:13:59.113435345345433 ')""").collect
-  checkAnswer(s"""select update_time from default.t_carbn01  group by update_time""",
-    Seq(Row(Timestamp.valueOf("2016-11-04 18:13:59.0"))), "DataLoadingIUDTestCase_IUD-01-01-01_001-37")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update Carbon table using a * operation on a column value
-test("IUD-01-01-01_001-40", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set(profit, item_type_cd)= (profit*1.2, item_type_cd*3)""").collect
-  checkAnswer(s"""select profit, item_type_cd from default.t_carbn01  group by profit, item_type_cd""",
-    Seq(Row(2.93,342),Row(2.93,369),Row(2.93,3),Row(2.93,6),Row(2.93,9),Row(2.93,12),Row(2.93,33),Row(2.93,39),Row(2.93,42),Row(2.93,123)), "DataLoadingIUDTestCase_IUD-01-01-01_001-40")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check update Carbon table using a / operation on a column value
-test("IUD-01-01-01_001-41", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set(item_type_cd)= (item_type_cd/1)""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table for a numeric value column
+  test("IUD-01-01-01_001-15", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd)  = (10)""").collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-15")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update Carbon table using a / operation on a column value
-test("IUD-01-01-01_001-42", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set(profit)= (profit/1)""").collect
-  checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
-    Seq(Row(2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_001-42")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update Carbon table using a - operation on a column value
-test("IUD-01-01-01_001-43", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set(profit, item_type_cd)= (profit-1.2, item_type_cd-3)""").collect
-  checkAnswer(s"""select profit, item_type_cd from default.t_carbn01  group by profit, item_type_cd""",
-    Seq(Row(1.24,111),Row(1.24,120),Row(1.24,0),Row(1.24,-1),Row(1.24,-2),Row(1.24,1),Row(1.24,8),Row(1.24,10),Row(1.24,11),Row(1.24,38)), "DataLoadingIUDTestCase_IUD-01-01-01_001-43")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update Carbon table using a + operation on a column value
-test("IUD-01-01-01_001-44", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set(profit, item_type_cd)= (profit+1.2, item_type_cd+qty_day_avg)""").collect
-  checkAnswer(s"""select profit, item_type_cd from default.t_carbn01  where profit = 3.64 and item_type_cd = 4291""",
-    Seq(Row(3.64,4291)), "DataLoadingIUDTestCase_IUD-01-01-01_001-44")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update Carbon table using a + operation on a column value which is string
-test("IUD-01-01-01_001-45", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set(item_code) = (item_code+1)""").collect
-  checkAnswer(s"""select count(*) from t_carbn01""",
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-45")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table without where clause
-test("IUD-01-01-01_002-01", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (active_status) = ('NO')""").collect
-  checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
-    Seq(Row("NO")), "DataLoadingIUDTestCase_IUD-01-01-01_002-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table with where clause
-test("IUD-01-01-01_002-02", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (active_status) = ('NO') where active_status = 'TRUE' """).collect
-  checkAnswer(s"""select active_status from default.t_carbn01  where active_status='NO' group by active_status""",
-    Seq(Row("NO")), "DataLoadingIUDTestCase_IUD-01-01-01_002-02")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table with where exists clause
-test("IUD-01-01-01_002-03", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  X set (active_status) = ('NO') where exists (select 1 from default.t_carbn01b Y where Y.item_code = X.item_code)""").collect
-  checkAnswer(s"""select active_status from default.t_carbn01   group by active_status""",
-    Seq(Row("NO")), "DataLoadingIUDTestCase_IUD-01-01-01_002-03")
-   sql(s"""drop table default.t_carbn01""").collect
-}
-       
-
-//Check for delete Carbon table without where clause
-test("IUD-01-01-01_002-04", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Delete from default.t_carbn01 """).collect
-  checkAnswer(s"""select count(*) from default.t_carbn01 """,
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_002-04")
-   sql(s"""drop table default.t_carbn01 """).collect
-}
-       
-
-//Check for delete Carbon table with where clause
-test("IUD-01-01-01_002-05", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Delete from default.t_carbn01  where active_status = 'TRUE'""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01  where active_status='TRUE'""",
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_002-05")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for delete Carbon table with where exists clause
-test("IUD-01-01-01_002-06", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Delete from default.t_carbn01  X where exists (select 1 from default.t_carbn01b Y where Y.item_code = X.item_code)""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01 """,
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_002-06")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-//Check for update Carbon table using query involving filters
-test("IUD-01-01-01_003-03", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set ( a.item_type_cd, a.profit) = ( select b.item_type_cd, b.profit from default.t_carbn01b b where b.item_type_cd = 2)""").collect
-  checkAnswer(s"""select item_type_cd, profit from default.t_carbn01  limit 1""",
-    Seq(Row(2,2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-03")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using query involving sub query
-test("IUD-01-01-01_003-04", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set ( a.item_type_cd, a.Profit) = ( select b.item_type_cd, b.profit from default.t_carbn01b b where a.item_type_cd = b.item_type_cd and b.item_type_cd in (select c.item_type_cd from t_carbn02 c where c.item_type_cd=2))""").collect
-  checkAnswer(s"""select item_type_cd, profit from default.t_carbn01 order by item_type_cd limit 1""",
-    Seq(Row(1,2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-04")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using query involving sub query
-test("IUD-01-01-01_003-04_01", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.item_type_cd, a.Profit) = (select b.item_type_cd, b.profit from default.t_carbn01b b where b.item_type_cd not in (select c.item_type_cd from t_carbn02 c where c.item_type_cd != 2) and a.item_type_cd = b.item_type_cd)""").collect
-  checkAnswer(s"""select item_type_cd, profit from default.t_carbn01 order by item_type_cd limit 1""",
-    Seq(Row(1,2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-04_01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using query involving Logical operation
-test("IUD-01-01-01_003-05", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  A set (a.item_type_cd, a.profit) = ( select b.item_type_cd, b.profit from default.t_carbn01b b where b.profit > 1 AND b.item_type_cd <3 and a.item_type_cd = b.item_type_cd)""").collect
-  checkAnswer(s"""select item_type_cd, profit from default.t_carbn01 order by item_type_cd limit 1""",
-    Seq(Row(1,2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-05")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using query involving group by
-test("IUD-01-01-01_003-06", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.item_type_cd, a.profit) = ( select b.item_type_cd, b.profit from default.t_carbn01b b where b.item_type_cd =2)""").collect
-  checkAnswer(s"""select item_type_cd, profit from default.t_carbn01 limit 1""",
-    Seq(Row(2,2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-06")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using inner join and filter condition on a table to pick only non duplicate records
-test("IUD-01-01-01_003-07", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update t_carbn01 a set (a.active_status) = (select b.active_status from t_carbn01b b where a.item_type_cd = b.item_type_cd and b.item_code in (select item_code from t_carbn01b group by item_code, profit having count(*)>1))""").collect
-  checkAnswer(s"""select count(active_status) from t_carbn01 where active_status = 'true' limit 1""",
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_003-07")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using query involving max
-test("IUD-01-01-01_004-01", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update t_carbn01  a set (a.item_type_cd) = ((select c.code from (select max(b.item_type_cd) as code  from t_carbn01b b) c))""").collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01 limit 1""",
-    Seq(Row(123)), "DataLoadingIUDTestCase_IUD-01-01-01_004-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table using query involving spark functions
-test("IUD-01-01-01_004-02", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.create_date) = (select to_date(b.create_date) from default.t_carbn01b b where b.update_time = '2012-01-06 06:08:05.0')""").collect
-  checkAnswer(s"""select create_date from default.t_carbn01 limit 1""",
-    Seq(Row("2012-01-20")), "DataLoadingIUDTestCase_IUD-01-01-01_004-02")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for all data types using data values
-test("IUD-01-01-01_004-03", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status,item_type_cd,qty_day_avg,qty_total,sell_price,sell_pricep,discount_price,profit,item_code,item_name,outlet_name,update_time,create_date) = ('true',34,344,456,1,5.5,1.1,1.1,'hheh','gfhfhfdh','fghfdhdfh',current_timestamp,'01-10-1900') where item_code='ASD423ee'""").collect
-  checkAnswer(s"""select create_date from default.t_carbn01  where create_date = '01-10-1900' limit 1""",
-    Seq(Row("01-10-1900")), "DataLoadingIUDTestCase_IUD-01-01-01_004-03")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where source table is havign numeric and target is having string value column for update
-test("IUD-01-01-01_004-04", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.item_code) = (select b.sell_price from default.t_carbn01b b where b.sell_price=200000000003454300)""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01 """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-04")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where source table is havign numeric and target is having decimal value column for update
-test("IUD-01-01-01_004-05", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.profit) = (select b.item_type_cd from default.t_carbn01b b where b.item_type_cd = 2 and b.active_status='TRUE' )""").collect
-  checkAnswer(s"""select profit from default.t_carbn01  limit 1""",
-    Seq(Row(2.00)), "DataLoadingIUDTestCase_IUD-01-01-01_004-05")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table where source table is having big int and target is having int value column for update
-test("IUD-01-01-01_004-06", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  a set (a.item_type_cd) = (select b.sell_price from default.t_carbn01b b where b.sell_price=200000343430000000)""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table for a numeric value column in single quote
+  test("IUD-01-01-01_001-16", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd)  = ('10')""").collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-16")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where source table is having string and target is having numeric value column for update
-test("IUD-01-01-01_004-07", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.item_code) = (select b.item_type_cd from default.t_carbn01b b where b.item_code='DE3423ee')""").collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01  order by item_type_cd limit 1""",
-    Seq(Row(1)), "DataLoadingIUDTestCase_IUD-01-01-01_004-07")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table where source table is having string and target is having decimal value column for update
-test("IUD-01-01-01_004-08", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  a set (a.profit) = (select b.item_code from default.t_carbn01b b where b.item_code='DE3423ee')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table for a numeric value column using string value
+  test("IUD-01-01-01_001-17", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set (item_type_cd)  = ('Orange')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check for update Carbon table where source table is having string and target is having timestamp column for update
-test("IUD-01-01-01_004-09", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  a set (a.update_time) = (select b.item_code from default.t_carbn01b b where b.item_code='DE3423ee')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table for a numeric value column using decimal value
+  test("IUD-01-01-01_001-18", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set (item_type_cd)  = ('10.11')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where source table is having decimal and target is having numeric column for update
-test("IUD-01-01-01_004-10", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.item_type_cd) = (select b.profit from default.t_carbn01b b where b.profit=2.445)""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01 """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-10")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where source table is having float and target is having numeric column for update
-test("IUD-01-01-01_004-11", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.item_type_cd) = (select b.sell_pricep from default.t_carbn01b b where b.sell_pricep=11.5)""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01 """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-11")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where source table is having float and target is having double column for update
-test("IUD-01-01-01_004-12", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.discount_price) = (select b.sell_pricep from default.t_carbn01b b where b.sell_pricep=11.5)""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01 """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-12")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where source table is having Decimal(4,3)   and target is having Decimal(3,2) column for update
-test("IUD-01-01-01_004-13", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.profit) = (select b.profit*.2 from default.t_carbn01b b where b.profit=2.444)""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01 """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-13")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table for all data types using query on a different table
-test("IUD-01-01-01_004-14", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.Active_status,a.Item_type_cd,a.Qty_day_avg,a.Qty_total,a.Sell_price,a.Sell_pricep,a.Discount_price,a.Profit,a.Item_code,a.Item_name,a.Outlet_name,a.Update_time,a.Create_date) = (select b.Active_status,b.Item_type_cd,b.Qty_day_avg,b.Qty_total,b.Sell_price,b.Sell_pricep,b.Discount_price,b.Profit,b.Item_code,b.Item_name,b.Outlet_name,b.Update_time,b.Create_date from default.t_carbn01b b where b.Item_type_cd=2)""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01""",
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-14")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where a update column is having a shared dictionary. Check dictionary file being updated.
-test("IUD-01-01-01_005-11", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata TBLPROPERTIES("COLUMNPROPERTIES.Item_code.shared_column"="sharedFolder.Item_code")""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Ram')""").collect
-  checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
-    Seq(Row("Ram")), "DataLoadingIUDTestCase_IUD-01-01-01_005-11")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where a update column is measue and is defined with include ddictionary. Check dictionary file being updated.
-test("IUD-01-01-01_005-12", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Item_type_cd INT, Profit DECIMAL(3,2))STORED AS carbondata """).collect
- sql(s"""insert into default.t_carbn01  select item_type_cd, profit from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (100100)""").collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
-    Seq(Row(100100)), "DataLoadingIUDTestCase_IUD-01-01-01_005-12")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where a update column is dimension and is defined with exclude dictionary. 
-test("IUD-01-01-01_005-13", Include) {
-  sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-  sql(s"""create table default.t_carbn01 (Item_type_cd INT, Profit DECIMAL(3,2))STORED AS carbondata """).collect
-  sql(s"""insert into default.t_carbn01  select item_type_cd, profit from default.t_carbn01b""").collect
-  val currProperty = CarbonProperties.getInstance().getProperty(CarbonCommonConstants
-    .CARBON_BAD_RECORDS_ACTION);
-  CarbonProperties.getInstance()
-    .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FAIL")
-  intercept[Exception] {
-    sql(s"""update default.t_carbn01  set (item_type_cd) = ('ASASDDD')""").collect
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, currProperty)
+
+
+  // Check for update Carbon table for a numeric Int value column using large numeric value
+  test("IUD-01-01-01_001-19", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd)  = (2147483647)""").collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
+      Seq(Row(2147483647)), "DataLoadingIUDTestCase_IUD-01-01-01_001-19")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  CarbonProperties.getInstance()
-    .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, currProperty)
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where a update column is dimension and is defined with exclude dictionary. 
-test("IUD-01-01-01_005-14", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata """).collect
- sql(s""" insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (Item_code) = ('Ram')""").collect
-  checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
-    Seq(Row("Ram")), "DataLoadingIUDTestCase_IUD-01-01-01_005-14")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update Carbon table where a update column is dimension and is defined with exclude dictionary. 
-test("IUD-01-01-01_005-15", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata """).collect
- sql(s""" insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (Item_code) = ('123')""").collect
-  checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
-    Seq(Row("123")), "DataLoadingIUDTestCase_IUD-01-01-01_005-15")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update on data in multiple blocks
-test("IUD-01-01-01_006-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_code) = ('Ram' ) where Item_code = 'RE3423ee'""").collect
-  sql(s"""select Item_code from default.t_carbn01  where Item_code = 'RE3423ee' group by item_code""").collect
-  
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update on data in multiple blocks
-test("IUD-01-01-01_007-01", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = ('120') where Item_type_cd = '114'""").collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01   where item_type_cd = 120 group by item_type_cd""",
-    Seq(Row(120)), "DataLoadingIUDTestCase_IUD-01-01-01_007-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//check update using parquet table
-test("IUD-01-01-02_022-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""drop table if exists T_Parq1""").collect
- sql(s"""create table T_Parq1(Active_status BOOLEAN, Item_type_cd TINYINT, Qty_day_avg SMALLINT, Qty_total INT, Sell_price BIGINT, Sell_pricep FLOAT, Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name VARCHAR(500), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date DATE)stored as parquet""").collect
- sql(s"""insert into T_Parq1 select * from t_hive1""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.Active_status,a.Item_type_cd,a.Qty_day_avg,a.Qty_total,a.Sell_price,a.Sell_pricep,a.Discount_price,a.Profit,a.Item_code,a.Item_name,a.Outlet_name,a.Update_time,a.Create_date) = (select b.Active_status,b.Item_type_cd,b.Qty_day_avg,b.Qty_total,b.Sell_price,b.Sell_pricep,b.Discount_price,b.Profit,b.Item_code,b.Item_name,b.Outlet_name,b.Update_time,b.Create_date from T_Parq1 b where a.item_type_cd = b.item_type_cd)""").collect
-  checkAnswer(s"""select profit from default.t_carbn01   group by profit""",
-    Seq(Row(2.44)), "DataLoadingIUDTestCase_IUD-01-01-02_022-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update on carbon table using query on Parquet table
-test("IUD-01-01-01_009-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""drop table if exists T_Parq1""").collect
- sql(s"""create table T_Parq1(Active_status BOOLEAN, Item_type_cd TINYINT, Qty_day_avg SMALLINT, Qty_total INT, Sell_price BIGINT, Sell_pricep FLOAT, Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name VARCHAR(500), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date DATE)stored as parquet""").collect
- sql(s"""insert into T_Parq1 select * from t_hive1""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  a set (a.Active_status,a.Item_type_cd,a.Qty_day_avg,a.Qty_total,a.Sell_price,a.Sell_pricep,a.Discount_price,a.Profit,a.Item_code,a.Item_name,a.Outlet_name,a.Update_time,a.Create_date) = (select b.Active_status,b.Item_type_cd,b.Qty_day_avg,b.Qty_total,b.Sell_price,b.Sell_pricep,b.Discount_price,b.Profit,b.Item_code,b.Item_name,b.Outlet_name,b.Update_time,b.Create_date from T_Parq1 b where a.item_type_cd = b.item_type_cd)""").collect
-  checkAnswer(s"""select profit from default.t_carbn01   group by profit""",
-    Seq(Row(2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_009-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check update on carbon table using incorrect data value
-test("IUD-01-01-01_010-01", Include) {
-  intercept[Exception] {
-    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""update default.t_carbn01  set Update_time = '11-11-2012 77:77:77') where item_code='ASD423ee')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table for a numeric Int value column using large numeric negative value
+  test("IUD-01-01-01_001-20", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd)  = (-2147483648)""").collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
+      Seq(Row(-2147483648)), "DataLoadingIUDTestCase_IUD-01-01-01_001-20")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a numeric Int value column using large numeric value which is beyond 32 bit
+  test("IUD-01-01-01_001-21", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set (item_type_cd)  = (-2147483649)""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check multiple updates on the same column - for correctness of data and horizontal compaction of delta file
-test("IUD-01-01-02_001-02", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram')""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange') where item_code = 'Ram'""").collect
-  checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
-    Seq(Row("Orange")), "DataLoadingIUDTestCase_IUD-01-01-02_001-02")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for compaction of delta files within a segment working fine as per the configuration
-test("IUD-01-01-02_003-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram')""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange') where item_code = 'Ram'""").collect
-  checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
-    Seq(Row("Orange")), "DataLoadingIUDTestCase_IUD-01-01-02_003-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check multiple updates on the same column - for correctness of data along with horizontal compaction of delta file
-test("IUD-01-01-02_002-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram')""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange') where item_code = 'Ram'""").collect
-  checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
-    Seq(Row("Orange")), "DataLoadingIUDTestCase_IUD-01-01-02_002-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check multiple updates on the different column - for correctness of data and horizontal compaction of delta file
-test("IUD-01-01-01_012-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
- sql(s"""Update default.t_carbn01 set (Item_code) = ('Banana')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
-  checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
-    Seq(Row("Banana")), "DataLoadingIUDTestCase_IUD-01-01-01_012-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for delta files handling during table compaction and not breaking the data integrity
-test("IUD-01-01-02_004-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') """).collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
- sql(s"""ALTER TABLE T_Carbn01 COMPACT 'MINOR'""").collect
- sql(s"""select item_code from default.t_carbn01  group by item_code""").collect
-  checkAnswer(s"""select item_code from t_carbn01  group by item_code""",
-    Seq(Row("Ram")), "DataLoadingIUDTestCase_IUD-01-01-02_004-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update by doing data insert before and after update also check data consistency, no residual file left in HDFS
-test("IUD-01-01-02_006-01", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
- sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2 """).collect
- sql(s"""insert into t_carbn01 select * from t_carbn01b""").collect
-  checkAnswer(s"""select count(profit) from default.t_carbn01""",
-    Seq(Row(20)), "DataLoadingIUDTestCase_IUD-01-01-02_006-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update by doing data load before and after update also check data consistency, no residual file left in HDFS
-test("IUD-01-01-02_006-02", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
- sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2 """).collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
- sql(s"""select count(*) from default.t_carbn01""").collect
-  checkAnswer(s"""select count(profit) from default.t_carbn01""",
-    Seq(Row(20)), "DataLoadingIUDTestCase_IUD-01-01-02_006-02")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//do a delete rows after update and see that the updated columns are deleted
-test("IUD-01-01-02_006-12", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
- sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2 """).collect
- sql(s"""delete from default.t_carbn01  where profit = 1.2 """).collect
-  sql(s"""select count(profit) from default.t_carbn01  where (profit=1.2) or (item_type_cd=2)  group by profit""").collect
-  
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//do an update after delete rows and see that update is not done on the deleted rows(should not fethch those rows)
-test("IUD-01-01-02_006-13", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 2""").collect
- sql(s"""update default.t_carbn01  set (profit) = (1.22) where item_type_cd = 2""").collect
-  sql(s"""select count(profit) from default.t_carbn01  where profit = 1.22 group by profit""").collect
-  
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query with count(column) after update and esnure the correct count is fetched
-test("IUD-01-01-01_014-01", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
- sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2""").collect
-  checkAnswer(s"""select count(profit) from  default.t_carbn01  where profit=1.2 """,
-    Seq(Row(1)), "DataLoadingIUDTestCase_IUD-01-01-01_014-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query with count(*) after delete and esnure the correct count is fetched
-test("IUD-01-01-01_014-02", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 2""").collect
-  checkAnswer(s"""select count(*) from  default.t_carbn01  where item_type_cd = 2""",
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-02")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query with a filter condition after update and esnure the correct count is fetched
-test("IUD-01-01-01_014-03", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2""").collect
-  checkAnswer(s"""select count(profit) from  default.t_carbn01  where profit=1.2""",
-    Seq(Row(1)), "DataLoadingIUDTestCase_IUD-01-01-01_014-03")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query with a filter condition after delete and esnure the correct count is fetched
-test("IUD-01-01-01_014-04", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 2""").collect
-  checkAnswer(s"""select count(*) from  default.t_carbn01  where item_type_cd = 2""",
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-04")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select * on table after update operation and ensure the correct data is fetched
-test("IUD-01-01-01_014-05", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2""").collect
-  checkAnswer(s"""select profit from default.t_carbn01  where profit = 1.2 group by profit""",
-    Seq(Row(1.20)), "DataLoadingIUDTestCase_IUD-01-01-01_014-05")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select (coumn) on table after update operation and ensure the correct data is fetched
-test("IUD-01-01-01_014-06", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2""").collect
-  checkAnswer(s"""select profit from default.t_carbn01  where profit = 1.2 group by profit""",
-    Seq(Row(1.20)), "DataLoadingIUDTestCase_IUD-01-01-01_014-06")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select * on table after delete operation and ensure the correct data is fetched
-test("IUD-01-01-01_014-07", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
- sql(s"""delete from default.t_carbn01 """).collect
-  checkAnswer(s"""select count(*) from  default.t_carbn01  """,
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-07")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select (coumn) on table after delete operation and ensure the correct data is fetched
-test("IUD-01-01-01_014-08", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
- sql(s"""delete from default.t_carbn01 """).collect
-  sql(s"""select profit from  default.t_carbn01 """).collect
-  
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query joining another carbon table after update is done and check that correct data is fetched
-test("IUD-01-01-01_014-09", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (20) where item_type_cd in (2)""").collect
-  checkAnswer(s""" select c.item_type_cd from default.t_carbn01  c  where exists(select a.item_type_cd from default.t_carbn01  a, default.t_carbn01b b  where a.item_type_cd = b.item_type_cd)and c.item_type_cd = 20""",
-    Seq(Row(20)), "DataLoadingIUDTestCase_IUD-01-01-01_014-09")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query joining another carbon table after delete  is done and check that correct data is fetched
-test("IUD-01-01-01_014-10", Include) {
-   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from default.t_carbn01  where qty_day_avg < 4550""").collect
-  checkAnswer(s"""select a.qty_day_avg, a.item_code from default.t_carbn01  a, default.t_carbn01b b  where a.qty_day_avg = b.qty_day_avg """,
-    Seq(Row(4590,"ASD423ee")), "DataLoadingIUDTestCase_IUD-01-01-01_014-10")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query with limit condition after delete is done and check that correct data is fetched
-test("IUD-01-01-01_014-15", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 2""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01  where qty_day_avg >= 4500 limit 3 """,
-    Seq(Row(4)), "DataLoadingIUDTestCase_IUD-01-01-01_014-15")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query when the data is distrbuted in multiple blocks(do multiple insert on the table) after an update and check the correct data is fetched
-test("IUD-01-01-01_014-16", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (20) where item_type_cd < 10""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01  where item_type_cd < 10""",
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-16")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query when the data is distrbuted in single blocks(do single insert on the table and keep data small) after an update and check the correct data is fetched
-test("IUD-01-01-01_014-17", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s""" update default.t_carbn01  set (item_type_cd) = (20) where item_type_cd < 10""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01  where item_type_cd =20""",
-    Seq(Row(4)), "DataLoadingIUDTestCase_IUD-01-01-01_014-17")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query when the data is distrbuted in multiple blocks(do multiple insert on the table) after an delete and check the correct data is fetched
-test("IUD-01-01-01_014-18", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from default.t_carbn01   where item_type_cd < 10""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01  where item_type_cd < 10""",
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-18")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check data consistency when select is executed after multiple updates on different columns
-test("IUD-01-01-01_015-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (24523)""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (1111)""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (24523)""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (1111)""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (24523)""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (1111)""").collect
- sql(s"""select item_code, item_type_cd from default.t_carbn01  group by item_code, item_type_cd""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01  where item_code = 'Banana'""",
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_015-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check data consistency when select is executed after multiple updates on same row and same columns
-test("IUD-01-01-01_016-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Ram')""").collect
- sql(s"""update default.t_carbn01  set (item_code) = ('Orange')""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Ram')""").collect
- sql(s"""update default.t_carbn01  set (item_code) = ('Orange')""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Ram')""").collect
- sql(s"""update default.t_carbn01  set (item_code) = ('Orange')""").collect
-  checkAnswer(s"""select count(*) from default.t_carbn01  where item_code = 'Orange'""",
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_016-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query on the updated column after multiple updates on the same column at different rows(control this using where condition) and enforce horizontal compaction and see that there is no data loss
-test("IUD-01-01-01_016-02", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
- sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
- sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
- sql(s"""select count(*) from default.t_carbn01 """).collect
-  checkAnswer(s"""select count(*) from default.t_carbn01 """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_016-02")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query after multiple deletes(control this using where condition) and enforce horizontal compaction and see that there is no data loss
-test("IUD-01-01-01_016-03", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 123 """).collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 41 """).collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 14 """).collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 13 """).collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 114 """).collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 11 """).collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 3 """).collect
- sql(s"""delete from default.t_carbn01  where item_type_cd = 4 """).collect
-  checkAnswer(s"""select count(*) from default.t_carbn01 """,
-    Seq(Row(2)), "DataLoadingIUDTestCase_IUD-01-01-01_016-03")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run select query on the updated column after multiple updates on different column at different rows(control this using where condition) and enforce horizontal compaction and see that there is no data loss
-test("IUD-01-01-01_016-04", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 123""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 41""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 14""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 13""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 114""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 11""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 3""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 4""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 2""").collect
- sql(s"""select item_type_cd from default.t_carbn01  group by item_type_cd """).collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01 order by item_type_cd limit 1""",
-    Seq(Row(1)), "DataLoadingIUDTestCase_IUD-01-01-01_016-04")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Run alternate update and insert and do a vertical compaction and see that there is no data loss
-test("IUD-01-01-01_016-06", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
- sql(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""").collect
-  checkAnswer(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""",
-    Seq(Row("Orange",60)), "DataLoadingIUDTestCase_IUD-01-01-01_016-06")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//do a delete after segment deletion and see that the delta files are not created in the deleted segmnet
-test("IUD-01-01-02_006-15", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from table default.t_carbn01  where segment.id in (1) """).collect
- sql(s"""delete from t_carbn01 where item_type_cd =14""").collect
-  checkAnswer(s"""select count(*)  from default.t_carbn01""",
-    Seq(Row(9)), "DataLoadingIUDTestCase_IUD-01-01-02_006-15")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//do an update after segment delete and see that delta files are not created in the deleted segments
-test("IUD-01-01-02_006-14", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from table default.t_carbn01  where segment.id in (1) """).collect
- sql(s"""update t_carbn01 set (item_code) = ('Apple')""").collect
-  checkAnswer(s"""select count(*)  from default.t_carbn01 where item_code = 'Apple'""",
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-02_006-14")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check data consistency when select is executed after update and delete segment
-test("IUD-01-01-02_007-01", Include) {
+
+  // Check for update Carbon table for a numeric BigInt value column using large numeric value which is at the boundary of 64 bit
+  test("IUD-01-01-01_001-22", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""delete from table default.t_carbn01  where segment.id in (2) """).collect
- sql(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""").show
-  checkAnswer(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""",
-    Seq(Row("Banana",20)), "DataLoadingIUDTestCase_IUD-01-01-02_007-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (sell_price)  = (9223372036854775807)""").collect
+    checkAnswer(s"""select sell_price from default.t_carbn01  group by sell_price""",
+      Seq(Row(9223372036854775807L)), "DataLoadingIUDTestCase_IUD-01-01-01_001-22")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
 
-//Check select after deleting segment and reloading and reupdating same data.
-test("IUD-01-01-02_008-01", Include) {
+  // Check for update Carbon table for a decimal value column using decimal value
+  test("IUD-01-01-01_001-23", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""delete from table default.t_carbn01  where segment.id in (0)""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
-  checkAnswer(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""",
-    Seq(Row("Banana",10)), "DataLoadingIUDTestCase_IUD-01-01-02_008-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (profit) = (1.11)""").collect
+    checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
+      Seq(Row(1.11)), "DataLoadingIUDTestCase_IUD-01-01-01_001-23")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
 
-//Run 2 deletes on a table where update is done after data load - 1 block from load, 1 block from update in a segment(set detla threshold = 1).
-test("IUD-01-01-02_009-01", Include) {
+
+  // Check for update Carbon table for a decimal value column using decimal value in quote
+  test("IUD-01-01-01_001-24", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-  sql(s"""select item_type_cd from default.t_carbn01""").show(100, false)
- sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
- sql(s"""delete from t_carbn01 where item_type_cd =2""").collect
- sql(s"""delete from t_carbn01 where item_type_cd =14""").collect
-  checkAnswer(s"""select count(item_type_cd)  from default.t_carbn01""",
-    Seq(Row(8)), "DataLoadingIUDTestCase_IUD-01-01-02_009-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (profit)  = ('1.11')""").collect
+    checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
+      Seq(Row(1.11)), "DataLoadingIUDTestCase_IUD-01-01-01_001-24")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
 
-//Check update on carbon table where a column being updated with incorrect data type.
-test("IUD-01-01-02_011-01", Include) {
-  intercept[Exception] {
-    sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""Update T_Carbn04 set (Item_type_cd) = ('Banana')""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table for a decimal value column using numeric value
+  test("IUD-01-01-01_001-25", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (profit)  = (1)""").collect
+    checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
+      Seq(Row(1.00)), "DataLoadingIUDTestCase_IUD-01-01-01_001-25")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check update on empty carbon table where a column being updated with incorrect data type.
-test("IUD-01-01-01_022-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""Update default.t_carbn01  set (item_type_cd) = (11) """).collect
-  checkAnswer(s"""select item_type_cd from default.t_carbn01  where item_type_cd=11 limit 1""",
-    Seq(Row(11)), "DataLoadingIUDTestCase_IUD-01-01-01_022-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check update on carbon table where multiple values are returned in expression.
-test("IUD-01-01-01_023-00", Include) {
-  intercept[Exception] {
-    sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""Update default.t_carbn01  set Item_type_cd = (select Item_type_cd from default.t_carbn01b )""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table for a decimal value column (3,2) using numeric value which is greater than the allowed
+  test("IUD-01-01-01_001-26", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (profit)  = (10)""").collect
+    checkAnswer(s"""select count(Active_status) from default.t_carbn01 where profit = 10 """,
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_001-26")
+     sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//check update using case statement joiining 2 tables
-test("IUD-01-01-02_023-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""drop table if exists T_Parq1""").collect
- sql(s"""create table T_Parq1(Active_status BOOLEAN, Item_type_cd TINYINT, Qty_day_avg SMALLINT, Qty_total INT, Sell_price BIGINT, Sell_pricep FLOAT, Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name VARCHAR(500), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date DATE)stored as parquet""").collect
- sql(s"""insert into T_Parq1 select * from t_hive1""").collect
- sql(s"""update t_carbn01 a set(a.item_code) = (select (case when b.item_code = 'RE3423ee' then c.item_code else b.item_code end) from t_parq1 b, t_hive1 c where b.item_type_cd = 14 and b.item_type_cd=c.item_type_cd)""").collect
-  checkAnswer(s"""select count(item_code) from default.t_carbn01  where item_code = 'SE3423ee'""",
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-02_023-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
 
-//Check update on carbon table where non matching values are returned from expression.
-test("IUD-01-01-01_024-01", Include) {
-  intercept[Exception] {
-    sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-    sql(s"""Update default.t_carbn01  set Item_type_cd = (select Item_code from default.t_carbn01b)""").collect
-    sql(s"""NA""").collect
+
+  // Check for update Carbon table for a decimal value column using String value
+  test("IUD-01-01-01_001-27", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set (profit)  = ('hakshk')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
   }
-  sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a base64 function value
-test("IUD-01-01-01_040-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (base64('A')) """).collect
-  checkAnswer(s""" select count(active_status) from default.t_carbn01  group by active_status """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_040-01")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a ascii function value
-test("IUD-01-01-01_040-02", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (ascii(FALSE)) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("102")), "DataLoadingIUDTestCase_IUD-01-01-01_040-02")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a concat function value
-test("IUD-01-01-01_040-03", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (concat('FAL','SE')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("FALSE")), "DataLoadingIUDTestCase_IUD-01-01-01_040-03")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column to a value returned by concat_ws function
-test("IUD-01-01-01_040-04", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (concat_ws('FAL','SE')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("SE")), "DataLoadingIUDTestCase_IUD-01-01-01_040-04")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column to a value returned by find_in_set function
-test("IUD-01-01-01_040-05", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (find_in_set('t','test1')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("0")), "DataLoadingIUDTestCase_IUD-01-01-01_040-05")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column to a value returned by format_number function
-test("IUD-01-01-01_040-06", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (format_number(10,12)) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("10.000000000000")), "DataLoadingIUDTestCase_IUD-01-01-01_040-06")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column to a value returned by get_json_object function
-test("IUD-01-01-01_040-07", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (get_json_object('test','test1')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row(null)), "DataLoadingIUDTestCase_IUD-01-01-01_040-07")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by instr function
-test("IUD-01-01-01_040-08", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (instr('test','test1')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("0")), "DataLoadingIUDTestCase_IUD-01-01-01_040-08")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by length function
-test("IUD-01-01-01_040-09", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (length('test')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("4")), "DataLoadingIUDTestCase_IUD-01-01-01_040-09")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by locate function
-test("IUD-01-01-01_040-10", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (locate('test','test1')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("1")), "DataLoadingIUDTestCase_IUD-01-01-01_040-10")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by lower function
-test("IUD-01-01-01_040-11", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (lower('test')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-11")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by lcase function
-test("IUD-01-01-01_040-12", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (lcase('test')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-12")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by lpad function
-test("IUD-01-01-01_040-13", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (lpad('te',1,'test1')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("t")), "DataLoadingIUDTestCase_IUD-01-01-01_040-13")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by ltrim function
-test("IUD-01-01-01_040-14", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (ltrim('te')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("te")), "DataLoadingIUDTestCase_IUD-01-01-01_040-14")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by parse_url function
-test("IUD-01-01-01_040-15", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (parse_url('test','test1')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row(null)), "DataLoadingIUDTestCase_IUD-01-01-01_040-15")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by printf function
-test("IUD-01-01-01_040-16", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (printf('test','test1')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-16")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by regexp_extract function
-test("IUD-01-01-01_040-17", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (regexp_extract('test','test1',1)) """).collect
-  checkAnswer(s""" select count(active_status) from default.t_carbn01  group by active_status """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_040-17")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by regexp_replace function
-test("IUD-01-01-01_040-18", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (regexp_replace('test','test1','test2')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-18")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by repeat function
-test("IUD-01-01-01_040-19", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (repeat('test',1)) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-19")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by reverse function
-test("IUD-01-01-01_040-20", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (reverse('test')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("tset")), "DataLoadingIUDTestCase_IUD-01-01-01_040-20")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by rpad function
-test("IUD-01-01-01_040-21", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (rpad('test',1,'test1')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("t")), "DataLoadingIUDTestCase_IUD-01-01-01_040-21")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by rtrim function
-test("IUD-01-01-01_040-22", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (rtrim('test')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-22")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by sentences function
-ignore("IUD-01-01-01_040-23", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (sentences('Hello there! How are you?')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("Hello\\:there\\\\$How\\:are:\\you\\\\")), "DataLoadingIUDTestCase_IUD-01-01-01_040-23")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
 
 
-  //Check for updating carbon table set column value to a value returned by space function
-  test("IUD-01-01-01_040-24", Include) {
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
-    sql(s"""select active_status from default.t_carbn01b""").show
-  sql(s"""select active_status from default.t_carbn01""").show
-
-  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
-  sql(s"""select active_status from default.t_carbn01""").show(100, false)
- sql(s"""update default.t_carbn01  set (active_status)= (space(1)) """).collect
-  checkAnswer(s"""select count(active_status) from default.t_carbn01  group by active_status """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_040-24")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by split function
-//Split will give us array value
-test("IUD-01-01-01_040-25", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- intercept[Exception] {
-   sql(s"""update default.t_carbn01  set (active_status)= (split('t','a')) """).collect
- }
-}
-       
-
-//Check for updating carbon table set column value to a value returned by substr function with 2 parameters
-test("IUD-01-01-01_040-26", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (substr('test',1)) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-26")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by substring function with 2 parameters
-test("IUD-01-01-01_040-27", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (substring('test',1,2)) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("te")), "DataLoadingIUDTestCase_IUD-01-01-01_040-27")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by substring function  with 3 parameters
-test("IUD-01-01-01_040-28", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (substr('test1',2,3)) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("est")), "DataLoadingIUDTestCase_IUD-01-01-01_040-28")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by translate function
-test("IUD-01-01-01_040-29", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (translate('test','test1','test2')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-29")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by trim function
-test("IUD-01-01-01_040-30", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (trim('test')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-30")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by unbase64 function
-test("IUD-01-01-01_040-31", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (unbase64('test')) """).collect
-  checkAnswer(s""" select count(*) from default.t_carbn01  group by active_status""",
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_040-31")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by upper function
-test("IUD-01-01-01_040-32", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (upper('test')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("TEST")), "DataLoadingIUDTestCase_IUD-01-01-01_040-32")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by lower function
-test("IUD-01-01-01_040-33", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (lower('TEST')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("test")), "DataLoadingIUDTestCase_IUD-01-01-01_040-33")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by levenshtein function
-test("IUD-01-01-01_040-35", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= ( levenshtein('kitten','sitting')) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row("3")), "DataLoadingIUDTestCase_IUD-01-01-01_040-35")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by round function with single parameter
-test("IUD-01-01-01_040-36", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s""" update default.t_carbn01  set (sell_pricep)= (round(10.66)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(11.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-36")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by round function with 2  parameters
-test("IUD-01-01-01_040-37", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (round(10.66,1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(10.7)), "DataLoadingIUDTestCase_IUD-01-01-01_040-37")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by bround function having single parameter
-test("IUD-01-01-01_040-38", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (bround(10.66)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(11.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-38")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by bround function having 2 parameters
-test("IUD-01-01-01_040-39", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (bround(10.66,1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(10.7)), "DataLoadingIUDTestCase_IUD-01-01-01_040-39")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by floor function
-test("IUD-01-01-01_040-40", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (floor(10.1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(10.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-40")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by ceil function
-test("IUD-01-01-01_040-41", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (ceil(10.1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(11.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-41")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by ceiling function
-test("IUD-01-01-01_040-42", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (ceiling(11.1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(12.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-42")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by exp function with parameters
-test("IUD-01-01-01_040-45", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (exp(10.1234242323)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(24919.956624251117)), "DataLoadingIUDTestCase_IUD-01-01-01_040-45")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by ln function with parameters
-test("IUD-01-01-01_040-46", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (ln(10.1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(2.312535423847214)), "DataLoadingIUDTestCase_IUD-01-01-01_040-46")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by log10 function
-test("IUD-01-01-01_040-47", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (log10(10.1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.0043213737826426)), "DataLoadingIUDTestCase_IUD-01-01-01_040-47")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by log2 function
-test("IUD-01-01-01_040-48", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (log2(10.1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(3.3362833878644325)), "DataLoadingIUDTestCase_IUD-01-01-01_040-48")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by log function
-test("IUD-01-01-01_040-49", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (log(10.1,10.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.0042603872534936)), "DataLoadingIUDTestCase_IUD-01-01-01_040-49")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by pow function
-test("IUD-01-01-01_040-50", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (pow(10.1,10.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.754195580765244E10)), "DataLoadingIUDTestCase_IUD-01-01-01_040-50")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by power function
-test("IUD-01-01-01_040-51", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (power(11.1,11.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(5.100554147653899E11)), "DataLoadingIUDTestCase_IUD-01-01-01_040-51")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by sqrt function
-test("IUD-01-01-01_040-52", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (sqrt(11.1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(3.331666249791536)), "DataLoadingIUDTestCase_IUD-01-01-01_040-52")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by bin function
-test("IUD-01-01-01_040-53", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s""" update default.t_carbn01  set (sell_pricep)= (bin(1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-53")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by hex function
-test("IUD-01-01-01_040-54", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (hex(1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-54")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by unhex function
-test("IUD-01-01-01_040-55", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (unhex(1)) """).collect
-  checkAnswer(s"""select count(active_status) from default.t_carbn01  group by active_status """,
-    Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_040-55")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by conv function
-test("IUD-01-01-01_040-56", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (active_status)= (conv(1,1,2)) """).collect
-  checkAnswer(s""" select active_status from default.t_carbn01  group by active_status """,
-    Seq(Row(null)), "DataLoadingIUDTestCase_IUD-01-01-01_040-56")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by abs function
-test("IUD-01-01-01_040-57", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (abs(1.9)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.9)), "DataLoadingIUDTestCase_IUD-01-01-01_040-57")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by pmod function
-test("IUD-01-01-01_040-58", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (pmod(1,2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-58")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by sin function
-test("IUD-01-01-01_040-59", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (sin(1.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(0.9320390859672263)), "DataLoadingIUDTestCase_IUD-01-01-01_040-59")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by cos function
-test("IUD-01-01-01_040-60", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (cos(1.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(0.3623577544766736)), "DataLoadingIUDTestCase_IUD-01-01-01_040-60")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by tan function
-test("IUD-01-01-01_040-61", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (tan(1.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(2.5721516221263188)), "DataLoadingIUDTestCase_IUD-01-01-01_040-61")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by atan function
-test("IUD-01-01-01_040-62", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (atan(1.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(0.8760580505981934)), "DataLoadingIUDTestCase_IUD-01-01-01_040-62")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by degrees function
-test("IUD-01-01-01_040-63", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (degrees(1.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(68.75493541569878)), "DataLoadingIUDTestCase_IUD-01-01-01_040-63")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by radians function
-test("IUD-01-01-01_040-64", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (radians(1.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(0.020943951023931952)), "DataLoadingIUDTestCase_IUD-01-01-01_040-64")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by positive function
-test("IUD-01-01-01_040-65", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (positive(1.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.2)), "DataLoadingIUDTestCase_IUD-01-01-01_040-65")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by negative function
-test("IUD-01-01-01_040-66", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (negative(1.2)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(-1.2)), "DataLoadingIUDTestCase_IUD-01-01-01_040-66")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by sign function
-test("IUD-01-01-01_040-67", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (sign(2.1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-67")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by e() function
-test("IUD-01-01-01_040-68", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (e()) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(2.718281828459045)), "DataLoadingIUDTestCase_IUD-01-01-01_040-68")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by pi() function
-test("IUD-01-01-01_040-69", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (sell_pricep)= (pi()) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(3.141592653589793)), "DataLoadingIUDTestCase_IUD-01-01-01_040-69")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by factorial function
-test("IUD-01-01-01_040-70", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s""" update default.t_carbn01  set (sell_pricep)= (factorial(5)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(120.0)), "DataLoadingIUDTestCase_IUD-01-01-01_040-70")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by cbrt function
-test("IUD-01-01-01_040-71", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s""" update default.t_carbn01  set (sell_pricep)= (cbrt(5.1)) """).collect
-  checkAnswer(s"""select  sell_pricep from default.t_carbn01  group by  sell_pricep """,
-    Seq(Row(1.721300620726316)), "DataLoadingIUDTestCase_IUD-01-01-01_040-71")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by greatest function
-test("IUD-01-01-01_040-72", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd)= (greatest(2,3)) """).collect
-  checkAnswer(s"""select  item_type_cd from default.t_carbn01  group by  item_type_cd """,
-    Seq(Row(3)), "DataLoadingIUDTestCase_IUD-01-01-01_040-72")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for updating carbon table set column value to a value returned by least function
-test("IUD-01-01-01_040-73", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""update default.t_carbn01  set (item_type_cd)= (least(2,3)) """).collect
-  checkAnswer(s"""select  item_type_cd from default.t_carbn01  group by  item_type_cd """,
-    Seq(Row(2)), "DataLoadingIUDTestCase_IUD-01-01-01_040-73")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for delete where in (select from tabl2)
-test("IUD-01-01-02_023-03", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""create database if not exists test1""").collect
- sql(s"""create table if not exists test1.t_carbn02(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into test1.t_carbn02 select * from default.t_carbn01b""").collect
- sql(s"""delete from default.t_carbn01  a where (a.item_code) in (select b.item_code from test1.t_carbn02 b)""").collect
-  sql(s"""select  item_type_cd from default.t_carbn01  order by  item_type_cd limit 1""").collect
-  
-   sql(s"""drop table default.t_carbn01 """).collect
- sql(s"""drop table test1.t_carbn02""").collect
- sql(s"""drop database test1""").collect
-}
-       
-
-//delete using a temp table
-test("IUD-01-01-02_023-04", Include) {
-   sql(s"""create table IF NOT EXISTS  default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from default.t_carbn01  a where a.item_code in (select b.item_code from (select c.item_code, c.item_type_cd from t_carbn01b c)b)""").collect
-  checkAnswer(s"""select  count(*) from t_carbn01  """,
-    Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-02_023-04")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for delete using a temp table using group by using subquery
-test("IUD-01-01-02_023-05", Include) {
-   sql(s"""create table IF NOT EXISTS  default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
- sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
- sql(s"""delete from default.t_carbn01  a where a.item_type_cd in ( select b.profit from (select sum(item_type_cd) profit from default.t_carbn01b group by item_code) b)""").collect
-  checkAnswer(s"""select  item_type_cd from default.t_carbn01  order by  item_type_cd  limit 1""",
-    Seq(Row(1)), "DataLoadingIUDTestCase_IUD-01-01-02_023-05")
-   sql(s"""drop table default.t_carbn01  """).collect
-}
-       
-
-//Check for update with null value for multiple update operations
-test("IUD-01-01-01_016-05", Include) {
-   sql(s"""CREATE TABLE table_C21 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect
- sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv'  into table table_C21 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
- sql(s"""update table_C21 set (cust_id)= (10000) where cust_name='CUST_NAME_00000'""").collect
- sql(s"""update table_C21 set (cust_id)= (NULL) where cust_name='CUST_NAME_00000'""").collect
- sql(s"""update table_C21 set (cust_name)= (NULL) where cust_id='9001'""").collect
-  checkAnswer(s"""select cust_name from table_C21 where cust_id='9001'""",
-    Seq(Row(null)), "DataLoadingIUDTestCase_IUD-01-01-01_016-05")
-   sql(s"""drop table table_C21 """).collect
-}
-       
+  // Check for update Carbon table for a decimal value(3,2) column using a decimal value which is having 1 decimal
+  test("IUD-01-01-01_001-28", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (profit)  = ('1.1')""").collect
+    checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
+      Seq(Row(1.10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-28")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a decimal value(3,2) column using a decimal value which is having 3 decimal
+  test("IUD-01-01-01_001-29", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (profit)  = ('1.118')""").collect
+    checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
+      Seq(Row(1.12)), "DataLoadingIUDTestCase_IUD-01-01-01_001-29")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a double column using a decimal value which is having 3 decimal
+  test("IUD-01-01-01_001-30", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (sell_pricep)  = ('10.1116756')""").collect
+    checkAnswer(s"""select sell_pricep from default.t_carbn01  group by sell_pricep""",
+      Seq(Row(10.1116756)), "DataLoadingIUDTestCase_IUD-01-01-01_001-30")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a time stamp  value column using date timestamp
+  test("IUD-01-01-01_001-31", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04 18:13:59.113')""").collect
+    checkAnswer(s"""select update_time from default.t_carbn01  group by update_time""",
+      Seq(Row(Timestamp.valueOf("2016-11-04 18:13:59.0"))), "DataLoadingIUDTestCase_IUD-01-01-01_001-31")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a time stamp  value column using date timestamp all formats.
+  test("IUD-01-01-01_001-35", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set(update_time) = ('04-11-20004 18:13:59.113')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a time stamp  value column using string value
+  test("IUD-01-01-01_001-32", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set(update_time) = ('fhjfhjfdshf')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a time stamp  value column using numeric
+  test("IUD-01-01-01_001-33", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set(update_time) = (56546)""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a time stamp  value column using date
+  test("IUD-01-01-01_001-34", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a time stamp  value column using date timestamp
+  test("IUD-01-01-01_001-36", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04 18:63:59.113')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for a time stamp  value column using date timestamp
+  test("IUD-01-01-01_001-37", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04 18:13:59.113435345345433 ')""").collect
+    checkAnswer(s"""select update_time from default.t_carbn01  group by update_time""",
+      Seq(Row(Timestamp.valueOf("2016-11-04 18:13:59.0"))), "DataLoadingIUDTestCase_IUD-01-01-01_001-37")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update Carbon table using a * operation on a column value
+  test("IUD-01-01-01_001-40", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set(profit, item_type_cd)= (profit*1.2, item_type_cd*3)""").collect
+    checkAnswer(s"""select profit, item_type_cd from default.t_carbn01  group by profit, item_type_cd""",
+      Seq(Row(2.93, 342), Row(2.93, 369), Row(2.93, 3), Row(2.93, 6), Row(2.93, 9), Row(2.93, 12), Row(2.93, 33), Row(2.93, 39), Row(2.93, 42), Row(2.93, 123)), "DataLoadingIUDTestCase_IUD-01-01-01_001-40")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update Carbon table using a / operation on an INT column (item_type_cd)
+  test("IUD-01-01-01_001-41", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set(item_type_cd)= (item_type_cd/1)""").collect
+      // 'NA' is invalid SQL and always throws, satisfying the intercept even when the
+      // statements above succeed — NOTE(review): the test cannot tell which statement threw.
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update Carbon table using a / operation on a DECIMAL column (profit)
+  test("IUD-01-01-01_001-42", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set(profit)= (profit/1)""").collect
+    checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
+      Seq(Row(2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_001-42")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update Carbon table using a - operation on a column value
+  test("IUD-01-01-01_001-43", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set(profit, item_type_cd)= (profit-1.2, item_type_cd-3)""").collect
+    checkAnswer(s"""select profit, item_type_cd from default.t_carbn01  group by profit, item_type_cd""",
+      Seq(Row(1.24, 111), Row(1.24, 120), Row(1.24, 0), Row(1.24, -1), Row(1.24, -2), Row(1.24, 1), Row(1.24, 8), Row(1.24, 10), Row(1.24, 11), Row(1.24, 38)), "DataLoadingIUDTestCase_IUD-01-01-01_001-43")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update Carbon table using a + operation on a column value
+  test("IUD-01-01-01_001-44", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set(profit, item_type_cd)= (profit+1.2, item_type_cd+qty_day_avg)""").collect
+    checkAnswer(s"""select profit, item_type_cd from default.t_carbn01  where profit = 3.64 and item_type_cd = 4291""",
+      Seq(Row(3.64, 4291)), "DataLoadingIUDTestCase_IUD-01-01-01_001-44")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update Carbon table using a + operation on a column value which is string
+  // (item_code is a String column; only the row count is asserted afterwards)
+  test("IUD-01-01-01_001-45", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set(item_code) = (item_code+1)""").collect
+    checkAnswer(s"""select count(*) from t_carbn01""",
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-45")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table without where clause
+  test("IUD-01-01-01_002-01", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (active_status) = ('NO')""").collect
+    checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
+      Seq(Row("NO")), "DataLoadingIUDTestCase_IUD-01-01-01_002-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table with where clause
+  test("IUD-01-01-01_002-02", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (active_status) = ('NO') where active_status = 'TRUE' """).collect
+    checkAnswer(s"""select active_status from default.t_carbn01  where active_status='NO' group by active_status""",
+      Seq(Row("NO")), "DataLoadingIUDTestCase_IUD-01-01-01_002-02")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table with where exists clause (correlated subquery on t_carbn01b)
+  // NOTE(review): unlike sibling tests, there is no "drop table IF EXISTS" preamble here —
+  // the create is "if not exists", so rows left over from a prior run would be updated too.
+  test("IUD-01-01-01_002-03", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  X set (active_status) = ('NO') where exists (select 1 from default.t_carbn01b Y where Y.item_code = X.item_code)""").collect
+    checkAnswer(s"""select active_status from default.t_carbn01   group by active_status""",
+      Seq(Row("NO")), "DataLoadingIUDTestCase_IUD-01-01-01_002-03")
+     sql(s"""drop table default.t_carbn01""").collect
+  }
+
+
+  // Check for delete Carbon table without where clause
+  test("IUD-01-01-01_002-04", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Delete from default.t_carbn01 """).collect
+    checkAnswer(s"""select count(*) from default.t_carbn01 """,
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_002-04")
+     sql(s"""drop table default.t_carbn01 """).collect
+  }
+
+
+  // Check for delete Carbon table with where clause
+  test("IUD-01-01-01_002-05", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Delete from default.t_carbn01  where active_status = 'TRUE'""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01  where active_status='TRUE'""",
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_002-05")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for delete Carbon table with where exists clause (correlated subquery on t_carbn01b)
+  test("IUD-01-01-01_002-06", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Delete from default.t_carbn01  X where exists (select 1 from default.t_carbn01b Y where Y.item_code = X.item_code)""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01 """,
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_002-06")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+  // Check for update Carbon table using query involving filters
+  test("IUD-01-01-01_003-03", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set ( a.item_type_cd, a.profit) = ( select b.item_type_cd, b.profit from default.t_carbn01b b where b.item_type_cd = 2)""").collect
+    checkAnswer(s"""select item_type_cd, profit from default.t_carbn01  limit 1""",
+      Seq(Row(2, 2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-03")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table using query involving sub query (IN over t_carbn02)
+  test("IUD-01-01-01_003-04", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set ( a.item_type_cd, a.Profit) = ( select b.item_type_cd, b.profit from default.t_carbn01b b where a.item_type_cd = b.item_type_cd and b.item_type_cd in (select c.item_type_cd from t_carbn02 c where c.item_type_cd=2))""").collect
+    checkAnswer(s"""select item_type_cd, profit from default.t_carbn01 order by item_type_cd limit 1""",
+      Seq(Row(1, 2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-04")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table using query involving sub query (NOT IN over t_carbn02)
+  test("IUD-01-01-01_003-04_01", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.item_type_cd, a.Profit) = (select b.item_type_cd, b.profit from default.t_carbn01b b where b.item_type_cd not in (select c.item_type_cd from t_carbn02 c where c.item_type_cd != 2) and a.item_type_cd = b.item_type_cd)""").collect
+    checkAnswer(s"""select item_type_cd, profit from default.t_carbn01 order by item_type_cd limit 1""",
+      Seq(Row(1, 2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-04_01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table using query involving Logical operation (AND of two predicates)
+  test("IUD-01-01-01_003-05", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  A set (a.item_type_cd, a.profit) = ( select b.item_type_cd, b.profit from default.t_carbn01b b where b.profit > 1 AND b.item_type_cd <3 and a.item_type_cd = b.item_type_cd)""").collect
+    checkAnswer(s"""select item_type_cd, profit from default.t_carbn01 order by item_type_cd limit 1""",
+      Seq(Row(1, 2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-05")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table using query involving group by
+  // NOTE(review): despite the title, the update subquery below contains no GROUP BY —
+  // it is a plain equality filter; confirm against the original test plan.
+  test("IUD-01-01-01_003-06", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.item_type_cd, a.profit) = ( select b.item_type_cd, b.profit from default.t_carbn01b b where b.item_type_cd =2)""").collect
+    checkAnswer(s"""select item_type_cd, profit from default.t_carbn01 limit 1""",
+      Seq(Row(2, 2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_003-06")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table using inner join and filter condition on a table to pick only non duplicate records
+  test("IUD-01-01-01_003-07", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update t_carbn01 a set (a.active_status) = (select b.active_status from t_carbn01b b where a.item_type_cd = b.item_type_cd and b.item_code in (select item_code from t_carbn01b group by item_code, profit having count(*)>1))""").collect
+    checkAnswer(s"""select count(active_status) from t_carbn01 where active_status = 'true' limit 1""",
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_003-07")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table using query involving max
+  test("IUD-01-01-01_004-01", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update t_carbn01  a set (a.item_type_cd) = ((select c.code from (select max(b.item_type_cd) as code  from t_carbn01b b) c))""").collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01 limit 1""",
+      Seq(Row(123)), "DataLoadingIUDTestCase_IUD-01-01-01_004-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table using query involving spark functions (to_date)
+  test("IUD-01-01-01_004-02", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.create_date) = (select to_date(b.create_date) from default.t_carbn01b b where b.update_time = '2012-01-06 06:08:05.0')""").collect
+    checkAnswer(s"""select create_date from default.t_carbn01 limit 1""",
+      Seq(Row("2012-01-20")), "DataLoadingIUDTestCase_IUD-01-01-01_004-02")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for all data types using data values
+  test("IUD-01-01-01_004-03", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (active_status,item_type_cd,qty_day_avg,qty_total,sell_price,sell_pricep,discount_price,profit,item_code,item_name,outlet_name,update_time,create_date) = ('true',34,344,456,1,5.5,1.1,1.1,'hheh','gfhfhfdh','fghfdhdfh',current_timestamp,'01-10-1900') where item_code='ASD423ee'""").collect
+    checkAnswer(s"""select create_date from default.t_carbn01  where create_date = '01-10-1900' limit 1""",
+      Seq(Row("01-10-1900")), "DataLoadingIUDTestCase_IUD-01-01-01_004-03")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having numeric and target is having string value column for update
+  test("IUD-01-01-01_004-04", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.item_code) = (select b.sell_price from default.t_carbn01b b where b.sell_price=200000000003454300)""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01 """,
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-04")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having numeric and target is having decimal value column for update
+  test("IUD-01-01-01_004-05", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.profit) = (select b.item_type_cd from default.t_carbn01b b where b.item_type_cd = 2 and b.active_status='TRUE' )""").collect
+    checkAnswer(s"""select profit from default.t_carbn01  limit 1""",
+      Seq(Row(2.00)), "DataLoadingIUDTestCase_IUD-01-01-01_004-05")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having big int and target is having int value column for update
+  test("IUD-01-01-01_004-06", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  a set (a.item_type_cd) = (select b.sell_price from default.t_carbn01b b where b.sell_price=200000343430000000)""").collect
+      // 'NA' is invalid SQL and always throws, satisfying the intercept even when the
+      // statements above succeed — NOTE(review): the test cannot tell which statement threw.
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having string and target is having numeric value column for update
+  test("IUD-01-01-01_004-07", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.item_code) = (select b.item_type_cd from default.t_carbn01b b where b.item_code='DE3423ee')""").collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01  order by item_type_cd limit 1""",
+      Seq(Row(1)), "DataLoadingIUDTestCase_IUD-01-01-01_004-07")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having string and target is having decimal value column for update
+  test("IUD-01-01-01_004-08", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  a set (a.profit) = (select b.item_code from default.t_carbn01b b where b.item_code='DE3423ee')""").collect
+      // 'NA' is invalid SQL and always throws, satisfying the intercept even when the
+      // statements above succeed — NOTE(review): the test cannot tell which statement threw.
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having string and target is having timestamp column for update
+  test("IUD-01-01-01_004-09", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  a set (a.update_time) = (select b.item_code from default.t_carbn01b b where b.item_code='DE3423ee')""").collect
+      // 'NA' is invalid SQL and always throws, satisfying the intercept even when the
+      // statements above succeed — NOTE(review): the test cannot tell which statement threw.
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having decimal and target is having numeric column for update
+  test("IUD-01-01-01_004-10", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.item_type_cd) = (select b.profit from default.t_carbn01b b where b.profit=2.445)""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01 """,
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-10")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having float and target is having numeric column for update
+  test("IUD-01-01-01_004-11", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.item_type_cd) = (select b.sell_pricep from default.t_carbn01b b where b.sell_pricep=11.5)""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01 """,
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-11")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having float and target is having double column for update
+  test("IUD-01-01-01_004-12", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.discount_price) = (select b.sell_pricep from default.t_carbn01b b where b.sell_pricep=11.5)""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01 """,
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-12")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where source table is having Decimal(4,3) and target is having Decimal(3,2) column for update
+  test("IUD-01-01-01_004-13", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.profit) = (select b.profit*.2 from default.t_carbn01b b where b.profit=2.444)""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01 """,
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-13")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table for all data types using query on a different table
+  test("IUD-01-01-01_004-14", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.Active_status,a.Item_type_cd,a.Qty_day_avg,a.Qty_total,a.Sell_price,a.Sell_pricep,a.Discount_price,a.Profit,a.Item_code,a.Item_name,a.Outlet_name,a.Update_time,a.Create_date) = (select b.Active_status,b.Item_type_cd,b.Qty_day_avg,b.Qty_total,b.Sell_price,b.Sell_pricep,b.Discount_price,b.Profit,b.Item_code,b.Item_name,b.Outlet_name,b.Update_time,b.Create_date from default.t_carbn01b b where b.Item_type_cd=2)""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01""",
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_004-14")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where an update column is having a shared dictionary. Check dictionary file being updated.
+  test("IUD-01-01-01_005-11", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata TBLPROPERTIES("COLUMNPROPERTIES.Item_code.shared_column"="sharedFolder.Item_code")""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Ram')""").collect
+    checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
+      Seq(Row("Ram")), "DataLoadingIUDTestCase_IUD-01-01-01_005-11")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table where an update column is a measure and is defined with include dictionary. Check dictionary file being updated.
+  test("IUD-01-01-01_005-12", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Item_type_cd INT, Profit DECIMAL(3,2))STORED AS carbondata """).collect
+   sql(s"""insert into default.t_carbn01  select item_type_cd, profit from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (100100)""").collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
+      Seq(Row(100100)), "DataLoadingIUDTestCase_IUD-01-01-01_005-12")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update failure (bad-records action FAIL) when a non-numeric string is written to an INT column.
+  // NOTE(review): the original comment claimed "exclude dictionary", which does not match this code.
+  test("IUD-01-01-01_005-13", Include) {
+    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+    sql(s"""create table default.t_carbn01 (Item_type_cd INT, Profit DECIMAL(3,2))STORED AS carbondata """).collect
+    sql(s"""insert into default.t_carbn01  select item_type_cd, profit from default.t_carbn01b""").collect
+    // Save the current bad-records action so it can be restored after forcing FAIL.
+    val currProperty = CarbonProperties.getInstance().getProperty(CarbonCommonConstants
+      .CARBON_BAD_RECORDS_ACTION);
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FAIL")
+    intercept[Exception] {
+      sql(s"""update default.t_carbn01  set (item_type_cd) = ('ASASDDD')""").collect
+      // NOTE(review): this restore is unreachable when the update throws (the expected path);
+      // the restore after the intercept below is the one that actually runs.
+      CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, currProperty)
+    }
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, currProperty)
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table setting a String column (Item_code) to a string literal value.
+  test("IUD-01-01-01_005-14", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata """).collect
+   sql(s""" insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (Item_code) = ('Ram')""").collect
+    checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
+      Seq(Row("Ram")), "DataLoadingIUDTestCase_IUD-01-01-01_005-14")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check for update Carbon table setting a String column (Item_code) to a numeric-looking string ('123').
+  test("IUD-01-01-01_005-15", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata """).collect
+   sql(s""" insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (Item_code) = ('123')""").collect
+    checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
+      Seq(Row("123")), "DataLoadingIUDTestCase_IUD-01-01-01_005-15")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update on data spread across multiple blocks (three inserts) with a filter on Item_code
+  test("IUD-01-01-01_006-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (item_code) = ('Ram' ) where Item_code = 'RE3423ee'""").collect
+    sql(s"""select Item_code from default.t_carbn01  where Item_code = 'RE3423ee' group by item_code""").collect
+
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update with a filter on item_type_cd and verify the updated value via group by
+  test("IUD-01-01-01_007-01", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = ('120') where Item_type_cd = '114'""").collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01   where item_type_cd = 120 group by item_type_cd""",
+      Seq(Row(120)), "DataLoadingIUDTestCase_IUD-01-01-01_007-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update of a carbon table via a correlated subquery over a parquet table
+  test("IUD-01-01-02_022-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""drop table if exists T_Parq1""").collect
+   sql(s"""create table T_Parq1(Active_status BOOLEAN, Item_type_cd TINYINT, Qty_day_avg SMALLINT, Qty_total INT, Sell_price BIGINT, Sell_pricep FLOAT, Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name VARCHAR(500), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date DATE)stored as parquet""").collect
+   sql(s"""insert into T_Parq1 select * from t_hive1""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.Active_status,a.Item_type_cd,a.Qty_day_avg,a.Qty_total,a.Sell_price,a.Sell_pricep,a.Discount_price,a.Profit,a.Item_code,a.Item_name,a.Outlet_name,a.Update_time,a.Create_date) = (select b.Active_status,b.Item_type_cd,b.Qty_day_avg,b.Qty_total,b.Sell_price,b.Sell_pricep,b.Discount_price,b.Profit,b.Item_code,b.Item_name,b.Outlet_name,b.Update_time,b.Create_date from T_Parq1 b where a.item_type_cd = b.item_type_cd)""").collect
+    checkAnswer(s"""select profit from default.t_carbn01   group by profit""",
+      Seq(Row(2.44)), "DataLoadingIUDTestCase_IUD-01-01-02_022-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update on a carbon table using a query on a Parquet table
+  test("IUD-01-01-01_009-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""drop table if exists T_Parq1""").collect
+   sql(s"""create table T_Parq1(Active_status BOOLEAN, Item_type_cd TINYINT, Qty_day_avg SMALLINT, Qty_total INT, Sell_price BIGINT, Sell_pricep FLOAT, Discount_price DOUBLE , Profit DECIMAL(3,2), Item_code STRING, Item_name VARCHAR(500), Outlet_name CHAR(100), Update_time TIMESTAMP, Create_date DATE)stored as parquet""").collect
+   sql(s"""insert into T_Parq1 select * from t_hive1""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  a set (a.Active_status,a.Item_type_cd,a.Qty_day_avg,a.Qty_total,a.Sell_price,a.Sell_pricep,a.Discount_price,a.Profit,a.Item_code,a.Item_name,a.Outlet_name,a.Update_time,a.Create_date) = (select b.Active_status,b.Item_type_cd,b.Qty_day_avg,b.Qty_total,b.Sell_price,b.Sell_pricep,b.Discount_price,b.Profit,b.Item_code,b.Item_name,b.Outlet_name,b.Update_time,b.Create_date from T_Parq1 b where a.item_type_cd = b.item_type_cd)""").collect
+    checkAnswer(s"""select profit from default.t_carbn01   group by profit""",
+      Seq(Row(2.44)), "DataLoadingIUDTestCase_IUD-01-01-01_009-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update on carbon table using an incorrect data value; the malformed update
+  // statement (unbalanced parentheses, invalid timestamp) is expected to throw.
+  test("IUD-01-01-01_010-01", Include) {
+    intercept[Exception] {
+      sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+      sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""update default.t_carbn01  set Update_time = '11-11-2012 77:77:77') where item_code='ASD423ee')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check multiple updates on the same column - for correctness of data and horizontal compaction of delta files
+  test("IUD-01-01-02_001-02", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram')""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange') where item_code = 'Ram'""").collect
+    checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
+      Seq(Row("Orange")), "DataLoadingIUDTestCase_IUD-01-01-02_001-02")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check that compaction of delta files within a segment works as per the configuration
+  test("IUD-01-01-02_003-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram')""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange') where item_code = 'Ram'""").collect
+    checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
+      Seq(Row("Orange")), "DataLoadingIUDTestCase_IUD-01-01-02_003-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check multiple updates on the same column - for correctness of data along with horizontal compaction of delta files
+  test("IUD-01-01-02_002-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram')""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange') where item_code = 'Ram'""").collect
+    checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
+      Seq(Row("Orange")), "DataLoadingIUDTestCase_IUD-01-01-02_002-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check multiple updates on different columns - for correctness of data and horizontal compaction of delta files
+  test("IUD-01-01-01_012-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
+   sql(s"""Update default.t_carbn01 set (Item_code) = ('Banana')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
+    checkAnswer(s"""select item_code from default.t_carbn01  group by item_code""",
+      Seq(Row("Banana")), "DataLoadingIUDTestCase_IUD-01-01-01_012-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check delta files handling during table compaction (MINOR) without breaking data integrity
+  test("IUD-01-01-02_004-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') """).collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange') where Item_code = 'Ram'""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Ram') where Item_code = 'Orange'""").collect
+   sql(s"""ALTER TABLE T_Carbn01 COMPACT 'MINOR'""").collect
+   sql(s"""select item_code from default.t_carbn01  group by item_code""").collect
+    checkAnswer(s"""select item_code from t_carbn01  group by item_code""",
+      Seq(Row("Ram")), "DataLoadingIUDTestCase_IUD-01-01-02_004-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update with a data insert before and after the update; also check data consistency
+  // and that no residual file is left in HDFS
+  test("IUD-01-01-02_006-01", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
+   sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2 """).collect
+   sql(s"""insert into t_carbn01 select * from t_carbn01b""").collect
+    checkAnswer(s"""select count(profit) from default.t_carbn01""",
+      Seq(Row(20)), "DataLoadingIUDTestCase_IUD-01-01-02_006-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update with a data load before and after the update; also check data consistency
+  // and that no residual file is left in HDFS
+  test("IUD-01-01-02_006-02", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
+   sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2 """).collect
+   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
+   sql(s"""select count(*) from default.t_carbn01""").collect
+    checkAnswer(s"""select count(profit) from default.t_carbn01""",
+      Seq(Row(20)), "DataLoadingIUDTestCase_IUD-01-01-02_006-02")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Delete rows after an update and verify that the updated rows are deleted
+  test("IUD-01-01-02_006-12", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
+   sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2 """).collect
+   sql(s"""delete from default.t_carbn01  where profit = 1.2 """).collect
+    sql(s"""select count(profit) from default.t_carbn01  where (profit=1.2) or (item_type_cd=2)  group by profit""").collect
+
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Do an update after deleting rows and verify the update does not touch the deleted rows
+  // (should not fetch those rows)
+  test("IUD-01-01-02_006-13", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 2""").collect
+   sql(s"""update default.t_carbn01  set (profit) = (1.22) where item_type_cd = 2""").collect
+    sql(s"""select count(profit) from default.t_carbn01  where profit = 1.22 group by profit""").collect
+
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select query with count(column) after update and ensure the correct count is fetched
+  test("IUD-01-01-01_014-01", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
+   sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2""").collect
+    checkAnswer(s"""select count(profit) from  default.t_carbn01  where profit=1.2 """,
+      Seq(Row(1)), "DataLoadingIUDTestCase_IUD-01-01-01_014-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select query with count(*) after delete and ensure the correct count is fetched
+  test("IUD-01-01-01_014-02", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 2""").collect
+    checkAnswer(s"""select count(*) from  default.t_carbn01  where item_type_cd = 2""",
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-02")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select query with a filter condition after update and ensure the correct count is fetched
+  test("IUD-01-01-01_014-03", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2""").collect
+    checkAnswer(s"""select count(profit) from  default.t_carbn01  where profit=1.2""",
+      Seq(Row(1)), "DataLoadingIUDTestCase_IUD-01-01-01_014-03")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select query with a filter condition after delete and ensure the correct count is fetched
+  test("IUD-01-01-01_014-04", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 2""").collect
+    checkAnswer(s"""select count(*) from  default.t_carbn01  where item_type_cd = 2""",
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-04")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select * on table after update operation and ensure the correct data is fetched
+  test("IUD-01-01-01_014-05", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2""").collect
+    checkAnswer(s"""select profit from default.t_carbn01  where profit = 1.2 group by profit""",
+      Seq(Row(1.20)), "DataLoadingIUDTestCase_IUD-01-01-01_014-05")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select (column) on table after update operation and ensure the correct data is fetched
+  test("IUD-01-01-01_014-06", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (profit) = (1.2) where item_type_cd = 2""").collect
+    checkAnswer(s"""select profit from default.t_carbn01  where profit = 1.2 group by profit""",
+      Seq(Row(1.20)), "DataLoadingIUDTestCase_IUD-01-01-01_014-06")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select * on table after delete operation and ensure the correct data is fetched
+  test("IUD-01-01-01_014-07", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
+   sql(s"""delete from default.t_carbn01 """).collect
+    checkAnswer(s"""select count(*) from  default.t_carbn01  """,
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-07")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select (column) on table after delete operation and ensure the correct data is fetched
+  test("IUD-01-01-01_014-08", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
+   sql(s"""delete from default.t_carbn01 """).collect
+    sql(s"""select profit from  default.t_carbn01 """).collect
+
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select query joining another carbon table after update is done and check that correct data is fetched
+  test("IUD-01-01-01_014-09", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b """).collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (20) where item_type_cd in (2)""").collect
+    checkAnswer(s""" select c.item_type_cd from default.t_carbn01  c  where exists(select a.item_type_cd from default.t_carbn01  a, default.t_carbn01b b  where a.item_type_cd = b.item_type_cd)and c.item_type_cd = 20""",
+      Seq(Row(20)), "DataLoadingIUDTestCase_IUD-01-01-01_014-09")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select query joining another carbon table after delete is done and check that correct data is fetched
+  test("IUD-01-01-01_014-10", Include) {
+     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
+   sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""delete from default.t_carbn01  where qty_day_avg < 4550""").collect
+    checkAnswer(s"""select a.qty_day_avg, a.item_code from default.t_carbn01  a, default.t_carbn01b b  where a.qty_day_avg = b.qty_day_avg """,
+      Seq(Row(4590, "ASD423ee")), "DataLoadingIUDTestCase_IUD-01-01-01_014-10")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select query with limit condition after delete is done and check that correct data is fetched
+  test("IUD-01-01-01_014-15", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 2""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01  where qty_day_avg >= 4500 limit 3 """,
+      Seq(Row(4)), "DataLoadingIUDTestCase_IUD-01-01-01_014-15")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run select query when the data is distrbuted in multiple blocks(do multiple insert on the table) after an update and check the correct data is fetched
+  test("IUD-01-01-01_014-16", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (20) where item_type_cd < 10""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01  where item_type_cd < 10""",
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-16")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run a select query when the data is distributed in a single block (single small insert on the table) after an update, and check that the correct data is fetched
+  test("IUD-01-01-01_014-17", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s""" update default.t_carbn01  set (item_type_cd) = (20) where item_type_cd < 10""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01  where item_type_cd =20""",
+      Seq(Row(4)), "DataLoadingIUDTestCase_IUD-01-01-01_014-17")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run a select query when the data is distributed across multiple blocks (multiple inserts on the table) after a delete, and check that the correct data is fetched
+  test("IUD-01-01-01_014-18", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""delete from default.t_carbn01   where item_type_cd < 10""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01  where item_type_cd < 10""",
+      Seq(Row(0)), "DataLoadingIUDTestCase_IUD-01-01-01_014-18")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check data consistency when a select is executed after multiple alternating updates on different columns
+  test("IUD-01-01-01_015-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (24523)""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (1111)""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (24523)""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (1111)""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (24523)""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (1111)""").collect
+   sql(s"""select item_code, item_type_cd from default.t_carbn01  group by item_code, item_type_cd""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01  where item_code = 'Banana'""",
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_015-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check data consistency when a select is executed after multiple updates on the same rows and the same column
+  test("IUD-01-01-01_016-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Ram')""").collect
+   sql(s"""update default.t_carbn01  set (item_code) = ('Orange')""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Ram')""").collect
+   sql(s"""update default.t_carbn01  set (item_code) = ('Orange')""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Ram')""").collect
+   sql(s"""update default.t_carbn01  set (item_code) = ('Orange')""").collect
+    checkAnswer(s"""select count(*) from default.t_carbn01  where item_code = 'Orange'""",
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_016-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run alternating updates on two columns, enforce horizontal compaction, and verify via select that there is no data loss (NOTE(review): original comment mentioned a where condition, but these updates touch all rows - confirm intent)
+  test("IUD-01-01-01_016-02", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Orange')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (24523)""").collect
+   sql(s"""Update default.t_carbn01  set (Item_code) = ('Banana')""").collect
+   sql(s"""update default.t_carbn01  set (Item_type_cd) = (1111)""").collect
+   sql(s"""select count(*) from default.t_carbn01 """).collect
+    checkAnswer(s"""select count(*) from default.t_carbn01 """,
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_016-02")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run a select query after multiple deletes (controlled via where condition), enforce horizontal compaction, and verify that there is no data loss
+  test("IUD-01-01-01_016-03", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 123 """).collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 41 """).collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 14 """).collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 13 """).collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 114 """).collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 11 """).collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 3 """).collect
+   sql(s"""delete from default.t_carbn01  where item_type_cd = 4 """).collect
+    checkAnswer(s"""select count(*) from default.t_carbn01 """,
+      Seq(Row(2)), "DataLoadingIUDTestCase_IUD-01-01-01_016-03")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run a select query on the updated column after multiple updates on the same column at different rows (controlled via where condition), enforce horizontal compaction, and verify that there is no data loss
+  test("IUD-01-01-01_016-04", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 123""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 41""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 14""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 13""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 114""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 11""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 3""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 4""").collect
+   sql(s"""update default.t_carbn01  set (item_type_cd) = (21) where item_type_cd = 2""").collect
+   sql(s"""select item_type_cd from default.t_carbn01  group by item_type_cd """).collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01 order by item_type_cd limit 1""",
+      Seq(Row(1)), "DataLoadingIUDTestCase_IUD-01-01-01_016-04")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run alternating update and insert operations, trigger vertical compaction, and verify that there is no data loss
+  test("IUD-01-01-01_016-06", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
+   sql(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""").collect
+    checkAnswer(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""",
+      Seq(Row("Orange", 60)), "DataLoadingIUDTestCase_IUD-01-01-01_016-06")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Do a delete after a segment deletion and verify that delete-delta files are not created in the deleted segment
+  test("IUD-01-01-02_006-15", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""delete from table default.t_carbn01  where segment.id in (1) """).collect
+   sql(s"""delete from t_carbn01 where item_type_cd =14""").collect
+    checkAnswer(s"""select count(*)  from default.t_carbn01""",
+      Seq(Row(9)), "DataLoadingIUDTestCase_IUD-01-01-02_006-15")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Do an update after a segment delete and verify that update-delta files are not created in the deleted segment
+  test("IUD-01-01-02_006-14", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""delete from table default.t_carbn01  where segment.id in (1) """).collect
+   sql(s"""update t_carbn01 set (item_code) = ('Apple')""").collect
+    checkAnswer(s"""select count(*)  from default.t_carbn01 where item_code = 'Apple'""",
+      Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-02_006-14")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check data consistency when a select is executed after updates and a delete-segment operation
+  test("IUD-01-01-02_007-01", Include) {
+     sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Orange')""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""delete from table default.t_carbn01  where segment.id in (2) """).collect
+   sql(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""").collect()
+    checkAnswer(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""",
+      Seq(Row("Banana", 20)), "DataLoadingIUDTestCase_IUD-01-01-02_007-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check select after deleting a segment, then reloading and re-updating the same data
+  test("IUD-01-01-02_008-01", Include) {
+     sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""delete from table default.t_carbn01  where segment.id in (0)""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+    checkAnswer(s"""select item_code, count(*)  from default.t_carbn01  group by item_code""",
+      Seq(Row("Banana", 10)), "DataLoadingIUDTestCase_IUD-01-01-02_008-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Run 2 deletes on a table where an update was done after data load - 1 block from load, 1 block from update in a segment (set delta threshold = 1)
+  test("IUD-01-01-02_009-01", Include) {
+     sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+    sql(s"""select item_type_cd from default.t_carbn01""").collect()
+   sql(s"""Update default.t_carbn01  set (item_code) = ('Banana')""").collect
+   sql(s"""delete from t_carbn01 where item_type_cd =2""").collect
+   sql(s"""delete from t_carbn01 where item_type_cd =14""").collect
+    checkAnswer(s"""select count(item_type_cd)  from default.t_carbn01""",
+      Seq(Row(8)), "DataLoadingIUDTestCase_IUD-01-01-02_009-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update on carbon table where a column is updated with an incorrect data type (NOTE(review): the update targets table T_Carbn04, which is never created here, so the expected exception may come from a missing table rather than a type mismatch - confirm intent)
+  test("IUD-01-01-02_011-01", Include) {
+    intercept[Exception] {
+      sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""Update T_Carbn04 set (Item_type_cd) = ('Banana')""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update on carbon table setting a column to a constant value and verify via select (NOTE(review): comment originally said "empty table" with "incorrect data type", but data is inserted first and the update value is a valid INT - confirm intended scenario)
+  test("IUD-01-01-01_022-01", Include) {
+     sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+   sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+   sql(s"""Update default.t_carbn01  set (item_type_cd) = (11) """).collect
+    checkAnswer(s"""select item_type_cd from default.t_carbn01  where item_type_cd=11 limit 1""",
+      Seq(Row(11)), "DataLoadingIUDTestCase_IUD-01-01-01_022-01")
+     sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update on carbon table where multiple values are returned by the subquery expression (expected to raise an exception)
+  test("IUD-01-01-01_023-00", Include) {
+    intercept[Exception] {
+      sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
+      sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
+      sql(s"""Update default.t_carbn01  set Item_type_cd = (select Item_type_cd from default.t_carbn01b )""").collect
+      sql(s"""NA""").collect
+    }
+    sql(s"""drop table default.t_carbn01  """).collect
+  }
+
+
+  // Check update using a case statement joining 2 tables
+  test("IUD-01-01-02_023-01", Include) {
... 84401 lines suppressed ...