Posted to commits@carbondata.apache.org by zh...@apache.org on 2019/12/12 07:42:43 UTC

[carbondata] branch master updated: [CARBONDATA-3607] Remove batch_sort feature #3499

This is an automated email from the ASF dual-hosted git repository.

zhangzc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 2c0a1e0  [CARBONDATA-3607] Remove batch_sort feature #3499
2c0a1e0 is described below

commit 2c0a1e0241463110b10ccf65efce41fb7c8bb9d3
Author: Jacky Li <ja...@qq.com>
AuthorDate: Thu Dec 12 12:36:38 2019 +0700

    [CARBONDATA-3607] Remove batch_sort feature #3499
    
    Remove batch_sort feature
    
    This closes #3499
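    
    Tables or LOAD commands that previously declared SORT_SCOPE as
    BATCH_SORT must move to one of the remaining scopes. A minimal
    migration sketch in the style of the deleted test cases (table name,
    schema and path are illustrative only):
    
        // hypothetical example: BATCH_SORT is no longer a valid scope
        sql("""CREATE TABLE sales (id INT, country STRING)
               STORED BY 'carbondata'
               TBLPROPERTIES('SORT_SCOPE'='LOCAL_SORT', 'SORT_COLUMNS'='country')""")
        // a per-load override still works for the remaining scopes
        sql("""LOAD DATA INPATH '/tmp/sales.csv' INTO TABLE sales
               OPTIONS('SORT_SCOPE'='GLOBAL_SORT', 'DELIMITER'=',')""")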
---
 .../core/constants/CarbonCommonConstants.java      |   11 -
 .../core/constants/CarbonLoadOptionConstants.java  |    7 -
 .../core/constants/SortScopeOptions.java           |    4 +-
 .../apache/carbondata/core/util/CarbonUtil.java    |    4 -
 .../apache/carbondata/core/util/SessionParams.java |    6 +-
 docs/configuration-parameters.md                   |    3 +-
 docs/ddl-of-carbondata.md                          |    1 -
 docs/dml-of-carbondata.md                          |    2 +-
 docs/sdk-guide.md                                  |    2 +-
 .../hadoop/api/CarbonTableOutputFormat.java        |   11 -
 .../sdv/generated/BatchSortLoad1TestCase.scala     |  292 ------
 .../sdv/generated/BatchSortLoad2TestCase.scala     |  282 ------
 .../sdv/generated/BatchSortLoad3TestCase.scala     |  182 ----
 .../sdv/generated/BatchSortQueryTestCase.scala     | 1043 --------------------
 .../CreateTableWithLocalDictionaryTestCase.scala   |    4 +-
 .../cluster/sdv/generated/GlobalSortTestCase.scala |   38 +-
 .../PartitionWithPreAggregateTestCase.scala        |   20 -
 .../cluster/sdv/generated/SDKwriterTestCase.scala  |    2 +-
 .../generated/SortColumnExcudeDictTestCase.scala   |    2 +-
 .../cluster/sdv/generated/SortColumnTestCase.scala |    2 +-
 .../sdv/generated/StandardPartitionTestCase.scala  |    8 -
 .../carbondata/cluster/sdv/suite/SDVSuites.scala   |    9 -
 .../createTable/TestCreateTableWithSortScope.scala |   16 +-
 .../testsuite/dataload/TestBatchSortDataLoad.scala |  212 ----
 .../LocalDictionarySupportCreateTableTest.scala    |    4 +-
 .../testsuite/sortcolumns/TestSortColumns.scala    |   95 +-
 .../sortcolumns/TestSortColumnsWithUnsafe.scala    |    2 +-
 .../StandardPartitionGlobalSortTestCase.scala      |   13 +-
 .../apache/carbondata/spark/util/CommonUtil.scala  |    2 +-
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala    |    3 +-
 .../org/apache/spark/sql/hive/CarbonSqlConf.scala  |   11 +-
 .../command/table/CarbonCreateTableCommand.scala   |    2 +-
 .../org/apache/spark/sql/hive/CarbonSQLConf.scala  |   10 -
 .../carbondata/DataLoadFailAllTypeSortTest.scala   |   12 +-
 .../carbondata/commands/SetCommandTestCase.scala   |   16 -
 .../restructure/AlterTableValidationTestCase.scala |    4 +-
 .../processing/loading/DataLoadProcessBuilder.java |   33 -
 .../processing/loading/model/CarbonLoadModel.java  |   14 -
 .../loading/model/CarbonLoadModelBuilder.java      |    1 -
 .../processing/loading/model/LoadOption.java       |   11 -
 .../processing/loading/sort/SorterFactory.java     |   13 -
 .../UnsafeBatchParallelReadMergeSorterImpl.java    |  348 -------
 .../loading/sort/unsafe/UnsafeSortDataRows.java    |   10 +-
 .../steps/DataWriterBatchProcessorStepImpl.java    |  195 ----
 .../processing/sort/sortdata/SortParameters.java   |   12 -
 .../processing/util/CarbonDataProcessorUtil.java   |   28 -
 .../carbondata/sdk/file/CarbonWriterBuilder.java   |    2 +-
 47 files changed, 70 insertions(+), 2934 deletions(-)
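
After this change, sort-scope validation in SessionParams and CarbonUtil accepts only NO_SORT, LOCAL_SORT and GLOBAL_SORT. A hedged sketch of the dynamic per-table setting exercised by SetCommandTestCase (database and table names are illustrative; the property prefix comes from CarbonLoadOptionConstants below):

    // assumption: the dynamic key is "carbon.table.load.sort.scope." + <db>.<table>
    sql("SET carbon.table.load.sort.scope.default.sales=LOCAL_SORT")
    // setting BATCH_SORT now fails with InvalidConfigurationException:
    // "The sort scope ... can have only either NO_SORT, LOCAL_SORT or GLOBAL_SORT."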

diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 7a4fa77..bd60a16 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -969,8 +969,6 @@ public final class CarbonCommonConstants {
   public static final String LOAD_SORT_SCOPE = "carbon.load.sort.scope";
 
   /**
-   * If set to BATCH_SORT, the sorting scope is smaller and more index tree will be created,
-   * thus loading is faster but query maybe slower.
    * If set to LOCAL_SORT, the sorting scope is bigger and one index tree per data node will be
    * created, thus loading is slower but query is faster.
    * If set to GLOBAL_SORT, the sorting scope is bigger and one index tree per task will be
@@ -979,15 +977,6 @@ public final class CarbonCommonConstants {
   public static final String LOAD_SORT_SCOPE_DEFAULT = "NO_SORT";
 
   /**
-   * Size of batch data to keep in memory, as a thumb rule it supposed
-   * to be less than 45% of sort.inmemory.size.inmb otherwise it may spill intermediate data to disk
-   */
-  @CarbonProperty
-  public static final String LOAD_BATCH_SORT_SIZE_INMB = "carbon.load.batch.sort.size.inmb";
-
-  public static final String LOAD_BATCH_SORT_SIZE_INMB_DEFAULT = "0";
-
-  /**
   * The number of partitions to use when shuffling data for sort. If the user does not configure
   * it, or configures a value less than 1, the number of map tasks is used as the number of reduce tasks. In general, we
    * recommend 2-3 tasks per CPU core in your cluster.
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonLoadOptionConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonLoadOptionConstants.java
index 9547aba..8cb3f27 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonLoadOptionConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonLoadOptionConstants.java
@@ -87,13 +87,6 @@ public final class CarbonLoadOptionConstants {
   public static final String CARBON_TABLE_LOAD_SORT_SCOPE = "carbon.table.load.sort.scope.";
 
   /**
-   * option to specify the batch sort size inmb
-   */
-  @CarbonProperty(dynamicConfigurable = true)
-  public static final String CARBON_OPTIONS_BATCH_SORT_SIZE_INMB =
-      "carbon.options.batch.sort.size.inmb";
-
-  /**
    * Option to enable/ disable single_pass
    */
   @CarbonProperty(dynamicConfigurable = true)
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/SortScopeOptions.java b/core/src/main/java/org/apache/carbondata/core/constants/SortScopeOptions.java
index fe7b4e9..6b9a2e9 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/SortScopeOptions.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/SortScopeOptions.java
@@ -27,8 +27,6 @@ public class SortScopeOptions {
       sortScope = CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT;
     }
     switch (sortScope.toUpperCase()) {
-      case "BATCH_SORT":
-        return SortScope.BATCH_SORT;
       case "LOCAL_SORT":
         return SortScope.LOCAL_SORT;
       case "GLOBAL_SORT":
@@ -41,7 +39,7 @@ public class SortScopeOptions {
   }
 
   public enum SortScope {
-    NO_SORT, BATCH_SORT, LOCAL_SORT, GLOBAL_SORT
+    NO_SORT, LOCAL_SORT, GLOBAL_SORT
   }
 }
 
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index ddd0491..df5a49e 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -1829,12 +1829,8 @@ public final class CarbonUtil {
       return false;
     }
     switch (sortScope.toUpperCase()) {
-      case "BATCH_SORT":
-        return true;
       case "LOCAL_SORT":
-        return true;
       case "NO_SORT":
-        return true;
       case "GLOBAL_SORT":
         return true;
       default:
diff --git a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
index 481bdb1..151eef0 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
@@ -45,7 +45,6 @@ import static org.apache.carbondata.core.constants.CarbonCommonConstants.NUM_COR
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORDS_ACTION;
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORDS_LOGGER_ENABLE;
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORD_PATH;
-import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB;
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_DATEFORMAT;
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_GLOBAL_SORT_PARTITIONS;
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_IS_EMPTY_DATA_BAD_RECORD;
@@ -172,10 +171,9 @@ public class SessionParams implements Serializable, Cloneable {
         isValid = CarbonUtil.isValidSortOption(value);
         if (!isValid) {
           throw new InvalidConfigurationException("The sort scope " + key
-              + " can have only either NO_SORT, BATCH_SORT, LOCAL_SORT or GLOBAL_SORT.");
+              + " can have only either NO_SORT, LOCAL_SORT or GLOBAL_SORT.");
         }
         break;
-      case CARBON_OPTIONS_BATCH_SORT_SIZE_INMB:
       case CARBON_OPTIONS_GLOBAL_SORT_PARTITIONS:
       case NUM_CORES_LOADING:
       case NUM_CORES_COMPACTING:
@@ -238,7 +236,7 @@ public class SessionParams implements Serializable, Cloneable {
           isValid = CarbonUtil.isValidSortOption(value);
           if (!isValid) {
             throw new InvalidConfigurationException("The sort scope " + key
-                + " can have only either NO_SORT, BATCH_SORT, LOCAL_SORT or GLOBAL_SORT.");
+                + " can have only either NO_SORT, LOCAL_SORT or GLOBAL_SORT.");
           }
         } else {
           throw new InvalidConfigurationException(
diff --git a/docs/configuration-parameters.md b/docs/configuration-parameters.md
index 847580c..736670e 100644
--- a/docs/configuration-parameters.md
+++ b/docs/configuration-parameters.md
@@ -63,8 +63,7 @@ This section provides the details of all the configurations required for the Car
 | carbon.number.of.cores.while.loading | 2 | Number of cores to be used while loading data. This also determines the number of threads to be used to read the input files (csv) in parallel.**NOTE:** This configured value is used in every data loading step to parallelize the operations. Configuring a higher value can lead to increased early thread pre-emption by the OS and thereby reduce the overall performance. |
 | enable.unsafe.sort | true | CarbonData supports unsafe Java operations to avoid GC overhead for certain operations. This configuration enables the use of unsafe functions in CarbonData. **NOTE:** For operations like data loading, which generate many short-lived Java objects, Java GC can be a bottleneck. Using unsafe can overcome the GC overhead and improve the overall performance. |
 | enable.offheap.sort | true | CarbonData supports storing data in off-heap memory for certain operations during data loading and query. This helps to avoid Java GC and thereby improve the overall performance. This configuration enables using off-heap memory for sorting of data during data loading.**NOTE:** ***enable.unsafe.sort*** needs to be set to true to use off-heap memory. |
-| carbon.load.sort.scope | LOCAL_SORT | CarbonData can support various sorting options to match the balance between load and query performance. LOCAL_SORT:All the data given to an executor in the single load is fully sorted and written to carbondata files. Data loading performance is reduced a little as the entire data needs to be sorted in the executor. BATCH_SORT:Sorts the data in batches of configured size and writes to carbondata files. Data loading performance increases as the entir [...]
-| carbon.load.batch.sort.size.inmb | 0 | When  ***carbon.load.sort.scope*** is configured as ***BATCH_SORT***, this configuration needs to be added to specify the batch size for sorting and writing to carbondata files. **NOTE:** It is recommended to keep the value around 45% of ***carbon.sort.storage.inmemory.size.inmb*** to avoid spill to disk. Also it is recommended to keep the value higher than ***carbon.blockletgroup.size.in.mb***. Refer to *carbon.load.sort.scope* for more informati [...]
+| carbon.load.sort.scope | LOCAL_SORT | CarbonData can support various sorting options to match the balance between load and query performance. LOCAL_SORT:All the data given to an executor in the single load is fully sorted and written to carbondata files. Data loading performance is reduced a little as the entire data needs to be sorted in the executor. GLOBAL_SORT:Entire data in the data load is fully sorted and written to carbondata files. Data loading performance would get reduced as [...]
 | carbon.global.sort.rdd.storage.level | MEMORY_ONLY | Storage level to persist the dataset of RDD/dataframe when loading data with 'sort_scope'='global_sort'; if the executor has less memory, set this parameter to 'MEMORY_AND_DISK_SER' or another storage level appropriate to the environment. [See detail](http://spark.apache.org/docs/latest/rdd-programming-guide.html#rdd-persistence). |
 | carbon.load.global.sort.partitions | 0 | The number of partitions to use when shuffling data for global sort. Default value 0 means to use same number of map tasks as reduce tasks. **NOTE:** In general, it is recommended to have 2-3 tasks per CPU core in your cluster. |
 | carbon.sort.size | 100000 | Number of records to hold in memory to sort and write intermediate sort temp files. **NOTE:** Memory required for data loading will increase if you increase this value. Besides, each thread will cache this amount of records. The number of threads is configured by *carbon.number.of.cores.while.loading*. |
diff --git a/docs/ddl-of-carbondata.md b/docs/ddl-of-carbondata.md
index 0660504..a7d3b4d 100644
--- a/docs/ddl-of-carbondata.md
+++ b/docs/ddl-of-carbondata.md
@@ -238,7 +238,6 @@ CarbonData DDL statements are documented here, which includes:
      
      * LOCAL_SORT: data will be locally sorted (task level sorting)             
     * NO_SORT: default scope. It loads the data in an unsorted manner, which significantly increases load performance.
-     * BATCH_SORT: It increases the load performance but decreases the query performance if identified blocks > parallelism.
     * GLOBAL_SORT: It increases query performance, especially for highly concurrent point queries.
       It also allows strict isolation of loading resources, because the system uses Spark GroupBy to sort data, so the resources can be controlled by Spark.
 
diff --git a/docs/dml-of-carbondata.md b/docs/dml-of-carbondata.md
index 84c629c..33faa9c 100644
--- a/docs/dml-of-carbondata.md
+++ b/docs/dml-of-carbondata.md
@@ -117,7 +117,7 @@ CarbonData DML statements are documented here, which includes:
    Requirement: Sort Columns must be set while creating the table. If Sort Columns is null, Sort Scope is always NO_SORT.
   
     ```
-    OPTIONS('SORT_SCOPE'='BATCH_SORT')
+    OPTIONS('SORT_SCOPE'='GLOBAL_SORT')
     ```
     
     Priority order for choosing Sort Scope is:
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index b8e9f51..357f297 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -378,7 +378,7 @@ public CarbonWriterBuilder withLoadOptions(Map<String, String> options);
  * c. local_dictionary_threshold -- positive value, default is 10000
  * d. local_dictionary_enable -- true / false. Default is false
  * e. sort_columns -- comma separated column. "c1,c2". Default no columns are sorted.
- * j. sort_scope -- "local_sort", "no_sort", "batch_sort". default value is "no_sort"
+ * j. sort_scope -- "local_sort", "no_sort". default value is "no_sort"
  * k. long_string_columns -- comma separated string columns which are more than 32k length. 
  *                           default value is null.
  * l. inverted_index -- comma separated string columns for which inverted index needs to be
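
The SDK accepts an even narrower set: per the option list above, sort_scope through withLoadOptions is only "local_sort" or "no_sort". A hedged usage sketch (schema and writer setup elided; only the builder and option names come from the guide):

    // hypothetical SDK usage; "batch_sort" is no longer accepted here
    import java.util.{HashMap => JHashMap}
    val options = new JHashMap[String, String]()
    options.put("sort_scope", "local_sort") // default is "no_sort"
    val builder = CarbonWriter.builder().withLoadOptions(options)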
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableOutputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableOutputFormat.java
index 7700c8e..9a35fa9 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableOutputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableOutputFormat.java
@@ -86,8 +86,6 @@ public class CarbonTableOutputFormat extends FileOutputFormat<NullWritable, Obje
       "mapreduce.carbontable.empty.data.bad.record";
   public static final String SKIP_EMPTY_LINE = "mapreduce.carbontable.skip.empty.line";
   public static final String SORT_SCOPE = "mapreduce.carbontable.load.sort.scope";
-  public static final String BATCH_SORT_SIZE_INMB =
-      "mapreduce.carbontable.batch.sort.size.inmb";
   public static final String GLOBAL_SORT_PARTITIONS =
       "mapreduce.carbontable.global.sort.partitions";
   public static final String BAD_RECORD_PATH = "mapreduce.carbontable.bad.record.path";
@@ -391,15 +389,6 @@ public class CarbonTableOutputFormat extends FileOutputFormat<NullWritable, Obje
                 CarbonLoadOptionConstants.CARBON_OPTIONS_GLOBAL_SORT_PARTITIONS,
                 null)));
 
-    model.setBatchSortSizeInMb(
-        conf.get(
-            BATCH_SORT_SIZE_INMB,
-            carbonProperty.getProperty(
-                CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB,
-                carbonProperty.getProperty(
-                    CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB,
-                    CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB_DEFAULT))));
-
     String badRecordsPath = conf.get(BAD_RECORD_PATH);
     if (StringUtils.isEmpty(badRecordsPath)) {
       badRecordsPath =
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad1TestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad1TestCase.scala
deleted file mode 100644
index d301218..0000000
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad1TestCase.scala
+++ /dev/null
@@ -1,292 +0,0 @@
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.cluster.sdv.generated
-
-import org.apache.spark.sql.common.util._
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-/**
- * Test Class for BatchSortLoad1TestCase to verify all scenerios
- */
-
-class BatchSortLoad1TestCase extends QueryTest with BeforeAndAfterAll {
-         
-
-  //To load data after setting sort scope and sort size in carbon property file
-  test("Batch_sort_Loading_001-01-01-01_001-TC_001", Include) {
-     sql(s"""drop table if exists uniqdata11""").collect
-   sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata11""").collect
-
-     sql(s"""drop table uniqdata11""").collect
-  }
-
-
-  //To load 1 lac data load after setting sort scope and sort size in carbon property file
-  test("Batch_sort_Loading_001-01-01-01_001-TC_002", Include) {
-    sql(s"""drop table if exists uniqdata12""").collect
-     sql(s"""CREATE TABLE uniqdata12 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/1lac_UniqData.csv' into table uniqdata12 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata12""").collect
-
-     sql(s"""drop table uniqdata12""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with option file header in load
-  test("Batch_sort_Loading_001-01-01-01_001-TC_003", Include) {
-     sql(s"""CREATE TABLE uniqdata12a(CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata12a OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata12a""").collect
-
-     sql(s"""drop table uniqdata12a""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file without folder path in load
-  test("Batch_sort_Loading_001-01-01-01_001-TC_004", Include) {
-    intercept[Exception] {
-      sql(s"""CREATE TABLE uniqdata13 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
-      sql(s"""LOAD DATA  into table uniqdata13 OPTIONS('DELIMITER'=',' , 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    }
-    sql(s"""drop table uniqdata13""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file without table_name in load
-  test("Batch_sort_Loading_001-01-01-01_001-TC_005", Include) {
-    intercept[Exception] {
-      sql(s"""CREATE TABLE uniqdata14 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
-      sql(s"""LOAD DATA  INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table OPTIONS('DELIMITER'=',' , 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    }
-    sql(s"""drop table uniqdata14""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with option QUOTECHAR'='"'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_006", Include) {
-     sql(s"""CREATE TABLE uniqdata15 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata15 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata15""").collect
-
-     sql(s"""drop table uniqdata15""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with OPTIONS('COMMENTCHAR'='#')
-
-  test("Batch_sort_Loading_001-01-01-01_001-TC_007", Include) {
-     sql(s"""CREATE TABLE uniqdata16 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata16 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata16""").collect
-
-     sql(s"""drop table uniqdata16""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with option 'MULTILINE'='true'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_008", Include) {
-     sql(s"""CREATE TABLE uniqdata17 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata17 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata17""").collect
-
-     sql(s"""drop table uniqdata17""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with OPTIONS('ESCAPECHAR'='\')
-  test("Batch_sort_Loading_001-01-01-01_001-TC_009", Include) {
-     sql(s"""CREATE TABLE uniqdata18 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata18 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata18""").collect
-
-     sql(s"""drop table uniqdata18""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with OPTIONS 'BAD_RECORDS_ACTION'='FORCE'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_010", Include) {
-     sql(s"""CREATE TABLE uniqdata19b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19b OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19b""").collect
-
-     sql(s"""drop table uniqdata19b""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with OPTIONS 'BAD_RECORDS_ACTION'='IGNORE'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_011", Include) {
-     sql(s"""CREATE TABLE uniqdata19c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19c OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='IGNORE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19c""").collect
-
-     sql(s"""drop table uniqdata19c""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with OPTIONS 'BAD_RECORDS_ACTION'='REDIRECT'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_012", Include) {
-     sql(s"""CREATE TABLE uniqdata19d (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19d OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19d""").collect
-
-     sql(s"""drop table uniqdata19d""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with OPTIONS 'BAD_RECORDS_LOGGER_ENABLE'='FALSE'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_013", Include) {
-     sql(s"""CREATE TABLE uniqdata19e (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19e OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19e""").collect
-
-     sql(s"""drop table uniqdata19e""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with OPTIONS 'BAD_RECORDS_LOGGER_ENABLE'='TRUE'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_014", Include) {
-     sql(s"""CREATE TABLE uniqdata19f (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19f OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19f""").collect
-
-     sql(s"""drop table uniqdata19f""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with OPTIONS ‘SINGLE_PASS’=’true’
-  test("Batch_sort_Loading_001-01-01-01_001-TC_015", Include) {
-     sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',','QUOTECHAR'='"','SINGLE_PASS'='TRUE','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata20a""").collect
-
-     sql(s"""drop table uniqdata20a""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with OPTIONS ‘SINGLE_PASS’=’false’
-  test("Batch_sort_Loading_001-01-01-01_001-TC_016", Include) {
-     sql(s"""CREATE TABLE uniqdata20b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20b OPTIONS('DELIMITER'=',','QUOTECHAR'='"','SINGLE_PASS'='FALSE','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata20b""").collect
-
-     sql(s"""drop table uniqdata20b""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with NO_INVERTED_INDEX
-  test("Batch_sort_Loading_001-01-01-01_001-TC_017", Include) {
-     sql(s"""CREATE TABLE uniqdata20c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('NO_INVERTED_INDEX'='CUST_NAME')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20c OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata20c""").collect
-
-     sql(s"""drop table uniqdata20c""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with COLUMNDICT
-  test("Batch_sort_Loading_001-01-01-01_001-TC_018", Include) {
-     sql(s"""drop table if exists t3""").collect
-   sql(s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect
-    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3 options('COLUMNDICT'='country:$resourcesPath/Data/columndict/country.csv', 'SINGLE_PASS'='true')""").collect
-     sql(s"""drop table if exists t3""").collect
-  }
-
-
-  //To load data after setting sort scope and sort size in carbon property file with ALL_DICTIONARY_PATH
-  test("Batch_sort_Loading_001-01-01-01_001-TC_019", Include) {
-    sql(s"""drop table if exists t3""").collect
-    intercept[Exception] {
-      sql(s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect
-      sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3 options('ALL_DICTIONARY_PATH'='resourcesPath/Data/batchsort/data.dictionary')""").collect
-    }
-    sql(s"""drop table t3""").collect
-  }
-
-
-  //To check incremental load one with batch_sort
-  test("Batch_sort_Loading_001-01-01-01_001-TC_021", Include) {
-     sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata20a""").collect
-
-     sql(s"""drop table uniqdata20a""").collect
-  }
-
-
-  //To check sort_scope option with a wrong value
-  test("Batch_sort_Loading_001-01-01-01_001-TC_023", Include) {
-    intercept[Exception] {
-      sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='ABCXYZ')""").collect
-    }
-  }
-
-
-  //To check sort_scope option with null value
-  test("Batch_sort_Loading_001-01-01-01_001-TC_024", Include) {
-    intercept[Exception] {
-      sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='null')""").collect
-    }
-  }
-
-  val prop = CarbonProperties.getInstance()
-  val p1 = prop.getProperty("carbon.load.sort.scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
-  val p2 = prop.getProperty("carbon.batch.sort.size.inmb", CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB_DEFAULT)
-  val p3 = prop.getProperty("enable.unsafe.sort", CarbonCommonConstants.ENABLE_UNSAFE_SORT_DEFAULT)
-  val p4 = prop.getProperty("offheap.sort.chunk.size.inmb", CarbonCommonConstants.OFFHEAP_SORT_CHUNK_SIZE_IN_MB_DEFAULT)
-  val p5 = prop.getProperty("sort.inmemory.size.inmb", CarbonCommonConstants.IN_MEMORY_FOR_SORT_DATA_IN_MB_DEFAULT)
-
-  override protected def beforeAll() {
-    // Adding new properties
-    prop.addProperty("carbon.load.sort.scope", "batch_sort")
-    prop.addProperty("carbon.batch.sort.size.inmb", "1")
-    prop.addProperty("enable.unsafe.sort", "true")
-    prop.addProperty("offheap.sort.chunk.size.inmb", "128")
-    prop.addProperty("sort.inmemory.size.inmb", "1024")
-  }
-
-  override def afterAll: Unit = {
-    //Reverting to old
-    prop.addProperty("carbon.load.sort.scope", p1)
-    prop.addProperty("carbon.batch.sort.size.inmb", p2)
-    prop.addProperty("enable.unsafe.sort", p3)
-    prop.addProperty("offheap.sort.chunk.size.inmb", p4)
-    prop.addProperty("sort.inmemory.size.inmb", p5)
-  }
-       
-}
\ No newline at end of file
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad2TestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad2TestCase.scala
deleted file mode 100644
index d3ff6aa..0000000
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad2TestCase.scala
+++ /dev/null
@@ -1,282 +0,0 @@
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.cluster.sdv.generated
-
-import org.apache.spark.sql.common.util._
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-/**
- * Test Class for BatchSortLoad2TestCase to verify all scenerios
- */
-
-class BatchSortLoad2TestCase extends QueryTest with BeforeAndAfterAll {
-         
-
-  //To load data after setting only sort scope in carbon property file
-  test("Batch_sort_Loading_001-01-01-01_001-TC_027", Include) {
-     sql(s"""drop table if exists uniqdata11""").collect
-   sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata11""").collect
-
-     sql(s"""drop table uniqdata11""").collect
-  }
-
-
-  //To load 1 lac data load after setting only sort scope in carbon property file
-  test("Batch_sort_Loading_001-01-01-01_001-TC_028", Include) {
-    sql(s"""drop table if exists uniqdata12""").collect
-     sql(s"""CREATE TABLE uniqdata12 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/1lac_UniqData.csv' into table uniqdata12 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata12""").collect
-
-     sql(s"""drop table uniqdata12""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with option file header in load
-  test("Batch_sort_Loading_001-01-01-01_001-TC_029", Include) {
-    sql(s"""drop table if exists uniqdata12a""").collect
-     sql(s"""CREATE TABLE uniqdata12a(CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata12a OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata12a""").collect
-
-     sql(s"""drop table uniqdata12a""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file without folder path in load
-  test("Batch_sort_Loading_001-01-01-01_001-TC_030", Include) {
-    intercept[Exception] {
-      sql(s"""CREATE TABLE uniqdata13 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
-      sql(s"""LOAD DATA  into table uniqdata13 OPTIONS('DELIMITER'=',' , 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    }
-    sql(s"""drop table uniqdata13""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file without table_name in load
-  test("Batch_sort_Loading_001-01-01-01_001-TC_031", Include) {
-    intercept[Exception] {
-      sql(s"""CREATE TABLE uniqdata14 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
-      sql(s"""LOAD DATA  INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table OPTIONS('DELIMITER'=',' , 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    }
-    sql(s"""drop table uniqdata14""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with option QUOTECHAR'='"'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_032", Include) {
-     sql(s"""CREATE TABLE uniqdata15 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata15 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata15""").collect
-
-     sql(s"""drop table uniqdata15""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with OPTIONS('COMMENTCHAR'='#')
-
-  test("Batch_sort_Loading_001-01-01-01_001-TC_033", Include) {
-     sql(s"""CREATE TABLE uniqdata16 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata16 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata16""").collect
-
-     sql(s"""drop table uniqdata16""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with option 'MULTILINE'='true'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_034", Include) {
-     sql(s"""CREATE TABLE uniqdata17 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata17 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata17""").collect
-
-     sql(s"""drop table uniqdata17""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with OPTIONS('ESCAPECHAR'='\')
-  test("Batch_sort_Loading_001-01-01-01_001-TC_035", Include) {
-     sql(s"""CREATE TABLE uniqdata18 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata18 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata18""").collect
-
-     sql(s"""drop table uniqdata18""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_ACTION'='FORCE'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_036", Include) {
-     sql(s"""CREATE TABLE uniqdata19b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19b OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19b""").collect
-
-     sql(s"""drop table uniqdata19b""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_ACTION'='IGNORE'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_037", Include) {
-     sql(s"""CREATE TABLE uniqdata19c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19c OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='IGNORE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19c""").collect
-
-     sql(s"""drop table uniqdata19c""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_ACTION'='REDIRECT'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_038", Include) {
-     sql(s"""CREATE TABLE uniqdata19d (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19d OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19d""").collect
-
-     sql(s"""drop table uniqdata19d""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_LOGGER_ENABLE'='FALSE'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_039", Include) {
-     sql(s"""CREATE TABLE uniqdata19e (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19e OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19e""").collect
-
-     sql(s"""drop table uniqdata19e""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with OPTIONS 'BAD_RECORDS_LOGGER_ENABLE'='TRUE'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_040", Include) {
-     sql(s"""CREATE TABLE uniqdata19f (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata19f OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata19f""").collect
-
-     sql(s"""drop table uniqdata19f""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with OPTIONS 'SINGLE_PASS'='true'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_041", Include) {
-     sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',','QUOTECHAR'='"','SINGLE_PASS'='TRUE','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata20a""").collect
-
-     sql(s"""drop table uniqdata20a""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with OPTIONS 'SINGLE_PASS'='false'
-  test("Batch_sort_Loading_001-01-01-01_001-TC_042", Include) {
-     sql(s"""CREATE TABLE uniqdata20b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20b OPTIONS('DELIMITER'=',','QUOTECHAR'='"','SINGLE_PASS'='FALSE','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata20b""").collect
-
-     sql(s"""drop table uniqdata20b""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with NO_INVERTED_INDEX
-  test("Batch_sort_Loading_001-01-01-01_001-TC_043", Include) {
-     sql(s"""CREATE TABLE uniqdata20c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('NO_INVERTED_INDEX'='CUST_NAME')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20c OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdata20c""").collect
-
-     sql(s"""drop table uniqdata20c""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with COLUMNDICT
-  test("Batch_sort_Loading_001-01-01-01_001-TC_044", Include) {
-     sql(s"""drop table if exists t3""").collect
-   sql(s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect
-    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3 options('COLUMNDICT'='country:$resourcesPath/Data/columndict/country.csv', 'SINGLE_PASS'='true')""").collect
-     sql(s"""drop table t3""").collect
-  }
-
-
-  //To load data after setting only sort scope in carbon property file with ALL_DICTIONARY_PATH
-  test("Batch_sort_Loading_001-01-01-01_001-TC_045", Include) {
-    sql(s"""drop table if exists t3""").collect
-     sql(s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String, serialname String, salary Int,floatField float) STORED BY 'carbondata'""").collect
-    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3 options('ALL_DICTIONARY_PATH'='$resourcesPath/Data/columndict/data.dictionary', 'SINGLE_PASS'='true')""").collect
-     sql(s"""drop table t3""").collect
-  }
-
-
-  //To check incremental load one with batch_sort
-  test("Batch_sort_Loading_001-01-01-01_001-TC_047", Include) {
-     sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES ('SORT_SCOPE'='BATCH_SORT')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-   sql(s"""select * from uniqdata20a""").collect
-   sql(s"""drop table uniqdata20a""").collect
-  }
-
-
-  //To check sort_scope option with a wrong value
-  test("Batch_sort_Loading_001-01-01-01_001-TC_049", Include) {
-    intercept[Exception] {
-     sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-      sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' , 'SORT_SCOPE'='ABCXYZ',‘SINGLE_PASS’=’true’,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    }
-     sql(s"""drop table uniqdata20a""").collect
-  }
-
-
-  //To check sort_scope option with null value
-  test("Batch_sort_Loading_001-01-01-01_001-TC_050", Include) {
-    intercept[Exception] {
-      sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-      sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' , 'SORT_SCOPE'='null',‘SINGLE_PASS’=’true’,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    }
-    sql(s"""drop table uniqdata20a""").collect
-  }
-
-  val prop = CarbonProperties.getInstance()
-  val p1 = prop.getProperty("carbon.load.sort.scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
-
-  override protected def beforeAll() {
-    // Adding new properties
-    prop.addProperty("carbon.load.sort.scope", "batch_sort")
-  }
-
-  override def afterAll: Unit = {
-    //Reverting to old
-    prop.addProperty("carbon.load.sort.scope", p1)
-  }
-       
-}
\ No newline at end of file
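The beforeAll/afterAll teardown at the end of the removed file above is the standard save-and-restore pattern for cluster-wide CarbonProperties. A minimal sketch of the same pattern pinned to a surviving sort scope (the suite name LocalSortLoadSuite is illustrative; the QueryTest base and imports are taken from the suites in this diff):

    import org.apache.carbondata.core.constants.CarbonCommonConstants
    import org.apache.carbondata.core.util.CarbonProperties
    import org.apache.spark.sql.common.util._
    import org.scalatest.BeforeAndAfterAll

    class LocalSortLoadSuite extends QueryTest with BeforeAndAfterAll {
      val prop = CarbonProperties.getInstance()
      // Capture the value in effect before the suite mutates it.
      val saved = prop.getProperty("carbon.load.sort.scope",
        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)

      override protected def beforeAll(): Unit = {
        // Pin the load-time default to a scope that still exists.
        prop.addProperty("carbon.load.sort.scope", "local_sort")
      }

      override protected def afterAll(): Unit = {
        // Restore whatever the cluster had configured.
        prop.addProperty("carbon.load.sort.scope", saved)
      }
    }

Restoring in afterAll keeps the overridden default from leaking into later suites that run on the same cluster.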
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad3TestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad3TestCase.scala
deleted file mode 100644
index 14045a6..0000000
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortLoad3TestCase.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.cluster.sdv.generated
-
-import org.apache.spark.sql.common.util._
-import org.scalatest.BeforeAndAfterAll
-
-/**
- * Test Class for BatchSortLoad3TestCase to verify all scenarios
- */
-
-class BatchSortLoad3TestCase extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll(): Unit = {
-    sql(s"""drop table if exists uniqdata20c""").collect
-    sql(s"""drop table if exists uniqdata19c""").collect
-  }
-//Batch_sort_Loading_001-01-01-01_001-TC_020
-  test("Batch_sort_Loading_001-01-01-01_001-TC_020", Include) {
-    sql(
-      s"""CREATE TABLE uniqdata20c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string,
-         |DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,
-         |DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double,
-         |Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'
-         | TBLPROPERTIES('NO_INVERTED_INDEX'='CUST_NAME')""".stripMargin.replaceAll(System
-        .lineSeparator, "")).collect
-    sql(
-      s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into
-         | table uniqdata20c OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','COMMENTCHAR'='#',
-         | 'MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT',
-         | 'BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,
-         | DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,
-         | Double_COLUMN2,INTEGER_COLUMN1')""".stripMargin.replaceAll(System
-        .lineSeparator, "")).collect
-
-    sql(s"""select * from uniqdata20c""").collect
-    sql(s"""drop table  if exists uniqdata20c""").collect
-
-  }
-
-
-  //Batch_sort_Loading_001-01-01-01_001-TC_046
-  test("Batch_sort_Loading_001-01-01-01_001-TC_046", Include) {
-    sql(
-      s"""CREATE TABLE uniqdata19c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string,
-        DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,
-        DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double,
-        Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""".stripMargin
-        .replaceAll(System.lineSeparator, "")).collect
-
-    sql(
-      s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table
-         | uniqdata19c OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#',
-         | 'MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='IGNORE',
-         | 'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,
-         | BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,
-         | Double_COLUMN2,INTEGER_COLUMN1')""".stripMargin.replaceAll(System.lineSeparator, "")).collect
-
-    sql(s"""select * from uniqdata19c""").collect
-    sql(s"""drop table if exists uniqdata19c""").collect
-
-  }
-
-
-  //Batch_sort_Loading_001-01-01-01_001-TC_053
-  test("Batch_sort_Loading_001-01-01-01_001-TC_053", Include) {
-    sql(
-      s"""drop table if exists t3""").collect
-    sql(
-      s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String,
-         |serialname String,salary Int,floatField float)
-         | STORED BY 'carbondata'""".stripMargin.replaceAll(System.lineSeparator, "")).collect
-
-    sql(
-      s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3 options(
-         |'COLUMNDICT'='country:$resourcesPath/Data/columndict/country.csv','single_pass'='true')"""
-        .stripMargin.replaceAll(System.lineSeparator, "")).collect
-
-    sql(s"""drop table if exists t3""").collect
-  }
-
-
-  //Batch_sort_Loading_001-01-01-01_001-TC_054
-  test("Batch_sort_Loading_001-01-01-01_001-TC_054", Include) {
-    sql(s"""drop table if exists t3""").collect
-    sql(
-      s"""CREATE TABLE t3 (ID Int, country String, name String, phonetype String,
-         |serialname String, salary Int,floatField float)
-         | STORED BY 'carbondata'""".stripMargin.replaceAll(System.lineSeparator, "")).collect
-
-    sql(
-      s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv'
-         | into table t3 options('ALL_DICTIONARY_PATH'=
-         | '$resourcesPath/Data/columndict/data.dictionary','single_pass'='true')""".stripMargin
-        .replaceAll(System.lineSeparator, "")).collect
-    sql(s"""drop table if exists t3""").collect
-  }
-
-  //Batch_sort_Loading_001-01-01-01_001-TC_057
-  test("Batch_sort_Loading_001-01-01-01_001-TC_057", Include) {
-    sql(s"""drop table if exists uniqdata20a""").collect
-    sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
-    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',','QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',','QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',','QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',','QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table  uniqdata20a OPTIONS('DELIMITER'=',','QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""".stripMargin).collect
-
-    sql(s"""alter table uniqdata20a compact 'minor'""").collect
-    sql(s"""drop table if exists  uniqdata20a""").collect
-  }
-
-
-  //Batch_sort_Loading_001-01-01-01_001-TC_058
-  test("Batch_sort_Loading_001-01-01-01_001-TC_058", Include) {
-    sql(s"""drop table if exists uniqdata20a""").collect
-    sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-    intercept[Exception] {
-      sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' , 'SORT_SCOPE'='ABCXYZ',‘SINGLE_PASS’=’true’,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-      sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a_hive """).collect
-    }
-    sql(s"""drop table if exists uniqdata20a""").collect
-  }
-
-
-  //Batch_sort_Loading_001-01-01-01_001-TC_059
-  test("Batch_sort_Loading_001-01-01-01_001-TC_059", Include) {
-    sql(s"""drop table if exists uniqdata20a""").collect
-    intercept[Exception] {
-      sql(s"""CREATE TABLE uniqdata20a (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-
-      sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a OPTIONS('DELIMITER'=',' , 'SORT_SCOPE'='null',‘SINGLE_PASS’=’true’,'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-      sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20a_hive """).collect
-    }
-    sql(s"""drop table if exists  uniqdata20a""").collect
-  }
-
-  //Batch_sort_Loading_001-01-01-01_001-TC_060
-  test("Batch_sort_Loading_001-01-01-01_001-TC_060", Include) {
-    sql(s"""drop table if exists uniqdata20b""").collect
-    sql(s"""drop table if exists uniqdata20c""").collect
-    sql(s"""CREATE TABLE uniqdata20b (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata20b OPTIONS('DELIMITER'=',','QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""CREATE TABLE uniqdata20c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-    sql(s"""insert into uniqdata20c select * from uniqdata20b""")
-    sql(s"""drop table if exists  uniqdata20b""").collect
-    sql(s"""drop table if exists  uniqdata20c""").collect
-  }
-
-
-  //Batch_sort_Loading_001-01-01-01_001-TC_061
-  test("Batch_sort_Loading_001-01-01-01_001-TC_061", Include) {
-    sql(s"""drop TABLE if exists uniqdata_h""").collect
-    sql(s"""drop TABLE if exists uniqdata_c""").collect
-    sql(s"""CREATE TABLE uniqdata_h (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','""").collect
-    sql(s"""load data inpath '$resourcesPath/Data/uniqdata/7000_UniqData_hive2.csv' into table uniqdata_h""").collect
-    sql(s"""CREATE TABLE uniqdata_c (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
-    sql(s"""insert into uniqdata_c select * from uniqdata_h""")
-    sql(s"""drop table if exists  uniqdata_h""").collect
-    sql(s"""drop table if exists  uniqdata_c""").collect
-  }
-
-  override def afterAll {
-  }
-}
\ No newline at end of file
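With BATCH_SORT gone, the create-load-compact flow exercised by TC_057 above maps directly onto a surviving scope. A minimal sketch against the same batchsort data file (the table name t3_local is illustrative, and LOCAL_SORT is assumed here as the nearest replacement):

    sql("drop table if exists t3_local")
    sql(
      """CREATE TABLE t3_local (ID Int, country String, name String,
        |phonetype String, serialname String, salary Int, floatField float)
        |STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='LOCAL_SORT')""".stripMargin)
    // Load twice so minor compaction has more than one segment to work on.
    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3_local""")
    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/Data/batchsort/data.csv' into table t3_local""")
    sql("alter table t3_local compact 'minor'")
    sql("drop table if exists t3_local")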
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortQueryTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortQueryTestCase.scala
deleted file mode 100644
index 11b060a..0000000
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BatchSortQueryTestCase.scala
+++ /dev/null
@@ -1,1043 +0,0 @@
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.cluster.sdv.generated
-
-import org.apache.spark.sql.common.util._
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.{CarbonCommonConstants, CarbonV3DataFormatConstants}
-import org.apache.carbondata.core.util.CarbonProperties
-
-/**
- * Test Class for BatchSortQueryTestCase to verify all scenarios
- */
-
-class BatchSortQueryTestCase extends QueryTest with BeforeAndAfterAll {
-         
-
-  //To check select query with limit
-  test("Batch_sort_Querying_001-01-01-01_001-TC_001", Include) {
-     sql(s"""drop table if exists uniqdataquery1""").collect
-   sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
-
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select * from uniqdataquery1 limit 100""").collect
-
-  }
-
-
-  //To check select query with limit as string
-  test("Batch_sort_Querying_001-01-01-01_001-TC_002", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 limit """"").collect
-    }
-  }
-
-
-  //To check select query with no input given at limit
-  test("Batch_sort_Querying_001-01-01-01_001-TC_003", Include) {
-
-    sql(s"""select * from uniqdataquery1 limit""").collect
-  }
-
-
-  //To check select count  query  with where and group by clause
-  test("Batch_sort_Querying_001-01-01-01_001-TC_004", Include) {
-
-    sql(s"""select count(*) from uniqdataquery1 where cust_name="CUST_NAME_00000" group by cust_name""").collect
-
-
-  }
-
-
-  //To check select count  query   and group by  cust_name using like operator
-  test("Batch_sort_Querying_001-01-01-01_001-TC_005", Include) {
-
-    sql(s"""select count(*) from uniqdataquery1 where cust_name like "cust_name_0%" group by cust_name""").collect
-
-
-  }
-
-
-  //To check select count  query   and group by  name using IN operator with empty values
-  test("Batch_sort_Querying_001-01-01-01_001-TC_006", Include) {
-
-    sql(s"""select count(*) from uniqdataquery1 where cust_name IN("","") group by cust_name""").collect
-
-
-  }
-
-
-  //To check select count  query   and group by  name using IN operator with specific  values
-  test("Batch_sort_Querying_001-01-01-01_001-TC_007", Include) {
-
-    sql(s"""select count(*) from uniqdataquery1 where cust_name IN(1,2,3) group by cust_name""").collect
-
-
-  }
-
-
-  //To check select distinct query
-  test("Batch_sort_Querying_001-01-01-01_001-TC_008", Include) {
-
-    sql(s"""select distinct cust_name from uniqdataquery1 group by cust_name""").collect
-
-
-  }
-
-
-  //To check where clause with OR and no operand
-  test("Batch_sort_Querying_001-01-01-01_001-TC_009", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id > 1 OR """).collect
-    }
-  }
-
-
-  //To check OR clause with LHS and RHS having no arguments
-  test("Batch_sort_Querying_001-01-01-01_001-TC_010", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where OR """).collect
-    }
-  }
-
-
-  //To check OR clause with LHS having no arguments
-  test("Batch_sort_Querying_001-01-01-01_001-TC_011", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where OR cust_id > "1"""").collect
-    }
-  }
-
-
-  //To check incorrect query
-  test("Batch_sort_Querying_001-01-01-01_001-TC_013", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id > 0 OR name  """).collect
-    }
-  }
-
-
-  //To check select query with rhs false
-  test("Batch_sort_Querying_001-01-01-01_001-TC_014", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id > 9005 OR false""").collect
-
-
-  }
-
-
-  //To check count on multiple arguments
-  test("Batch_sort_Querying_001-01-01-01_001-TC_015", Include) {
-
-    sql(s"""select count(cust_id,cust_name) from uniqdataquery1 where cust_id > 10544""").collect
-
-
-  }
-
-
-  //To check count with no argument
-  test("Batch_sort_Querying_001-01-01-01_001-TC_016", Include) {
-
-    sql(s"""select count() from uniqdataquery1 where cust_id > 10544""").collect
-
-
-  }
-
-
-  //To check count with * as an argument
-  test("Batch_sort_Querying_001-01-01-01_001-TC_017", Include) {
-
-    sql(s"""select count(*) from uniqdataquery1 where cust_id>10544""").collect
-
-
-  }
-
-
-  //To check select count query execution with entire column
-  test("Batch_sort_Querying_001-01-01-01_001-TC_018", Include) {
-
-    sql(s"""select count(*) from uniqdataquery1""").collect
-
-
-  }
-
-
-  //To check select distinct query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_019", Include) {
-
-    sql(s"""select distinct * from uniqdataquery1""").collect
-
-
-  }
-
-
-  //To check select multiple column query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_020", Include) {
-
-    sql(s"""select cust_name,cust_id,count(cust_name) from uniqdataquery1 group by cust_name,cust_id""").collect
-
-
-  }
-
-
-  //To check select count and distinct query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_021", Include) {
-    intercept[Exception] {
-      sql(s"""select count(cust_id),distinct(cust_name) from uniqdataquery1""").collect
-    }
-  }
-
-
-  //To check sum query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_022", Include) {
-
-    sql(s"""select sum(cust_id) as sum,cust_name from uniqdataquery1 group by cust_name""").collect
-
-
-  }
-
-
-  //To check sum of names query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_023", Include) {
-
-    sql(s"""select sum(cust_name) from uniqdataquery1""").collect
-
-
-  }
-
-
-  //To check select distinct and groupby query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_024", Include) {
-
-    sql(s"""select distinct(cust_name,cust_id) from uniqdataquery1 group by cust_name,cust_id""").collect
-
-
-  }
-
-
-  //To check select with where clause on cust_name query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_025", Include) {
-
-    sql(s"""select cust_id from uniqdataquery1 where cust_name="cust_name_00000"""").collect
-
-
-  }
-
-
-  //To check query execution with IN operator without parentheses
-  test("Batch_sort_Querying_001-01-01-01_001-TC_027", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id IN 9000,9005""").collect
-    }
-  }
-
-
-  //To check query execution with IN operator with parentheses
-  test("Batch_sort_Querying_001-01-01-01_001-TC_028", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id IN (9000,9005)""").collect
-
-
-  }
-
-
-  //To check query execution with IN operator without specifying any field.
-  test("Batch_sort_Querying_001-01-01-01_001-TC_029", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where IN(1,2)""").collect
-    }
-  }
-
-
-  //To check OR with correct syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_030", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id>9005 or cust_id=9005""").collect
-
-
-  }
-
-
-  //To check OR with boolean expression
-  test("Batch_sort_Querying_001-01-01-01_001-TC_031", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id>9005 or false""").collect
-
-
-  }
-
-
-  //To check AND with correct syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_032", Include) {
-
-    sql(s"""select * from uniqdataquery1 where true AND true""").collect
-
-
-  }
-
-
-  //To check AND with using booleans
-  test("Batch_sort_Querying_001-01-01-01_001-TC_033", Include) {
-
-    sql(s"""select * from uniqdataquery1 where true AND false""").collect
-
-
-  }
-
-
-  //To check AND with using booleans in invalid syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_034", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where AND true""").collect
-    }
-  }
-
-
-  //To check AND Passing two conditions on same input
-  test("Batch_sort_Querying_001-01-01-01_001-TC_035", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id=6 and cust_id>5""").collect
-
-
-  }
-
-
-  //To check AND changing case
-  test("Batch_sort_Querying_001-01-01-01_001-TC_036", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id=6 aND cust_id>5""").collect
-
-
-  }
-
-
-  //To check AND using 0 and 1 treated as boolean values
-  test("Batch_sort_Querying_001-01-01-01_001-TC_037", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where true aNd 0""").collect
-    }
-  }
-
-
-  //To check AND on two columns
-  test("Batch_sort_Querying_001-01-01-01_001-TC_038", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id=9000 and cust_name='cust_name_00000'""").collect
-
-
-  }
-
-
-  //To check '='operator with correct syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_039", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id=9000 and cust_name='cust_name_00000' and ACTIVE_EMUI_VERSION='ACTIVE_EMUI_VERSION_00000'""").collect
-
-
-  }
-
-
-  //To check '='operator without Passing any value
-  test("Batch_sort_Querying_001-01-01-01_001-TC_040", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id=""").collect
-    }
-  }
-
-
-  //To check '='operator without Passing columnname and value.
-  test("Batch_sort_Querying_001-01-01-01_001-TC_041", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where =""").collect
-    }
-  }
-
-
-  //To check '!='operator with correct syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_042", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id!=9000""").collect
-
-
-  }
-
-
-  //To check '!='operator by keeping space between them
-  test("Batch_sort_Querying_001-01-01-01_001-TC_043", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id !   = 9001""").collect
-    }
-  }
-
-
-  //To check '!='operator by Passing boolean value whereas column expects an integer
-  test("Batch_sort_Querying_001-01-01-01_001-TC_044", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id != true""").collect
-
-
-  }
-
-
-  //To check '!='operator without providing any value
-  test("Batch_sort_Querying_001-01-01-01_001-TC_045", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id != """).collect
-    }
-  }
-
-
-  //To check '!='operator without providing any column name
-  test("Batch_sort_Querying_001-01-01-01_001-TC_046", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where  != false""").collect
-    }
-  }
-
-
-  //To check 'NOT' with valid syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_047", Include) {
-
-    sql(s"""select * from uniqdataquery1 where NOT(cust_id=9000)""").collect
-
-
-  }
-
-
-  //To check 'NOT' using boolean values
-  test("Batch_sort_Querying_001-01-01-01_001-TC_048", Include) {
-
-    sql(s"""select * from uniqdataquery1 where NOT(false)""").collect
-
-
-  }
-
-
-  //To check 'NOT' applying it on a value
-  test("Batch_sort_Querying_001-01-01-01_001-TC_049", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id = 'NOT(false)'""").collect
-
-
-  }
-
-
-  //To check 'NOT' with between operator
-  test("Batch_sort_Querying_001-01-01-01_001-TC_050", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id NOT BETWEEN 9000 and 9005""").collect
-
-
-  }
-
-
-  //To check 'NOT' operator in nested way
-  test("Batch_sort_Querying_001-01-01-01_001-TC_051", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id NOT (NOT(true))""").collect
-    }
-  }
-
-
-  //To check 'NOT' operator with parenthesis.
-  test("Batch_sort_Querying_001-01-01-01_001-TC_052", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id NOT ()""").collect
-    }
-  }
-
-
-  //To check 'NOT' operator without condition.
-  test("Batch_sort_Querying_001-01-01-01_001-TC_053", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id NOT""").collect
-    }
-  }
-
-
-  //To check 'NOT' operator for case sensitivity.
-  test("Batch_sort_Querying_001-01-01-01_001-TC_054", Include) {
-
-    sql(s"""select * from uniqdataquery1 where nOt(false)""").collect
-
-
-  }
-
-
-  //To check '>' operator without specifying column
-  test("Batch_sort_Querying_001-01-01-01_001-TC_055", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where > 20""").collect
-    }
-  }
-
-
-  //To check '>' operator without specifying value
-  test("Batch_sort_Querying_001-01-01-01_001-TC_056", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id > """).collect
-    }
-  }
-
-
-  //To check '>' operator with correct syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_057", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id >9005""").collect
-
-
-  }
-
-
-  //To check '>' operator for Integer value
-  test("Batch_sort_Querying_001-01-01-01_001-TC_058", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id > 9010""").collect
-
-
-  }
-
-
-  //To check '>' operator for String value
-  test("Batch_sort_Querying_001-01-01-01_001-TC_059", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_name > 'cust_name_00000'""").collect
-
-
-  }
-
-
-  //To check '<' operator without specifying column
-  test("Batch_sort_Querying_001-01-01-01_001-TC_060", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where < 5""").collect
-    }
-  }
-
-
-  //To check '<' operator with correct syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_061", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id < 9005""").collect
-
-
-  }
-
-
-  //To check '<' operator for String value
-  test("Batch_sort_Querying_001-01-01-01_001-TC_062", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_name < "cust_name_00001"""").collect
-
-
-  }
-
-
-  //To check '<=' operator without specifying column
-  test("Batch_sort_Querying_001-01-01-01_001-TC_063", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where  <= 2""").collect
-    }
-  }
-
-
-  //To check '<=' operator without providing value
-  test("Batch_sort_Querying_001-01-01-01_001-TC_064", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where  cust_id <= """).collect
-    }
-  }
-
-
-  //To check '<=' operator with correct syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_065", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id <=9002""").collect
-
-
-  }
-
-
-  //To check '<=' operator adding space between '<' and '='
-  test("Batch_sort_Querying_001-01-01-01_001-TC_066", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id < =  9002""").collect
-    }
-  }
-
-
-  //To check 'BETWEEN' operator without providing range
-  test("Batch_sort_Querying_001-01-01-01_001-TC_067", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where age between""").collect
-    }
-  }
-
-
-  //To check  'BETWEEN' operator with correct syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_068", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id between 9002 and 9030""").collect
-
-
-  }
-
-
-  //To check  'BETWEEN' operator providing two same values
-  test("Batch_sort_Querying_001-01-01-01_001-TC_069", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_name beTWeen 'CU%' and 'CU%'""").collect
-
-
-  }
-
-
-  //To check  'NOT BETWEEN' operator for integer
-  test("Batch_sort_Querying_001-01-01-01_001-TC_070", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id NOT between 9024 and 9030""").collect
-
-
-  }
-
-
-  //To check  'NOT BETWEEN' operator for string
-  test("Batch_sort_Querying_001-01-01-01_001-TC_071", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_name NOT beTWeen 'cust_name_00000' and 'cust_name_00001'""").collect
-
-
-  }
-
-
-  //To check 'IS NULL' for case sensitivity.
-  test("Batch_sort_Querying_001-01-01-01_001-TC_072", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id Is NulL""").collect
-
-
-  }
-
-
-  //To check  'IS NULL' for null field
-  test("Batch_sort_Querying_001-01-01-01_001-TC_073", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_name Is NulL""").collect
-
-
-  }
-
-
-  //To check  'IS NULL' without providing column
-  test("Batch_sort_Querying_001-01-01-01_001-TC_074", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where Is NulL""").collect
-    }
-  }
-
-
-  //To check  'IS NOT NULL' without providing column
-  test("Batch_sort_Querying_001-01-01-01_001-TC_075", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where IS NOT NULL""").collect
-    }
-  }
-
-
-  //To check 'IS NOT NULL' operator with correct syntax
-  test("Batch_sort_Querying_001-01-01-01_001-TC_076", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id IS NOT NULL""").collect
-
-
-  }
-
-
-  //To check  'Like' operator for integer
-  test("Batch_sort_Querying_001-01-01-01_001-TC_077", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id Like '9%'""").collect
-
-
-  }
-
-
-  //To check Limit clause with where condition
-  test("Batch_sort_Querying_001-01-01-01_001-TC_078", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id>10987 limit 15""").collect
-
-
-  }
-
-
-  //To check Limit clause with where condition and no argument
-  test("Batch_sort_Querying_001-01-01-01_001-TC_079", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id=10987 limit""").collect
-    }
-  }
-
-
-  //To check Limit clause with where condition and decimal argument
-  test("Batch_sort_Querying_001-01-01-01_001-TC_080", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id=10987 limit 0.0""").collect
-    }
-  }
-
-
-  //To check where clause with distinct and group by
-  test("Batch_sort_Querying_001-01-01-01_001-TC_081", Include) {
-
-    sql(s"""select distinct cust_name from uniqdataquery1 where cust_name IN("CUST_NAME_01999") group by cust_name""").collect
-
-
-  }
-
-
-  //To check subqueries
-  test("Batch_sort_Querying_001-01-01-01_001-TC_082", Include) {
-
-    sql(s"""select * from (select cust_id from uniqdataquery1 where cust_id IN (10987,10988)) uniqdataquery1 where cust_id IN (10987, 10988)""").collect
-
-
-  }
-
-
-  //To check count with where clause
-  test("Batch_sort_Querying_001-01-01-01_001-TC_083", Include) {
-
-    sql(s"""select count(cust_id) from uniqdataquery1 where cust_id > 10874""").collect
-
-
-  }
-
-
-  //To check Join query
-  test("Batch_sort_Querying_001-01-01-01_001-TC_084", Include) {
-     sql(s"""drop table if exists uniqdataquery11""").collect
-   sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
-   sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdataquery11 OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 join uniqdataquery11 where uniqdataquery1.CUST_ID > 10700 and uniqdataquery11.CUST_ID > 10500""").collect
-
-
-  }
-
-
-  //To check Left join with where clause
-  ignore("Batch_sort_Querying_001-01-01-01_001-TC_085", Include) {
-
-    sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 LEFT join uniqdataquery11 where uniqdataquery1.CUST_ID > 10000""").collect
-
-
-  }
-
-
-  //To check Full join
-  test("Batch_sort_Querying_001-01-01-01_001-TC_086", Include) {
-    intercept[Exception] {
-      sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 FULL JOIN uniqdataquery11 where CUST_ID""").collect
-    }
-  }
-
-
-  //To check Broadcast join
-  ignore("Batch_sort_Querying_001-01-01-01_001-TC_087", Include) {
-
-    sql(s"""select broadcast.cust_id from uniqdataquery1 broadcast join uniqdataquery11 where broadcast.cust_id > 10900""").collect
-
-     sql(s"""drop table uniqdataquery11""").collect
-  }
-
-
-  //To check avg function
-  test("Batch_sort_Querying_001-01-01-01_001-TC_088", Include) {
-
-    sql(s"""select avg(cust_name) from uniqdataquery1 where cust_id > 10544 group by cust_name""").collect
-
-
-  }
-
-
-  //To check subquery with aggregate function avg
-  test("Batch_sort_Querying_001-01-01-01_001-TC_089", Include) {
-
-    sql(s"""select cust_id,avg(cust_id) from uniqdataquery1 where cust_id IN (select cust_id from uniqdataquery1 where cust_id > 0) group by cust_id""").collect
-
-
-  }
-
-
-  //To check HAVING on Measure
-  test("Batch_sort_Querying_001-01-01-01_001-TC_090", Include) {
-
-    sql(s"""select cust_id from uniqdataquery1 where cust_id > 10543 group by cust_id having cust_id = 10546""").collect
-
-
-  }
-
-
-  //To check HAVING on dimension
-  test("Batch_sort_Querying_001-01-01-01_001-TC_091", Include) {
-
-    sql(s"""select cust_name from uniqdataquery1 where cust_id > 10544 group by cust_name having cust_name like 'C%'""").collect
-
-
-  }
-
-
-  //To check HAVING on multiple columns
-  test("Batch_sort_Querying_001-01-01-01_001-TC_092", Include) {
-
-    sql(s"""select cust_id,cust_name from uniqdataquery1 where cust_id > 10544 group by cust_id,cust_name having cust_id = 10545 AND cust_name like 'C%'""").collect
-
-
-  }
-
-
-  //To check HAVING with empty condition
-  test("Batch_sort_Querying_001-01-01-01_001-TC_094", Include) {
-
-    sql(s"""select cust_name from uniqdataquery1 where cust_id > 10544 group by cust_name having """"").collect
-
-
-  }
-
-
-  //To check SORT on measure
-  test("Batch_sort_Querying_001-01-01-01_001-TC_095", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id > 10544 sort by cust_id asc""").collect
-
-
-  }
-
-
-  //To check SORT on dimension
-  test("Batch_sort_Querying_001-01-01-01_001-TC_096", Include) {
-
-    sql(s"""select * from uniqdataquery1 where cust_id > 10544 sort by cust_name desc""").collect
-
-
-  }
-
-
-  //To check SORT using 'AND' on multiple columns
-  test("Batch_sort_Querying_001-01-01-01_001-TC_097", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 where cust_id > 10544 sort by cust_name desc and cust_id asc""").collect
-    }
-  }
-
-
-  //To check Select average names and group by name query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_098", Include) {
-
-    sql(s"""select avg(cust_name) from uniqdataquery1 group by cust_name""").collect
-
-
-  }
-
-
-  //To check Select average id and group by id query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_099", Include) {
-
-    sql(s"""select avg(cust_id) from uniqdataquery1 group by cust_id""").collect
-
-
-  }
-
-
-  //To check average aggregate function with no arguments
-  test("Batch_sort_Querying_001-01-01-01_001-TC_100", Include) {
-    intercept[Exception] {
-      sql(s"""select cust_id,avg() from uniqdataquery1 group by cust_id""").collect
-    }
-  }
-
-
-  //To check average aggregate function with empty string
-  test("Batch_sort_Querying_001-01-01-01_001-TC_101", Include) {
-
-    sql(s"""select cust_id,avg("") from uniqdataquery1 group by cust_id""").collect
-
-
-  }
-
-
-  //To check nested  average aggregate function
-  test("Batch_sort_Querying_001-01-01-01_001-TC_102", Include) {
-    intercept[Exception] {
-      sql(s"""select cust_id,avg(count(cust_id)) from uniqdataquery1 group by cust_id""").collect
-    }
-  }
-
-
-  //To check Multilevel query
-  test("Batch_sort_Querying_001-01-01-01_001-TC_103", Include) {
-
-    sql(s"""select cust_id,avg(cust_id) from uniqdataquery1 where cust_id IN (select cust_id from uniqdataquery1) group by cust_id""").collect
-
-
-  }
-
-
-  //To check Using first() with group by clause
-  test("Batch_sort_Querying_001-01-01-01_001-TC_104", Include) {
-
-    sql(s"""select first(cust_id) from uniqdataquery1 group by cust_id""").collect
-
-
-  }
-
-
-  //To check max with groupby clause query execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_105", Include) {
-
-    sql(s"""select max(cust_name) from uniqdataquery1 group by(cust_name)""").collect
-
-
-  }
-
-
-  //To check max with groupby clause query with id execution
-  test("Batch_sort_Querying_001-01-01-01_001-TC_106", Include) {
-
-    sql(s"""select max(cust_name) from uniqdataquery1 group by(cust_name),cust_id""").collect
-
-
-  }
-
-
-  //To check  multiple aggregate functions
-  test("Batch_sort_Querying_001-01-01-01_001-TC_107", Include) {
-
-    sql(s"""select max(cust_name),sum(cust_name),count(cust_id) from uniqdataquery1 group by(cust_name),cust_id""").collect
-
-
-  }
-
-
-  //To check max with empty string as argument
-  test("Batch_sort_Querying_001-01-01-01_001-TC_108", Include) {
-
-    sql(s"""select max("") from uniqdataquery1 group by(cust_name)""").collect
-
-
-  }
-
-
-  //To check  select count of names with group by clause
-  test("Batch_sort_Querying_001-01-01-01_001-TC_109", Include) {
-
-    sql(s"""select count(cust_name) from uniqdataquery1 group by cust_name""").collect
-
-
-  }
-
-
-  //To check Order by ASC
-  test("Batch_sort_Querying_001-01-01-01_001-TC_110", Include) {
-
-    sql(s"""select * from uniqdataquery1 order by cust_id ASC""").collect
-
-
-  }
-
-
-  //To check Order by DESC
-  test("Batch_sort_Querying_001-01-01-01_001-TC_111", Include) {
-
-    sql(s"""select * from uniqdataquery1 order by cust_id DESC""").collect
-
-
-  }
-
-
-  //To check Order by without column name
-  test("Batch_sort_Querying_001-01-01-01_001-TC_112", Include) {
-    intercept[Exception] {
-      sql(s"""select * from uniqdataquery1 order by ASC""").collect
-    }
-  }
-
-
-  //To check cast Int to String
-  test("Batch_sort_Querying_001-01-01-01_001-TC_113", Include) {
-
-    sql(s"""select cast(bigint_column1 as STRING) from uniqdataquery1""").collect
-
-
-  }
-
-
-  //To check cast string to int
-  test("Batch_sort_Querying_001-01-01-01_001-TC_114", Include) {
-
-    sql(s"""select cast(cust_name as INT) from uniqdataquery1""").collect
-
-
-  }
-
-
-  //To check cast int to decimal
-  test("Batch_sort_Querying_001-01-01-01_001-TC_115", Include) {
-
-    sql(s"""select cast(bigint_column1 as DECIMAL(10,4)) from uniqdataquery1""").collect
-
-
-  }
-
-
-  //To check Using window with order by
-  test("Batch_sort_Querying_001-01-01-01_001-TC_116", Include) {
-
-    sql(s"""select cust_name, sum(bigint_column1) OVER w from uniqdataquery1 WINDOW w AS (PARTITION BY bigint_column2 ORDER BY cust_id)""").collect
-
-
-  }
-
-
-  //To check Using window without partition
-  test("Batch_sort_Querying_001-01-01-01_001-TC_117", Include) {
-    intercept[Exception] {
-      sql(s"""select cust_name, sum(bigint_column1) OVER w from uniqdataquery1 WINDOW w""").collect
-    }
-  }
-
-
-  //To check Using ROLLUP with group by
-  test("Batch_sort_Querying_001-01-01-01_001-TC_118", Include) {
-
-    sql(s"""select cust_name from uniqdataquery1 group by cust_name with ROLLUP""").collect
-
-
-  }
-
-
-  //To check Using ROLLUP without group by clause
-  test("Batch_sort_Querying_001-01-01-01_001-TC_119", Include) {
-    intercept[Exception] {
-      sql(s"""select cust_name from uniqdataquery1 with ROLLUP""").collect
-    }
-    sql(s"""drop table uniqdataquery1""").collect
-  }
-}
\ No newline at end of file
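
[Note] The BatchSortQueryTestCase removed above exercised generic SQL shapes (group by, order by, casts, window functions, rollup) against a table created with SORT_SCOPE='BATCH_SORT'; the same query shapes stay covered by GlobalSortTestCase below, just against a GLOBAL_SORT table. A minimal sketch of the surviving setup, assuming a QueryTest suite where sql(...) is available (column list abbreviated for illustration):

    // Create the query table with a supported sort scope; after this change
    // the valid scopes are NO_SORT, LOCAL_SORT and GLOBAL_SORT.
    sql(
      s"""CREATE TABLE uniqdataquery1 (CUST_ID int, CUST_NAME string,
         |BIGINT_COLUMN1 bigint)
         |STORED BY 'carbondata'
         |TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT', 'SORT_COLUMNS'='CUST_ID')""".stripMargin)
    // The query-shape checks above (TC_103..TC_119) then run unchanged, e.g.:
    sql("select cust_name from uniqdataquery1 group by cust_name with ROLLUP").collect
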
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala
index 7246fef..ec9cc8d 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala
@@ -1491,7 +1491,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
         | STORED BY 'org.apache.carbondata.format'
-        | tblproperties('dictionary_include'='city','sort_scope'='batch_sort',
+        | tblproperties('dictionary_include'='city','sort_scope'='local_sort',
         | 'sort_columns'='city,name')
       """.stripMargin)
 
@@ -1501,7 +1501,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       case Some(row) => assert(row.get(1).toString.contains("false"))
     }
     descLoc.find(_.get(0).toString.contains("Sort Scope")) match {
-      case Some(row) => assert(row.get(1).toString.contains("batch_sort"))
+      case Some(row) => assert(row.get(1).toString.contains("local_sort"))
     }
   }
   test("test local dictionary custom configuration with other table properties _003") {
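
[Note] The hunk above only swaps the scope value; the verification it relies on is unchanged. For reference, the describe-based pattern (used verbatim again in LocalDictionarySupportCreateTableTest further down) reads the "Sort Scope" row out of DESCRIBE FORMATTED:

    // Sketch of the verification pattern, assuming a table named local1:
    val descLoc = sql("DESCRIBE FORMATTED local1").collect
    descLoc.find(_.get(0).toString.contains("Sort Scope")) match {
      case Some(row) => assert(row.get(1).toString.contains("local_sort"))
      case None => assert(false, "Sort Scope row missing from describe output")
    }
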
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/GlobalSortTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/GlobalSortTestCase.scala
index 2c3ebca..84a8f09 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/GlobalSortTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/GlobalSortTestCase.scala
@@ -250,7 +250,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
 
   //Carbon-Loading-Optimizations-Global-Sort-01-01-21
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-21", Include) {
-    sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdata11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
 
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdata11 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
@@ -425,7 +425,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
 
   //Carbon-Loading-Optimizations-Global-Sort-01-01-39
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-39", Include) {
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select count(*) from uniqdataquery1 where cust_name="CUST_NAME_00000" group by cust_name""").collect
@@ -436,7 +436,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
 
   //Carbon-Loading-Optimizations-Global-Sort-01-01-40
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-40", Include) {
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select count(*) from uniqdataquery1 where cust_name IN(1,2,3) group by cust_name""").collect
@@ -447,7 +447,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
 
   //Carbon-Loading-Optimizations-Global-Sort-01-01-41
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-41", Include) {
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select * from uniqdataquery1 where cust_id between 9002 and 9030""").collect
@@ -459,7 +459,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   //Carbon-Loading-Optimizations-Global-Sort-01-01-42
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-42", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     intercept[Exception] {
@@ -473,7 +473,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   //Carbon-Loading-Optimizations-Global-Sort-01-01-43
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-43", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',' ,'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select * from uniqdataquery1 where cust_id IS NOT NULL""").collect
@@ -485,7 +485,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   //Carbon-Loading-Optimizations-Global-Sort-01-01-44
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-44", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select * from (select cust_id from uniqdataquery1 where cust_id IN (10987,10988)) uniqdataquery1 where cust_id IN (10987, 10988)""").collect
@@ -498,10 +498,10 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-45", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
     sql(s"""drop table if exists uniqdataquery11""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     sql(s"""drop table if exists uniqdataquery11""").collect
-    sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdataquery11 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 join uniqdataquery11 where uniqdataquery1.CUST_ID > 10700 and uniqdataquery11.CUST_ID > 10500""").collect
@@ -515,10 +515,10 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-46", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
     sql(s"""drop table if exists uniqdataquery11""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     sql(s"""drop table if exists uniqdataquery11""").collect
-    sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdataquery11 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 LEFT join uniqdataquery11 where uniqdataquery1.CUST_ID > 10000""").collect
@@ -531,10 +531,10 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-47", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
     sql(s"""drop table if exists uniqdataquery11""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     sql(s"""drop table if exists uniqdataquery11""").collect
-    sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdataquery11 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select uniqdataquery1.CUST_ID from uniqdataquery1 FULL JOIN uniqdataquery11 where uniqdataquery1.CUST_ID=uniqdataquery11.CUST_ID""").collect
@@ -547,10 +547,10 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-48", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
     sql(s"""drop table if exists uniqdataquery11""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
     sql(s"""drop table if exists uniqdataquery11""").collect
-    sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery11 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/batchsort/1000_UniqData.csv' into table uniqdataquery11 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select broadcast.cust_id from uniqdataquery1 broadcast join uniqdataquery11 where broadcast.cust_id > 10900""").collect
@@ -562,7 +562,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   //Carbon-Loading-Optimizations-Global-Sort-01-01-49
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-49", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select * from uniqdataquery1 where cust_id > 10544 sort by cust_id asc""").collect
@@ -574,7 +574,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   //Carbon-Loading-Optimizations-Global-Sort-01-01-50
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-50", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select * from uniqdataquery1 where cust_id > 10544 sort by cust_name desc""").collect
@@ -586,7 +586,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   //Carbon-Loading-Optimizations-Global-Sort-01-01-51
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-51", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select * from uniqdataquery1 where cust_id > 10544 sort by cust_name desc, cust_id asc""").collect
@@ -598,7 +598,7 @@ class GlobalSortTestCase extends QueryTest with BeforeAndAfterAll with BeforeAnd
   //Carbon-Loading-Optimizations-Global-Sort-01-01-52
   test("Carbon-Loading-Optimizations-Global-Sort-01-01-52", Include) {
     sql(s"""drop table if exists uniqdataquery1""").collect
-    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""").collect
+    sql(s"""CREATE TABLE uniqdataquery1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table uniqdataquery1 OPTIONS('DELIMITER'=',','QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
 
     sql(s"""select cust_id,avg(cust_id) from uniqdataquery1 where cust_id IN (select cust_id from uniqdataquery1) group by cust_id""").collect
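
[Note] Every GlobalSortTestCase hunk above changes only the table's SORT_SCOPE property; the LOAD DATA statements are untouched, since sort scope here is a table property rather than a load option. Where the number of sorted output files under GLOBAL_SORT matters, the load can additionally pin the shuffle partition count. A hedged sketch, assuming the documented GLOBAL_SORT_PARTITIONS load option (the value 2 is purely illustrative):

    sql(
      s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv'
         |INTO TABLE uniqdataquery1
         |OPTIONS('DELIMITER'=',', 'QUOTECHAR'='"',
         |'GLOBAL_SORT_PARTITIONS'='2')""".stripMargin)
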
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/PartitionWithPreAggregateTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/PartitionWithPreAggregateTestCase.scala
index 87f5fda..23d5924 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/PartitionWithPreAggregateTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/PartitionWithPreAggregateTestCase.scala
@@ -70,26 +70,6 @@ class PartitionWithPreAggregateTestCase extends QueryTest with BeforeAndAfterAll
       Seq(Row(159.10)))
   }
 
-  //Loading data into partitioned table with SORT_SCOPE=BATCH_SORT
-  test("Partition-With-PreAggregate_TC003", Include) {
-    sql("drop table if exists partition_table")
-    sql(
-      s"""CREATE TABLE partition_table(shortField SHORT, intField INT, bigintField LONG,
-         |doubleField DOUBLE, timestamp TIMESTAMP, decimalField DECIMAL(18,2),dateField DATE,
-         |charField CHAR(5), floatField FLOAT ) PARTITIONED BY (stringField STRING) STORED BY
-         |'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""".stripMargin)
-    sql(
-      s"""load data inpath '$resourcesPath/Data/partition/list_partition_table.csv' into table
-         |partition_table""".stripMargin)
-    sql(
-      "create datamap ag1 on table partition_table using 'preaggregate' as select shortField, sum" +
-      "(intField) from partition_table group by shortField")
-    checkAnswer(sql(
-      s"""select decimalfield from partition_table where charfield='e' and
-         |floatfield=307.301 group by decimalfield limit 1""".stripMargin),
-      Seq(Row(159.10)))
-  }
-
   //Loading data into partitioned table with SORT_SCOPE=NO_SORT
   test("Partition-With-PreAggregate_TC004", Include) {
     sql("drop table if exists partition_table")
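
[Note] The removed TC003 was the BATCH_SORT variant of a flow that TC002/TC004 keep for the remaining scopes: create a partitioned table, load it, build a preaggregate datamap, query through it. For reference, a sketch of that flow under LOCAL_SORT, reusing the table and datamap names from the removed test (column list abbreviated):

    sql("drop table if exists partition_table")
    sql(
      s"""CREATE TABLE partition_table(shortField SHORT, intField INT)
         |PARTITIONED BY (stringField STRING)
         |STORED BY 'carbondata'
         |TBLPROPERTIES('SORT_SCOPE'='LOCAL_SORT')""".stripMargin)
    sql(
      "create datamap ag1 on table partition_table using 'preaggregate' as " +
      "select shortField, sum(intField) from partition_table group by shortField")
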
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
index 499c478..d6a9413 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
@@ -331,7 +331,7 @@ class SDKwriterTestCase extends QueryTest with BeforeAndAfterEach {
       sql(
         s"""CREATE EXTERNAL TABLE sdkTable STORED BY
            |'carbondata' LOCATION
-           |'$writerPath' TBLPROPERTIES('sort_scope'='batch_sort') """.stripMargin)
+           |'$writerPath' TBLPROPERTIES('sort_scope'='local_sort') """.stripMargin)
     }
     assert(ex.message.contains("Table properties are not supported for external table"))
   }
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
index 6f9df82..2f531d7 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnExcudeDictTestCase.scala
@@ -419,7 +419,7 @@ class SortColumnExcudeDictTestCase extends QueryTest with BeforeAndAfterAll {
 
   override protected def beforeAll() {
     // Adding new properties
-    prop.addProperty("carbon.load.sort.scope", "batch_sort")
+    prop.addProperty("carbon.load.sort.scope", "local_sort")
     prop.addProperty("enable.unsafe.sort", "true")
     prop.addProperty("enable.offheap.sort", "true")
   }
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnTestCase.scala
index dfaa02d..3c554ff 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SortColumnTestCase.scala
@@ -451,7 +451,7 @@ class SortColumnTestCase extends QueryTest with BeforeAndAfterAll {
 
   override protected def beforeAll() {
     // Adding new properties
-    prop.addProperty("carbon.load.sort.scope", "batch_sort")
+    prop.addProperty("carbon.load.sort.scope", "local_sort")
     prop.addProperty("enable.unsafe.sort", "true")
     prop.addProperty("enable.offheap.sort", "true")
   }
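
[Note] Both sort-column suites now pin carbon.load.sort.scope to local_sort process-wide in beforeAll. The scope can also be set per session instead; a hedged sketch, assuming the carbon.options.sort.scope session property exercised elsewhere by SetCommandTestCase:

    // Session-scoped alternative to prop.addProperty(...):
    sql("SET carbon.options.sort.scope=local_sort")
    // ... run loads ...
    sql("RESET")  // clear session overrides so later suites are unaffected
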
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/StandardPartitionTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/StandardPartitionTestCase.scala
index c941363..855b560 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/StandardPartitionTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/StandardPartitionTestCase.scala
@@ -126,14 +126,6 @@ class StandardPartitionTestCase extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql(s"""select count(*) from partition_table"""), Seq(Row(10)))
   }
 
-  //Loading data into partitioned table with SORT_SCOPE=BATCH_SORT
-  test("Standard-Partition_TC012", Include) {
-    sql(s"""drop table if exists partition_table""")
-    sql(s"""CREATE TABLE partition_table(shortField SHORT, intField INT, bigintField LONG, doubleField DOUBLE, timestamp TIMESTAMP, decimalField DECIMAL(18,2),dateField DATE, charField CHAR(5), floatField FLOAT ) PARTITIONED BY (stringField STRING) STORED BY 'carbondata' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')""")
-    sql(s"""load data inpath '$resourcesPath/Data/partition/list_partition_table.csv' into table partition_table""")
-    checkAnswer(sql(s"""select count(*) from partition_table"""), Seq(Row(10)))
-  }
-
   //Loading data into partitioned table with SORT_SCOPE=NO_SORT
   test("Standard-Partition_TC013", Include) {
     sql(s"""drop table if exists partition_table""")
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
index 7e25dcd..dd32356 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
@@ -31,9 +31,6 @@ class SDVSuites extends Suites with BeforeAndAfterAll {
 
   val suites = new AlterTableTestCase ::
                new BadRecordTestCase ::
-               new BatchSortLoad1TestCase ::
-               new BatchSortLoad2TestCase ::
-               new BatchSortQueryTestCase ::
                new ColumndictTestCase ::
                new CreateTableAsSelectTestCase ::
                new DataLoadingTestCase ::
@@ -43,7 +40,6 @@ class SDVSuites extends Suites with BeforeAndAfterAll {
                new OffheapQuery2TestCase ::
                new OffheapSort1TestCase ::
                new OffheapSort2TestCase ::
-               new PartitionTestCase ::
                new QueriesBasicTestCase ::
                new QueriesBVATestCase ::
                new QueriesCompactionTestCase ::
@@ -84,13 +80,9 @@ class SDVSuites1 extends Suites with BeforeAndAfterAll {
 
   val suites = new BadRecordTestCase ::
                new ComplexDataTypeTestCase ::
-               new BatchSortLoad1TestCase ::
-               new BatchSortQueryTestCase ::
                new DataLoadingTestCase ::
                new OffheapSort2TestCase ::
-               new PartitionTestCase ::
                new QueriesBasicTestCase ::
-               new BatchSortLoad3TestCase ::
                new GlobalSortTestCase ::
                new MergeIndexTestCase :: Nil
 
@@ -143,7 +135,6 @@ class SDVSuites3 extends Suites with BeforeAndAfterAll {
 class SDVSuites4 extends Suites with BeforeAndAfterAll {
 
   val suites = new AlterTableTestCase ::
-               new BatchSortLoad2TestCase ::
                new BucketingTestCase ::
                new CreateTableAsSelectTestCase ::
                new InvertedindexTestCase ::
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala
index 110319a..6272127 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala
@@ -29,7 +29,6 @@ class TestCreateTableWithSortScope extends QueryTest with BeforeAndAfterAll {
     sql("use default")
     sql("DROP TABLE IF EXISTS tableWithGlobalSort")
     sql("DROP TABLE IF EXISTS tableWithLocalSort")
-    sql("DROP TABLE IF EXISTS tableWithBatchSort")
     sql("DROP TABLE IF EXISTS tableWithNoSort")
     sql("DROP TABLE IF EXISTS tableWithUnsupportSortScope")
   }
@@ -63,18 +62,6 @@ class TestCreateTableWithSortScope extends QueryTest with BeforeAndAfterAll {
 
     sql(
       s"""
-         | CREATE TABLE tableWithBatchSort(
-         | intField INT,
-         | stringField STRING
-         | )
-         | STORED BY 'carbondata'
-         | TBLPROPERTIES('SORT_COLUMNS'='stringField', 'SORT_SCOPE'='BATCH_SORT')
-       """.stripMargin)
-
-    checkExistence(sql("DESCRIBE FORMATTED tableWithBatchSort"), true, "BATCH_SORT")
-
-    sql(
-      s"""
          | CREATE TABLE tableWithNoSort(
          | intField INT,
          | stringField STRING
@@ -100,14 +87,13 @@ class TestCreateTableWithSortScope extends QueryTest with BeforeAndAfterAll {
     }
     assert(exception_unsupported_sortscope.getMessage.contains(
       "Passing invalid SORT_SCOPE 'abc', valid SORT_SCOPE are 'NO_SORT'," +
-      " 'BATCH_SORT', 'LOCAL_SORT' and 'GLOBAL_SORT' "))
+      " 'LOCAL_SORT' and 'GLOBAL_SORT' "))
   }
 
   override def afterAll: Unit = {
     sql("use default")
     sql("DROP TABLE IF EXISTS tableWithGlobalSort")
     sql("DROP TABLE IF EXISTS tableWithLocalSort")
-    sql("DROP TABLE IF EXISTS tableWithBatchSort")
     sql("DROP TABLE IF EXISTS tableWithNoSort")
     sql("DROP TABLE IF EXISTS tableWithUnsupportSortScope")
     sql("DROP TABLE IF EXISTS tableLoadWithSortScope")
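
[Note] With BATCH_SORT dropped from SortScopeOptions, a table created with that scope should now fail the same validation as any other bad value, per the updated message in the hunk above. A sketch of the negative case, assuming 'BATCH_SORT' falls through to the invalid-scope check exactly as 'abc' does:

    val ex = intercept[Exception] {
      sql(
        s"""CREATE TABLE tableWithBatchSort(intField INT, stringField STRING)
           |STORED BY 'carbondata'
           |TBLPROPERTIES('SORT_COLUMNS'='stringField',
           |'SORT_SCOPE'='BATCH_SORT')""".stripMargin)
    }
    assert(ex.getMessage.contains("Passing invalid SORT_SCOPE 'BATCH_SORT'"))
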
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala
deleted file mode 100644
index 412fd37..0000000
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.dataload
-
-import java.io.{BufferedWriter, File, FileWriter, FilenameFilter}
-
-import org.apache.spark.sql.Row
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.spark.sql.test.util.QueryTest
-
-import org.apache.carbondata.core.datamap.Segment
-import org.apache.carbondata.core.datastore.impl.FileFactory
-import org.apache.carbondata.core.indexstore.blockletindex.SegmentIndexFileStore
-import org.apache.carbondata.core.metadata.{CarbonMetadata, SegmentFileStore}
-import org.apache.carbondata.core.metadata.schema.table.CarbonTable
-import org.apache.carbondata.core.util.path.CarbonTablePath
-
-class TestBatchSortDataLoad extends QueryTest with BeforeAndAfterAll {
-  var filePath: String = _
-
-  def buildTestData() = {
-    filePath = s"${integrationPath}/spark-common-test/target/big.csv"
-    val file = new File(filePath)
-    val writer = new BufferedWriter(new FileWriter(file))
-    writer.write("c1,c2,c3, c4, c5, c6, c7, c8, c9, c10")
-    writer.newLine()
-    for(i <- 0 until 100000) {
-      writer.write("a" + i%1000 + "," +
-                   "b" + i%1000 + "," +
-                   "c" + i%1000 + "," +
-                   "d" + i%1000 + "," +
-                   "e" + i%1000 + "," +
-                   "f" + i%1000 + "," +
-                   i%1000 + "," +
-                   i%1000 + "," +
-                   i%1000 + "," +
-                   i%1000 + "\n")
-      if ( i % 10000 == 0) {
-        writer.flush()
-      }
-    }
-    writer.close()
-  }
-
-  def dropTable() = {
-    sql("DROP TABLE IF EXISTS carbon_load1")
-    sql("DROP TABLE IF EXISTS carbon_load2")
-    sql("DROP TABLE IF EXISTS carbon_load3")
-    sql("DROP TABLE IF EXISTS carbon_load4")
-    sql("DROP TABLE IF EXISTS carbon_load5")
-    sql("DROP TABLE IF EXISTS carbon_load6")
-  }
-
-  override def beforeAll {
-    dropTable
-    buildTestData
-  }
-  
-  test("test batch sort load by passing option to load command") {
-
-    sql(
-      """
-        | CREATE TABLE carbon_load1(c1 string, c2 string, c3 string, c4 string, c5 string,
-        | c6 string, c7 int, c8 int, c9 int, c10 int)
-        | STORED BY 'org.apache.carbondata.format'
-        | TBLPROPERTIES('dictionary_include'='c1,c2,c3,c4,c5,c6',
-        | 'sort_scope'='batch_sort', 'sort_columns'='c1,c2,c3,c4,c5,c6')
-      """.stripMargin)
-
-    sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load1 " +
-        s"OPTIONS('batch_sort_size_inmb'='1')")
-
-    checkAnswer(sql("select count(*) from carbon_load1"), Seq(Row(100000)))
-
-    assert(getIndexfileCount("carbon_load1") == 5, "Something wrong in batch sort")
-  }
-
-  test("test batch sort load by passing option to load command and compare with normal load") {
-
-    sql(
-      """
-        | CREATE TABLE carbon_load2(c1 string, c2 string, c3 string, c4 string, c5 string,
-        | c6 string, c7 int, c8 int, c9 int, c10 int)
-        | STORED BY 'org.apache.carbondata.format'
-      """.stripMargin)
-
-    sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load2 ")
-
-    checkAnswer(sql("select * from carbon_load1 where c1='a1' order by c1"),
-      sql("select * from carbon_load2 where c1='a1' order by c1"))
-
-  }
-
-  test("test batch sort load by passing option and compaction") {
-
-    sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load1 " +
-        s"OPTIONS('batch_sort_size_inmb'='1')")
-    sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load1 " +
-        s"OPTIONS('batch_sort_size_inmb'='1')")
-    sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load1 " +
-        s"OPTIONS('batch_sort_size_inmb'='1')")
-    sql("alter table carbon_load1 compact 'major'")
-    Thread.sleep(4000)
-    checkAnswer(sql("select count(*) from carbon_load1"), Seq(Row(400000)))
-
-    assert(getIndexfileCount("carbon_load1", "0.1") == 1, "Something wrong in compaction after batch sort")
-
-  }
-
-  test("test batch sort load by passing option with single pass") {
-
-    sql(
-      """
-        | CREATE TABLE carbon_load3(c1 string, c2 string, c3 string, c4 string, c5 string,
-        | c6 string, c7 int, c8 int, c9 int, c10 int)
-        | STORED BY 'org.apache.carbondata.format'
-        | TBLPROPERTIES('dictionary_include'='c1,c2,c3,c4,c5,c6',
-        | 'sort_scope'='batch_sort', 'sort_columns'='c1,c2,c3,c4,c5,c6')
-      """.stripMargin)
-
-    sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load3 " +
-        s"OPTIONS('batch_sort_size_inmb'='1', 'single_pass'='true')")
-
-    checkAnswer(sql("select count(*) from carbon_load3"), Seq(Row(100000)))
-
-    assert(getIndexfileCount("carbon_load3") == 5, "Something wrong in batch sort")
-
-    checkAnswer(sql("select * from carbon_load3 where c1='a1' order by c1"),
-      sql("select * from carbon_load2 where c1='a1' order by c1"))
-
-  }
-
-  test("test batch sort load by with out passing option but through carbon properties") {
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "BATCH_SORT")
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB, "1")
-    sql(
-      """
-        | CREATE TABLE carbon_load4(c1 string, c2 string, c3 string, c4 string, c5 string,
-        | c6 string, c7 int, c8 int, c9 int, c10 int)
-        | STORED BY 'org.apache.carbondata.format'
-        | TBLPROPERTIES('dictionary_include'='c1,c2,c3,c4,c5,c6', 'sort_columns'='c1,c2,c3,c4,c5,c6')
-      """.stripMargin)
-
-    sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load4 " )
-
-    checkAnswer(sql("select count(*) from carbon_load4"), Seq(Row(100000)))
-
-    assert(getIndexfileCount("carbon_load4") == 5, "Something wrong in batch sort")
-    CarbonProperties.getInstance().
-      addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
-        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB, "0")
-  }
-
-  test("test batch sort load by with out passing option but through carbon properties with default size") {
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "BATCH_SORT")
-    sql(
-      """
-        | CREATE TABLE carbon_load6(c1 string, c2 string, c3 string, c4 string, c5 string,
-        | c6 string, c7 int, c8 int, c9 int, c10 int)
-        | STORED BY 'org.apache.carbondata.format'
-        | TBLPROPERTIES('dictionary_include'='c1,c2,c3,c4,c5,c6')
-      """.stripMargin)
-
-    sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load6 " )
-
-    checkAnswer(sql("select count(*) from carbon_load6"), Seq(Row(100000)))
-
-    assert(getIndexfileCount("carbon_load6") == 1, "Something wrong in batch sort")
-    CarbonProperties.getInstance().
-      addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
-        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
-  }
-
-  def getIndexfileCount(tableName: String, segmentNo: String = "0"): Int = {
-    val carbonTable = CarbonMetadata.getInstance().getCarbonTable("default", tableName)
-    val segmentDir = CarbonTablePath.getSegmentPath(carbonTable.getTablePath, segmentNo)
-    if (FileFactory.isFileExist(segmentDir)) {
-      new SegmentIndexFileStore().getIndexFilesFromSegment(segmentDir).size()
-    } else {
-      val segment = Segment.getSegment(segmentNo, carbonTable.getTablePath)
-      new SegmentFileStore(carbonTable.getTablePath, segment.getSegmentFileName).getIndexCarbonFiles.size()
-    }
-  }
-
-  override def afterAll {
-    dropTable
-    new File(filePath).delete()
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
-        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
-  }
-}
-
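
[Note] The deleted suite above was the only coverage driving loads through LOAD_SORT_SCOPE=BATCH_SORT and the batch_sort_size_inmb option. Property-driven sort-scope configuration itself survives the removal; a sketch of the remaining pattern with a supported scope, using the same constants the deleted file used:

    // Process-wide sort scope, as in the deleted test's setup:
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "LOCAL_SORT")
    // ... create table / load data / assert ...
    // Always restore the default so later suites are unaffected,
    // mirroring the deleted afterAll:
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
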
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
index e0ecc4d..ee67c50 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
@@ -1523,7 +1523,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
         | STORED BY 'org.apache.carbondata.format'
-        | tblproperties('dictionary_include'='city','sort_scope'='batch_sort',
+        | tblproperties('dictionary_include'='city','sort_scope'='local_sort',
         | 'sort_columns'='city,name')
       """.stripMargin)
 
@@ -1534,7 +1534,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
       case None => assert(false)
     }
     descLoc.find(_.get(0).toString.contains("Sort Scope")) match {
-      case Some(row) => assert(row.get(1).toString.contains("batch_sort"))
+      case Some(row) => assert(row.get(1).toString.contains("local_sort"))
       case None => assert(false)
     }
   }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
index bbd58c0..a6a48d5 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
@@ -92,10 +92,10 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
       " workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, " +
       "projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int," +
       "utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties" +
-      "('dictionary_exclude'='empno','sort_columns'='empno', 'SORT_SCOPE'='BATCH_SORT')")
+      "('dictionary_exclude'='empno','sort_columns'='empno', 'SORT_SCOPE'='local_sort')")
     sql(
       s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE sorttable1a OPTIONS
-         |('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'batch_sort_size_inmb'='64')""".stripMargin)
+         |('DELIMITER'= ',', 'QUOTECHAR'= '\"')""".stripMargin)
     checkAnswer(sql("select empname from sorttable1a"),
       sql("select empname from origintable1 order by empname"))
   }
@@ -111,10 +111,10 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
       "projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int," +
       "utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties" +
       "('dictionary_exclude'='empno,empname,workgroupcategoryname','sort_columns'='empno,empname'," +
-      "'SORT_SCOPE'='BATCH_SORT')")
+      "'SORT_SCOPE'='local_sort')")
     sql(
       s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE sorttable1b OPTIONS
-          |('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'batch_sort_size_inmb'='64')""".stripMargin)
+          |('DELIMITER'= ',', 'QUOTECHAR'= '\"')""".stripMargin)
     checkAnswer(sql("select empname from sorttable1b"),
       sql("select empname from origintable1 order by empname"))
   }
@@ -133,7 +133,7 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
 
   test("create table with multi-sort_columns and data loading with offheap safe") {
     try {
-      setLoadingProperties("true", "false", "false")
+      setLoadingProperties("true", "false")
       sql("CREATE TABLE sorttable4_offheap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE sorttable4_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
       checkAnswer(sql("select workgroupcategory, empname from sorttable4_offheap_safe"), sql("select workgroupcategory, empname from origintable1 order by workgroupcategory"))
@@ -144,7 +144,7 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
 
   test("create table with multi-sort_columns and data loading with offheap and unsafe sort") {
     try {
-      setLoadingProperties("true", "true", "false")
+      setLoadingProperties("true", "true")
       sql(
         "CREATE TABLE sorttable4_offheap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE sorttable4_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
@@ -155,22 +155,9 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
     }
   }
 
-  test("create table with multi-sort_columns and data loading with offheap and inmemory sort") {
-    try {
-      setLoadingProperties("true", "false", "true")
-      sql(
-        "CREATE TABLE sorttable4_offheap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')")
-      sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE sorttable4_offheap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
-      checkAnswer(sql("select workgroupcategory, empname from sorttable4_offheap_inmemory"),
-        sql("select workgroupcategory, empname from origintable1 order by workgroupcategory"))
-    } finally {
-      defaultLoadingProperties
-    }
-  }
-
   test("create table with multi-sort_columns and data loading with heap") {
     try {
-      setLoadingProperties("false", "false", "false")
+      setLoadingProperties("false", "false")
       sql(
         "CREATE TABLE sorttable4_heap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE sorttable4_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
@@ -183,7 +170,7 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
 
   test("create table with multi-sort_columns and data loading with heap and unsafe sort") {
     try {
-      setLoadingProperties("false", "true", "false")
+      setLoadingProperties("false", "true")
       sql(
         "CREATE TABLE sorttable4_heap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE sorttable4_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
@@ -194,19 +181,6 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
     }
   }
 
-  test("create table with multi-sort_columns and data loading with heap and inmemory sort") {
-    try {
-      setLoadingProperties("false", "false", "true")
-      sql(
-        "CREATE TABLE sorttable4_heap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='workgroupcategory, empname')")
-      sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE sorttable4_heap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
-      checkAnswer(sql("select workgroupcategory, empname from sorttable4_heap_inmemory"),
-        sql("select workgroupcategory, empname from origintable1 order by workgroupcategory"))
-    } finally {
-      defaultLoadingProperties
-    }
-  }
-
   test("compaction on sort_columns table") {
     sql("CREATE TABLE origintable2 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE origintable2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
@@ -241,7 +215,7 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
       sql("drop table if exists origintable1")
       sql("CREATE TABLE origintable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE origintable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
-      setLoadingProperties("false", "false", "false")
+      setLoadingProperties("false", "false")
       sql("CREATE TABLE unsortedtable_heap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE unsortedtable_heap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'TIMESTAMPFORMAT'='dd-MM-yyyy')""")
       checkAnswer(sql("select * from unsortedtable_heap_safe where empno = 11"), sql("select * from origintable1 where empno = 11"))
@@ -256,7 +230,7 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
       sql("drop table if exists origintable1")
       sql("CREATE TABLE origintable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE origintable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
-      setLoadingProperties("false", "true", "false")
+      setLoadingProperties("false", "true")
       sql("CREATE TABLE unsortedtable_heap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE unsortedtable_heap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'TIMESTAMPFORMAT'='dd-MM-yyyy')""")
       checkAnswer(sql("select * from unsortedtable_heap_unsafe where empno = 11"), sql("select * from origintable1 where empno = 11"))
@@ -266,27 +240,12 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
     }
   }
 
-  test("unsorted table creation, query and loading with heap and inmemory sort config") {
-    try {
-      sql("drop table if exists origintable1")
-      sql("CREATE TABLE origintable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
-      sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE origintable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
-      setLoadingProperties("false", "false", "true")
-      sql("CREATE TABLE unsortedtable_heap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')")
-      sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE unsortedtable_heap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'TIMESTAMPFORMAT'='dd-MM-yyyy')""")
-      checkAnswer(sql("select * from unsortedtable_heap_inmemory where empno = 11"), sql("select * from origintable1 where empno = 11"))
-      checkAnswer(sql("select * from unsortedtable_heap_inmemory order by empno"), sql("select * from origintable1 order by empno"))
-    } finally {
-      defaultLoadingProperties
-    }
-  }
-
   test("unsorted table creation, query and data loading with offheap and safe sort config") {
     try {
       sql("drop table if exists origintable1")
       sql("CREATE TABLE origintable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE origintable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
-      setLoadingProperties("true", "false", "false")
+      setLoadingProperties("true", "false")
       sql("CREATE TABLE unsortedtable_offheap_safe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE unsortedtable_offheap_safe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'TIMESTAMPFORMAT'='dd-MM-yyyy')""")
       checkAnswer(sql("select * from unsortedtable_offheap_safe where empno = 11"), sql("select * from origintable1 where empno = 11"))
@@ -301,7 +260,7 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
       sql("drop table if exists origintable1")
       sql("CREATE TABLE origintable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE origintable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
-      setLoadingProperties("true", "true", "false")
+      setLoadingProperties("true", "true")
       sql("CREATE TABLE unsortedtable_offheap_unsafe (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')")
       sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE unsortedtable_offheap_unsafe OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'TIMESTAMPFORMAT'='dd-MM-yyyy')""")
       checkAnswer(sql("select * from unsortedtable_offheap_unsafe where empno = 11"), sql("select * from origintable1 where empno = 11"))
@@ -311,29 +270,6 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
     }
   }
 
-  test("unsorted table creation, query and data loading with offheap and inmemory sort config") {
-    try {
-      sql("drop table if exists origintable1")
-      sql("CREATE TABLE origintable1 (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
-      sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE origintable1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')""")
-      setLoadingProperties("true", "false", "true")
-      sql("CREATE TABLE unsortedtable_offheap_inmemory (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format' tblproperties('sort_columns'='')")
-      sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE unsortedtable_offheap_inmemory OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'TIMESTAMPFORMAT'='dd-MM-yyyy')""")
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory where empno = 11"), sql("select * from origintable1 where empno = 11"))
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory where empno < 15 order by empno"), sql("select * from origintable1 where empno < 15 order by empno"))
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory where empno <= 15 order by empno"), sql("select * from origintable1 where empno <= 15 order by empno"))
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory where empno > 15 order by empno"), sql("select * from origintable1 where empno > 15 order by empno"))
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory where empno >= 15 order by empno"), sql("select * from origintable1 where empno >= 15 order by empno"))
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory where empno <> 15 order by empno"), sql("select * from origintable1 where empno <> 15 order by empno"))
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory where empno in (15, 16, 17) order by empno"), sql("select * from origintable1 where empno in (15, 16, 17) order by empno"))
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory where empno is null"), sql("select * from origintable1 where empno is null order by empno"))
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory where empno is not null"), sql("select * from origintable1 where empno is not null order by empno"))
-      checkAnswer(sql("select * from unsortedtable_offheap_inmemory order by empno"), sql("select * from origintable1 order by empno"))
-    } finally {
-      defaultLoadingProperties
-    }
-  }
-
   test("create table with invalid values for numeric data type columns specified as sort_columns") {
     // load hive data
     sql("CREATE TABLE test_sort_col_hive (id INT, name STRING, age INT) row format delimited fields terminated by ','")
@@ -444,12 +380,9 @@ class TestSortColumns extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS tableTwo")
   }
 
-  def setLoadingProperties(offheap: String, unsafe: String, useBatch: String): Unit = {
+  def setLoadingProperties(offheap: String, unsafe: String): Unit = {
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT, offheap)
-    if (useBatch.equalsIgnoreCase("true")) {
-      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_UNSAFE_SORT, "BATCH_SORT")
-    }
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, useBatch)
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_UNSAFE_SORT, unsafe)
   }
 
   def defaultLoadingProperties = {
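With the batch-sort parameter gone, the matching reset helper only has to restore the two remaining properties. A minimal sketch, assuming the ENABLE_OFFHEAP_SORT_DEFAULT and ENABLE_UNSAFE_SORT_DEFAULT constants in CarbonCommonConstants hold the defaults (ENABLE_UNSAFE_SORT_DEFAULT is referenced elsewhere in this patch; the offheap default constant is an assumption):

    // Sketch of a reset counterpart to the two-argument setLoadingProperties
    // above; restores both sort-related properties to their defaults.
    import org.apache.carbondata.core.constants.CarbonCommonConstants
    import org.apache.carbondata.core.util.CarbonProperties

    def defaultLoadingProperties: Unit = {
      CarbonProperties.getInstance()
        .addProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT,
          CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT) // assumed constant
      CarbonProperties.getInstance()
        .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_SORT,
          CarbonCommonConstants.ENABLE_UNSAFE_SORT_DEFAULT)
    }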
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala
index 136474e..ec05108 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala
@@ -285,7 +285,7 @@ class TestSortColumnsWithUnsafe extends QueryTest with BeforeAndAfterAll with Be
   def setLoadingProperties(offheap: String, unsafe: String, useBatch: String): Unit = {
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT, offheap)
     if (useBatch.equalsIgnoreCase("true")) {
-      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_UNSAFE_SORT, "BATCH_SORT")
+      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_UNSAFE_SORT, "true")
     }
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, useBatch)
   }
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionGlobalSortTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionGlobalSortTestCase.scala
index 564c5f3..f89f84d 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionGlobalSortTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionGlobalSortTestCase.scala
@@ -939,19 +939,19 @@ class StandardPartitionGlobalSortTestCase extends QueryTest with BeforeAndAfterA
     assert(exMessage.getMessage.contains("day is not a valid partition column in table default.partitionnocolumn"))
   }
 
-  ignore("data loading with default partition in static partition table with batchsort") {
-    sql("DROP TABLE IF EXISTS partitiondefaultbatchsort")
+  test("data loading with default partition in static partition table with local_sort") {
+    sql("DROP TABLE IF EXISTS partitiondefaultlocalsort")
     sql(
       """
-        | CREATE TABLE partitiondefaultbatchsort (empno int, designation String,
+        | CREATE TABLE partitiondefaultlocalsort (empno int, designation String,
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectenddate Timestamp,attendance int,
         |  utilization int, doj Timestamp, empname String)
         | PARTITIONED BY (projectjoindate Timestamp, salary decimal)
-        | STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('SORT_SCOPE'='BATCH_SORT')
+        | STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('SORT_SCOPE'='local_sort')
       """.stripMargin)
-    sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitiondefaultbatchsort OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
-    checkAnswer(sql("select count(*) from partitiondefaultbatchsort"), Seq(Row(10)))
+    sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitiondefaultlocalsort OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
+    checkAnswer(sql("select count(*) from partitiondefaultlocalsort"), Seq(Row(10)))
   }
 
   test("data loading with default partition in static partition table with nosort") {
@@ -1087,5 +1087,6 @@ class StandardPartitionGlobalSortTestCase extends QueryTest with BeforeAndAfterA
     sql("drop table if exists partitiondatadelete")
     sql("drop table if exists comp_dt2")
     sql("drop table if exists partitionNoColumn")
+    sql("drop table if exists partitiondefaultlocalsort")
   }
 }
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
index 787b591..2e4363d 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
@@ -740,7 +740,7 @@ object CommonUtil {
       if (!CarbonUtil.isValidSortOption(sortScopeOption.get)) {
         throw new MalformedCarbonCommandException(
           s"Invalid SORT_SCOPE ${ sortScopeOption.get }, " +
-          s"valid SORT_SCOPE are 'NO_SORT', 'BATCH_SORT', 'LOCAL_SORT' and 'GLOBAL_SORT'")
+          s"valid SORT_SCOPE are 'NO_SORT', 'LOCAL_SORT' and 'GLOBAL_SORT'")
       }
     }
   }
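The validation above delegates to CarbonUtil.isValidSortOption, which now only has three scopes left to accept. A standalone sketch of the equivalent check (illustrative only; the real method lives in org.apache.carbondata.core.util.CarbonUtil):

    // Only NO_SORT, LOCAL_SORT and GLOBAL_SORT remain valid after this change.
    def isValidSortOption(sortScope: String): Boolean =
      sortScope != null &&
        Set("NO_SORT", "LOCAL_SORT", "GLOBAL_SORT").contains(sortScope.toUpperCase)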
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index d21e6e5..635afee 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -1082,7 +1082,6 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
       "COMMENTCHAR",
       "DATEFORMAT",
       "BAD_RECORD_PATH",
-      "BATCH_SORT_SIZE_INMB",
       "GLOBAL_SORT_PARTITIONS",
       "SINGLE_PASS",
       "IS_EMPTY_DATA_BAD_RECORD",
@@ -1194,7 +1193,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
       if (!CarbonUtil.isValidSortOption(optionValue)) {
         throw new InvalidConfigurationException(
           s"Passing invalid SORT_SCOPE '$optionValue', valid SORT_SCOPE are 'NO_SORT'," +
-          s" 'BATCH_SORT', 'LOCAL_SORT' and 'GLOBAL_SORT' ")
+          s" 'LOCAL_SORT' and 'GLOBAL_SORT' ")
       }
     }
 
diff --git a/integration/spark2/src/main/commonTo2.2And2.3/org/apache/spark/sql/hive/CarbonSqlConf.scala b/integration/spark2/src/main/commonTo2.2And2.3/org/apache/spark/sql/hive/CarbonSqlConf.scala
index 2128ffd..14231ed 100644
--- a/integration/spark2/src/main/commonTo2.2And2.3/org/apache/spark/sql/hive/CarbonSqlConf.scala
+++ b/integration/spark2/src/main/commonTo2.2And2.3/org/apache/spark/sql/hive/CarbonSqlConf.scala
@@ -14,6 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.spark.sql.hive
 
 import org.apache.spark.sql.SparkSession
@@ -71,13 +72,6 @@ class CarbonSQLConf(sparkSession: SparkSession) {
         .stringConf
         .createWithDefault(carbonProperties.getProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
           CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT))
-    val BATCH_SORT_SIZE_INMB =
-      buildConf(CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB)
-        .doc("Property to specify batch sort size in MB.")
-        .stringConf
-        .createWithDefault(carbonProperties
-          .getProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB,
-            CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB_DEFAULT))
     val SINGLE_PASS =
       buildConf(CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS)
         .doc("Property to enable/disable single_pass.")
@@ -128,9 +122,6 @@ class CarbonSQLConf(sparkSession: SparkSession) {
     sparkSession.conf.set(CarbonLoadOptionConstants.CARBON_OPTIONS_SORT_SCOPE,
       carbonProperties.getProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
         CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT))
-    sparkSession.conf.set(CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB,
-      carbonProperties.getProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB,
-        CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB_DEFAULT))
     sparkSession.conf.set(CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS,
       CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS_DEFAULT.toBoolean)
     sparkSession.conf.set(CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORD_PATH,
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
index 048a1f6..4adb1aa 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala
@@ -92,7 +92,7 @@ case class CarbonCreateTableCommand(
       if (!CarbonUtil.isValidSortOption(sortScope)) {
         throw new InvalidConfigurationException(
           s"Passing invalid SORT_SCOPE '$sortScope', valid SORT_SCOPE are 'NO_SORT'," +
-          s" 'BATCH_SORT', 'LOCAL_SORT' and 'GLOBAL_SORT' ")
+          s" 'LOCAL_SORT' and 'GLOBAL_SORT' ")
       }
 
       if (tableInfo.getFactTable.getListOfColumns.size <= 0) {
diff --git a/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSQLConf.scala b/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSQLConf.scala
index 15ccb0c..413973e 100644
--- a/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSQLConf.scala
+++ b/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSQLConf.scala
@@ -72,13 +72,6 @@ class CarbonSQLConf(sparkSession: SparkSession) {
         .stringConf
         .createWithDefault(carbonProperties.getProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
           CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT))
-    val BATCH_SORT_SIZE_INMB =
-      SQLConfigBuilder(CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB)
-        .doc("Property to specify batch sort size in MB.")
-        .stringConf
-        .createWithDefault(carbonProperties
-          .getProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB,
-            CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB_DEFAULT))
     val SINGLE_PASS =
       SQLConfigBuilder(CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS)
         .doc("Property to enable/disable single_pass.")
@@ -129,9 +122,6 @@ class CarbonSQLConf(sparkSession: SparkSession) {
     sparkSession.conf.set(CarbonLoadOptionConstants.CARBON_OPTIONS_SORT_SCOPE,
       carbonProperties.getProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
         CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT))
-    sparkSession.conf.set(CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB,
-      carbonProperties.getProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB,
-        CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB_DEFAULT))
     sparkSession.conf.set(CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS,
       CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS_DEFAULT.toBoolean)
     sparkSession.conf.set(CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORD_PATH,
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
index a7dceb4..abf044d 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
@@ -95,12 +95,12 @@ class DataLoadFailAllTypeSortTest extends Spark2QueryTest with BeforeAndAfterAll
     }
   }
 
-  test("dataload with LOAD_USE_BATCH_SORT='true' with bad_records_action='FAIL'") {
+  test("dataload with bad_records_action='FAIL'") {
     try {
         CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
       CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "batch_sort")
+        .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "local_sort")
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FAIL")
       sql("create table data_bm(name String, dob long, weight int) " +
@@ -124,12 +124,12 @@ class DataLoadFailAllTypeSortTest extends Spark2QueryTest with BeforeAndAfterAll
     }
   }
 
-  test("dataload with LOAD_USE_BATCH_SORT='true' with bad_records_action='FORCE'") {
+  test("dataload with bad_records_action='FORCE'") {
     try {
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
       CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "BATCH_SORT")
+        .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "local_sort")
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FORCE")
       sql("create table data_bmf(name String, dob long, weight int) " +
@@ -155,12 +155,12 @@ class DataLoadFailAllTypeSortTest extends Spark2QueryTest with BeforeAndAfterAll
     }
   }
 
-  test("dataload with LOAD_USE_BATCH_SORT='true' with bad_records_action='REDIRECT'") {
+  test("dataload with bad_records_action='REDIRECT'") {
     try {
         CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
       CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "BATCH_SORT")
+        .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "local_sort")
       sql("create table data_bm_no_good_data(name String, dob long, weight int) " +
           "STORED BY 'org.apache.carbondata.format'")
       val testData = s"$resourcesPath/badrecords/dummy2.csv"
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
index 8b03630..762008a 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/commands/SetCommandTestCase.scala
@@ -104,18 +104,6 @@ class SetCommandTestCase extends Spark2QueryTest with BeforeAndAfterAll{
         sql(s"set ${CarbonLoadOptionConstants.CARBON_OPTIONS_SORT_SCOPE}"))
     }
   }
-  // batch_sort_size_inmb
-  test(s"test set command for ${CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB}=4") {
-    checkAnswer(sql(s"set ${CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB}=4"),
-      sql(s"set ${CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB}"))
-  }
-
-  test(s"test set ${CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB} for invalid option") {
-    intercept[InvalidConfigurationException] {
-      checkAnswer(sql(s"set ${CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB}=hjf"),
-        sql(s"set ${CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB}"))
-    }
-  }
   // single_pass
   test(s"test set command for ${CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS}=true") {
     checkAnswer(sql(s"set ${CarbonLoadOptionConstants.CARBON_OPTIONS_SINGLE_PASS}=true"),
@@ -135,10 +123,6 @@ class SetCommandTestCase extends Spark2QueryTest with BeforeAndAfterAll{
       sql(s"set carbon.table.load.sort.scope.db.tbl"))
 
     checkAnswer(
-      sql(s"set carbon.table.load.sort.scope.db.tbl=batch_sort"),
-      sql(s"set carbon.table.load.sort.scope.db.tbl"))
-
-    checkAnswer(
       sql(s"set carbon.table.load.sort.scope.db.tbl=local_sort"),
       sql(s"set carbon.table.load.sort.scope.db.tbl"))
 
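With the batch_sort_size_inmb tests removed, the dynamic table-level sort scope accepts only the three remaining values. A sketch in this suite's style (the sql(...) helper comes from the surrounding test class):

    // Valid table-level sort scopes after BATCH_SORT removal; a value such as
    // 'batch_sort' would now be rejected as an invalid configuration.
    sql("set carbon.table.load.sort.scope.db.tbl=no_sort")
    sql("set carbon.table.load.sort.scope.db.tbl=local_sort")
    sql("set carbon.table.load.sort.scope.db.tbl=global_sort")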
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
index 265abe5..4cf6b54 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
@@ -780,8 +780,8 @@ test("test alter command for boolean data type with correct default measure valu
     sql("DROP TABLE IF EXISTS t1")
     sql(s"CREATE TABLE t1(age int, name string) STORED BY 'carbondata' TBLPROPERTIES" +
         s"('sort_columns'='age', 'sort_scope'='local_sort')")
-    sql("ALTER TABLE t1 SET TBLPROPERTIES('sort_scope'='batch_sort')")
-    assert(sortScopeInDescFormatted("t1").equalsIgnoreCase("BATCH_SORT"))
+    sql("ALTER TABLE t1 SET TBLPROPERTIES('sort_scope'='global_sort')")
+    assert(sortScopeInDescFormatted("t1").equalsIgnoreCase("global_sort"))
     sql("DROP TABLE t1")
   }
 
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
index bee98f6..6e72fc7 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
@@ -41,7 +41,6 @@ import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingExcep
 import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
 import org.apache.carbondata.processing.loading.steps.CarbonRowDataWriterProcessorStepImpl;
 import org.apache.carbondata.processing.loading.steps.DataConverterProcessorStepImpl;
-import org.apache.carbondata.processing.loading.steps.DataWriterBatchProcessorStepImpl;
 import org.apache.carbondata.processing.loading.steps.DataWriterProcessorStepImpl;
 import org.apache.carbondata.processing.loading.steps.InputProcessorStepImpl;
 import org.apache.carbondata.processing.loading.steps.InputProcessorStepWithNoConverterImpl;
@@ -72,8 +71,6 @@ public final class DataLoadProcessBuilder {
       return buildInternalForNoSort(inputIterators, configuration);
     } else if (configuration.getBucketingInfo() != null) {
       return buildInternalForBucketing(inputIterators, configuration);
-    } else if (sortScope.equals(SortScopeOptions.SortScope.BATCH_SORT)) {
-      return buildInternalForBatchSort(inputIterators, configuration);
     } else {
       return buildInternal(inputIterators, configuration);
     }
@@ -122,12 +119,6 @@ public final class DataLoadProcessBuilder {
           new SortProcessorStepImpl(configuration, inputProcessorStep);
       //  Writes the sorted data in carbondata format.
       return new DataWriterProcessorStepImpl(configuration, sortProcessorStep);
-    } else if (sortScope.equals(SortScopeOptions.SortScope.BATCH_SORT)) {
-      //  Sorts the data by SortColumn or not
-      AbstractDataLoadProcessorStep sortProcessorStep =
-          new SortProcessorStepImpl(configuration, inputProcessorStep);
-      // Writes the sorted data in carbondata format.
-      return new DataWriterBatchProcessorStepImpl(configuration, sortProcessorStep);
     } else {
      // All other cases, like global sort and no sort, use this step
       return new CarbonRowDataWriterProcessorStepImpl(configuration, inputProcessorStep);
@@ -152,34 +143,12 @@ public final class DataLoadProcessBuilder {
           new SortProcessorStepImpl(configuration, converterProcessorStep);
       //  Writes the sorted data in carbondata format.
       return new DataWriterProcessorStepImpl(configuration, sortProcessorStep);
-    } else if (sortScope.equals(SortScopeOptions.SortScope.BATCH_SORT)) {
-      //  Sorts the data by SortColumn or not
-      AbstractDataLoadProcessorStep sortProcessorStep =
-          new SortProcessorStepImpl(configuration, converterProcessorStep);
-      // Writes the sorted data in carbondata format.
-      return new DataWriterBatchProcessorStepImpl(configuration, sortProcessorStep);
     } else {
      // All other cases, like global sort and no sort, use this step
       return new CarbonRowDataWriterProcessorStepImpl(configuration, converterProcessorStep);
     }
   }
 
-  private AbstractDataLoadProcessorStep buildInternalForBatchSort(CarbonIterator[] inputIterators,
-      CarbonDataLoadConfiguration configuration) {
-    // 1. Reads the data input iterators and parses the data.
-    AbstractDataLoadProcessorStep inputProcessorStep =
-        new InputProcessorStepImpl(configuration, inputIterators);
-    // 2. Converts the data like dictionary or non dictionary or complex objects depends on
-    // data types and configurations.
-    AbstractDataLoadProcessorStep converterProcessorStep =
-        new DataConverterProcessorStepImpl(configuration, inputProcessorStep);
-    // 3. Sorts the data by SortColumn or not
-    AbstractDataLoadProcessorStep sortProcessorStep =
-        new SortProcessorStepImpl(configuration, converterProcessorStep);
-    // 4. Writes the sorted data in carbondata format.
-    return new DataWriterBatchProcessorStepImpl(configuration, sortProcessorStep);
-  }
-
   private AbstractDataLoadProcessorStep buildInternalForBucketing(CarbonIterator[] inputIterators,
       CarbonDataLoadConfiguration configuration) throws Exception {
     // 1. Reads the data input iterators and parses the data.
@@ -253,8 +222,6 @@ public final class DataLoadProcessBuilder {
     configuration.setParentTablePath(loadModel.getParentTablePath());
     configuration
         .setDataLoadProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, loadModel.getSortScope());
-    configuration.setDataLoadProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB,
-        loadModel.getBatchSortSizeInMb());
     configuration.setDataLoadProperty(CarbonCommonConstants.LOAD_GLOBAL_SORT_PARTITIONS,
         loadModel.getGlobalSortPartitions());
     configuration.setDataLoadProperty(CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORD_PATH,
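With the batch-sort branch deleted, DataLoadProcessBuilder.build dispatches along three paths only. A hedged Scala sketch of the remaining control flow (method names mirror the Java code above; the Boolean parameters stand in for the no-sort check and configuration.getBucketingInfo()):

    // Illustrative dispatch: no-sort, bucketing, or the default sorted pipeline.
    def buildPipeline(isNoSort: Boolean, hasBucketing: Boolean): String =
      if (isNoSort) "buildInternalForNoSort"
      else if (hasBucketing) "buildInternalForBucketing"
      else "buildInternal"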
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
index ddfd8c0..ff7c14b 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
@@ -186,10 +186,6 @@ public class CarbonLoadModel implements Serializable {
   private String sortScope;
 
   /**
-   * Batch sort size in mb.
-   */
-  private String batchSortSizeInMb;
-  /**
    * bad record location
    */
   private String badRecordsLocation;
@@ -484,7 +480,6 @@ public class CarbonLoadModel implements Serializable {
     copy.isEmptyDataBadRecord = isEmptyDataBadRecord;
     copy.skipEmptyLine = skipEmptyLine;
     copy.sortScope = sortScope;
-    copy.batchSortSizeInMb = batchSortSizeInMb;
     copy.isAggLoadRequest = isAggLoadRequest;
     copy.badRecordsLocation = badRecordsLocation;
     copy.isLoadWithoutConverterStep = isLoadWithoutConverterStep;
@@ -545,7 +540,6 @@ public class CarbonLoadModel implements Serializable {
     copyObj.isEmptyDataBadRecord = isEmptyDataBadRecord;
     copyObj.skipEmptyLine = skipEmptyLine;
     copyObj.sortScope = sortScope;
-    copyObj.batchSortSizeInMb = batchSortSizeInMb;
     copyObj.badRecordsLocation = badRecordsLocation;
     copyObj.isAggLoadRequest = isAggLoadRequest;
     copyObj.sortColumnsBoundsStr = sortColumnsBoundsStr;
@@ -860,14 +854,6 @@ public class CarbonLoadModel implements Serializable {
     this.sortScope = sortScope;
   }
 
-  public String getBatchSortSizeInMb() {
-    return batchSortSizeInMb;
-  }
-
-  public void setBatchSortSizeInMb(String batchSortSizeInMb) {
-    this.batchSortSizeInMb = batchSortSizeInMb;
-  }
-
   public String getGlobalSortPartitions() {
     return globalSortPartitions;
   }
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
index 1cdf93b..9d31cb4 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
@@ -254,7 +254,6 @@ public class CarbonLoadModelBuilder {
     carbonLoadModel.setSkipEmptyLine(optionsFinal.get("skip_empty_line"));
 
     carbonLoadModel.setSortScope(sort_scope);
-    carbonLoadModel.setBatchSortSizeInMb(optionsFinal.get("batch_sort_size_inmb"));
     carbonLoadModel.setGlobalSortPartitions(global_sort_partitions);
     carbonLoadModel.setUseOnePass(Boolean.parseBoolean(single_pass));
 
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
index 1ee79e9..0a69d2f 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
@@ -153,17 +153,6 @@ public class LoadOption {
 
     optionsFinal.put("maxcolumns", Maps.getOrDefault(options, "maxcolumns", null));
 
-    optionsFinal.put(
-        "batch_sort_size_inmb",
-        Maps.getOrDefault(
-            options,
-            "batch_sort_size_inmb",
-            CarbonProperties.getInstance().getProperty(
-                CarbonLoadOptionConstants.CARBON_OPTIONS_BATCH_SORT_SIZE_INMB,
-                CarbonProperties.getInstance().getProperty(
-                    CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB,
-                    CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB_DEFAULT))));
-
     String useOnePass = Maps.getOrDefault(
         options,
         "single_pass",
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/SorterFactory.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/SorterFactory.java
index af57759..0f87062 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/SorterFactory.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/SorterFactory.java
@@ -21,15 +21,12 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.constants.SortScopeOptions;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
 import org.apache.carbondata.processing.loading.sort.impl.ParallelReadMergeSorterImpl;
 import org.apache.carbondata.processing.loading.sort.impl.ParallelReadMergeSorterWithColumnRangeImpl;
-import org.apache.carbondata.processing.loading.sort.impl.UnsafeBatchParallelReadMergeSorterImpl;
 import org.apache.carbondata.processing.loading.sort.impl.UnsafeParallelReadMergeSorterImpl;
 import org.apache.carbondata.processing.loading.sort.impl.UnsafeParallelReadMergeSorterWithColumnRangeImpl;
-import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
 
 import org.apache.log4j.Logger;
 
@@ -42,7 +39,6 @@ public class SorterFactory {
     boolean offheapsort = Boolean.parseBoolean(CarbonProperties.getInstance()
         .getProperty(CarbonCommonConstants.ENABLE_UNSAFE_SORT,
             CarbonCommonConstants.ENABLE_UNSAFE_SORT_DEFAULT));
-    SortScopeOptions.SortScope sortScope = CarbonDataProcessorUtil.getSortScope(configuration);
     Sorter sorter;
     if (offheapsort) {
       if (configuration.getBucketingInfo() != null) {
@@ -65,15 +61,6 @@ public class SorterFactory {
         sorter = new ParallelReadMergeSorterImpl(counter);
       }
     }
-    if (sortScope.equals(SortScopeOptions.SortScope.BATCH_SORT)) {
-      if (configuration.getBucketingInfo() == null) {
-        sorter = new UnsafeBatchParallelReadMergeSorterImpl(counter);
-      } else {
-        LOGGER.warn(
-            "Batch sort is not enabled in case of bucketing. Falling back to " + sorter.getClass()
-                .getName());
-      }
-    }
     return sorter;
   }
 
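After removing the BATCH_SORT fallback, sorter selection depends only on the unsafe-sort flag and the bucketing info. A sketch of the resulting decision table (the class names are the implementations this commit keeps importing; pairing bucketing with the column-range sorters is inferred from those imports, not confirmed by the hunk):

    // Simplified view of SorterFactory.createSorter after this change.
    def chooseSorter(offheapSort: Boolean, hasBucketingInfo: Boolean): String =
      (offheapSort, hasBucketingInfo) match {
        case (true,  true)  => "UnsafeParallelReadMergeSorterWithColumnRangeImpl"
        case (true,  false) => "UnsafeParallelReadMergeSorterImpl"
        case (false, true)  => "ParallelReadMergeSorterWithColumnRangeImpl"
        case (false, false) => "ParallelReadMergeSorterImpl"
      }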
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
deleted file mode 100644
index 57f18a3..0000000
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
+++ /dev/null
@@ -1,348 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.processing.loading.sort.impl;
-
-import java.io.File;
-import java.util.Iterator;
-import java.util.List;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-
-import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
-import org.apache.carbondata.core.datastore.row.CarbonRow;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
-import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
-import org.apache.carbondata.processing.loading.row.CarbonRowBatch;
-import org.apache.carbondata.processing.loading.row.CarbonSortBatch;
-import org.apache.carbondata.processing.loading.sort.AbstractMergeSorter;
-import org.apache.carbondata.processing.loading.sort.unsafe.UnsafeCarbonRowPage;
-import org.apache.carbondata.processing.loading.sort.unsafe.UnsafeSortDataRows;
-import org.apache.carbondata.processing.loading.sort.unsafe.merger.UnsafeIntermediateMerger;
-import org.apache.carbondata.processing.loading.sort.unsafe.merger.UnsafeSingleThreadFinalSortFilesMerger;
-import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
-import org.apache.carbondata.processing.sort.sortdata.SortParameters;
-import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
-
-import org.apache.log4j.Logger;
-
-/**
- * It reads data from an array of iterators in parallel and performs a merge sort.
- * It sorts the data in batches and sends each batch to the next step.
- */
-public class UnsafeBatchParallelReadMergeSorterImpl extends AbstractMergeSorter {
-
-  private static final Logger LOGGER =
-      LogServiceFactory.getLogService(UnsafeBatchParallelReadMergeSorterImpl.class.getName());
-
-  private SortParameters sortParameters;
-
-  private ExecutorService executorService;
-
-  private AtomicLong rowCounter;
-
-  /* will be incremented for each batch. This ID is used in sort temp file names,
-   to identify the files of that batch */
-  private AtomicInteger batchId;
-
-  public UnsafeBatchParallelReadMergeSorterImpl(AtomicLong rowCounter) {
-    this.rowCounter = rowCounter;
-  }
-
-  @Override
-  public void initialize(SortParameters sortParameters) {
-    this.sortParameters = sortParameters;
-    batchId = new AtomicInteger(0);
-
-  }
-
-  @Override
-  public Iterator<CarbonRowBatch>[] sort(Iterator<CarbonRowBatch>[] iterators)
-      throws CarbonDataLoadingException {
-    this.executorService = Executors.newFixedThreadPool(iterators.length);
-    this.threadStatusObserver = new ThreadStatusObserver(this.executorService);
-    int batchSize = CarbonProperties.getInstance().getBatchSize();
-    final SortBatchHolder sortBatchHolder = new SortBatchHolder(sortParameters, iterators.length,
-        this.threadStatusObserver);
-
-    try {
-      for (int i = 0; i < iterators.length; i++) {
-        executorService.execute(
-            new SortIteratorThread(iterators[i], sortBatchHolder, batchSize, rowCounter,
-                this.threadStatusObserver));
-      }
-    } catch (Exception e) {
-      checkError();
-      throw new CarbonDataLoadingException("Problem while shutdown the server ", e);
-    }
-    checkError();
-    // Creates the iterator to read from merge sorter.
-    Iterator<CarbonSortBatch> batchIterator = new CarbonIterator<CarbonSortBatch>() {
-
-      @Override
-      public boolean hasNext() {
-        return sortBatchHolder.hasNext();
-      }
-
-      @Override
-      public CarbonSortBatch next() {
-        return new CarbonSortBatch(sortBatchHolder.next());
-      }
-    };
-    return new Iterator[] { batchIterator };
-  }
-
-  @Override
-  public void close() {
-    executorService.shutdown();
-    try {
-      executorService.awaitTermination(2, TimeUnit.DAYS);
-    } catch (InterruptedException e) {
-      LOGGER.error(e.getMessage(), e);
-    }
-  }
-
-  /**
-   * This thread iterates over the input iterator and adds the rows to the sorter
-   */
-  private static class SortIteratorThread implements Runnable {
-
-    private Iterator<CarbonRowBatch> iterator;
-
-    private SortBatchHolder sortDataRows;
-
-    private Object[][] buffer;
-
-    private AtomicLong rowCounter;
-
-    private ThreadStatusObserver threadStatusObserver;
-
-    public SortIteratorThread(Iterator<CarbonRowBatch> iterator, SortBatchHolder sortDataRows,
-        int batchSize, AtomicLong rowCounter, ThreadStatusObserver threadStatusObserver) {
-      this.iterator = iterator;
-      this.sortDataRows = sortDataRows;
-      this.buffer = new Object[batchSize][];
-      this.rowCounter = rowCounter;
-      this.threadStatusObserver = threadStatusObserver;
-    }
-
-    @Override
-    public void run() {
-      try {
-        while (iterator.hasNext()) {
-          CarbonRowBatch batch = iterator.next();
-          int i = 0;
-          while (batch.hasNext()) {
-            CarbonRow row = batch.next();
-            if (row != null) {
-              buffer[i++] = row.getData();
-            }
-          }
-          if (i > 0) {
-            synchronized (sortDataRows) {
-              sortDataRows.getSortDataRow().addRowBatchWithOutSync(buffer, i);
-              rowCounter.getAndAdd(i);
-              if (!sortDataRows.getSortDataRow().canAdd()) {
-                sortDataRows.finish(false);
-                sortDataRows.createSortDataRows();
-              }
-            }
-          }
-        }
-      } catch (Exception e) {
-        LOGGER.error(e.getMessage(), e);
-        this.threadStatusObserver.notifyFailed(e);
-      } finally {
-        synchronized (sortDataRows) {
-          sortDataRows.finishThread();
-        }
-      }
-    }
-
-  }
-
-  private class SortBatchHolder
-      extends CarbonIterator<UnsafeSingleThreadFinalSortFilesMerger> {
-
-    private SortParameters sortParameters;
-
-    private UnsafeSingleThreadFinalSortFilesMerger finalMerger;
-
-    private UnsafeIntermediateMerger unsafeIntermediateFileMerger;
-
-    private UnsafeSortDataRows sortDataRow;
-
-    private final BlockingQueue<UnsafeSingleThreadFinalSortFilesMerger> mergerQueue;
-
-    private AtomicInteger iteratorCount;
-
-    private int batchCount;
-
-    private ThreadStatusObserver threadStatusObserver;
-
-    private final Object lock = new Object();
-
-    SortBatchHolder(SortParameters sortParameters, int numberOfThreads,
-        ThreadStatusObserver threadStatusObserver) {
-      this.sortParameters = sortParameters.getCopy();
-      this.iteratorCount = new AtomicInteger(numberOfThreads);
-      this.mergerQueue = new LinkedBlockingQueue<>(1);
-      this.threadStatusObserver = threadStatusObserver;
-      createSortDataRows();
-    }
-
-    private void createSortDataRows() {
-      // For each batch, createSortDataRows() will be called.
-      // Files saved to disk while sorting the previous batch should not be considered
-      // for this batch.
-      // Hence use the batch ID as the rangeId field of the sort temp files,
-      // so getFilesToMergeSort() will select only this batch's files.
-      this.sortParameters.setRangeId(batchId.incrementAndGet());
-      int inMemoryChunkSizeInMB = CarbonProperties.getInstance().getSortMemoryChunkSizeInMB();
-      setTempLocation(sortParameters);
-      this.finalMerger = new UnsafeSingleThreadFinalSortFilesMerger(sortParameters,
-          sortParameters.getTempFileLocation());
-      unsafeIntermediateFileMerger = new UnsafeIntermediateMerger(sortParameters);
-      sortDataRow = new UnsafeSortDataRows(sortParameters, unsafeIntermediateFileMerger,
-          inMemoryChunkSizeInMB);
-
-      try {
-        sortDataRow.initialize();
-      } catch (Exception e) {
-        throw new CarbonDataLoadingException(e);
-      }
-      batchCount++;
-    }
-
-    private void setTempLocation(SortParameters parameters) {
-      String[] carbonDataDirectoryPath = CarbonDataProcessorUtil
-          .getLocalDataFolderLocation(parameters.getCarbonTable(), parameters.getTaskNo(),
-              parameters.getSegmentId(), false, false);
-      String[] tempDirs = CarbonDataProcessorUtil.arrayAppend(carbonDataDirectoryPath,
-          File.separator, CarbonCommonConstants.SORT_TEMP_FILE_LOCATION);
-      parameters.setTempFileLocation(tempDirs);
-    }
-
-    @Override
-    public UnsafeSingleThreadFinalSortFilesMerger next() {
-      try {
-        UnsafeSingleThreadFinalSortFilesMerger unsafeSingleThreadFinalSortFilesMerger =
-            mergerQueue.take();
-        if (unsafeSingleThreadFinalSortFilesMerger.isStopProcess()) {
-          throw new RuntimeException(threadStatusObserver.getThrowable());
-        }
-        return unsafeSingleThreadFinalSortFilesMerger;
-      } catch (InterruptedException e) {
-        throw new RuntimeException(e);
-      }
-    }
-
-    public UnsafeSortDataRows getSortDataRow() {
-      return sortDataRow;
-    }
-
-    public void finish(boolean isFinalAttempt) {
-      try {
-        // if the mergerQueue is empty and a CarbonDataLoadingException has occurred,
-        // then set stopProcess to true on the finalMerger instance
-        if (mergerQueue.isEmpty() && threadStatusObserver != null
-            && threadStatusObserver.getThrowable() != null && threadStatusObserver
-            .getThrowable() instanceof CarbonDataLoadingException) {
-          finalMerger.setStopProcess(true);
-          if (isFinalAttempt) {
-            iteratorCount.decrementAndGet();
-          }
-          mergerQueue.put(finalMerger);
-          return;
-        }
-        processRowToNextStep(sortDataRow, sortParameters);
-        unsafeIntermediateFileMerger.finish();
-        List<UnsafeCarbonRowPage> rowPages = unsafeIntermediateFileMerger.getRowPages();
-        finalMerger.startFinalMerge(rowPages.toArray(new UnsafeCarbonRowPage[rowPages.size()]),
-            unsafeIntermediateFileMerger.getMergedPages());
-        unsafeIntermediateFileMerger.close();
-        if (isFinalAttempt) {
-          iteratorCount.decrementAndGet();
-        }
-        mergerQueue.put(finalMerger);
-        sortDataRow = null;
-        unsafeIntermediateFileMerger = null;
-        finalMerger = null;
-      } catch (CarbonDataWriterException e) {
-        throw new CarbonDataLoadingException(e);
-      } catch (CarbonSortKeyAndGroupByException e) {
-        throw new CarbonDataLoadingException(e);
-      } catch (InterruptedException e) {
-        // if put() fails because of an InterruptedException, offer() the merger to free the main
-        // thread from waiting.
-        if (finalMerger != null) {
-          finalMerger.setStopProcess(true);
-          boolean offered = mergerQueue.offer(finalMerger);
-          if (!offered) {
-            throw new CarbonDataLoadingException(e);
-          }
-        }
-        throw new CarbonDataLoadingException(e);
-      }
-    }
-
-    public void finishThread() {
-      synchronized (lock) {
-        if (iteratorCount.get() <= 1) {
-          finish(true);
-        } else {
-          iteratorCount.decrementAndGet();
-        }
-      }
-    }
-
-    public boolean hasNext() {
-      return iteratorCount.get() > 0 || !mergerQueue.isEmpty();
-    }
-
-    /**
-     * This method processes the sorted data and passes it on to the next step
-     */
-    private boolean processRowToNextStep(UnsafeSortDataRows sortDataRows, SortParameters parameters)
-        throws CarbonDataLoadingException {
-      try {
-        // start sorting
-        sortDataRows.startSorting();
-
-        // check any more rows are present
-        LOGGER.info("Record Processed For table: " + parameters.getTableName());
-        CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
-            .recordSortRowsStepTotalTime(parameters.getPartitionID(), System.currentTimeMillis());
-        CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
-            .recordDictionaryValuesTotalTime(parameters.getPartitionID(),
-                System.currentTimeMillis());
-        return false;
-      } catch (Exception e) {
-        throw new CarbonDataLoadingException(e);
-      }
-    }
-  }
-}
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
index 3248bae..3bfbfcf 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
@@ -117,14 +117,8 @@ public class UnsafeSortDataRows {
         .getProperty(CarbonCommonConstants.ENABLE_INMEMORY_MERGE_SORT,
             CarbonCommonConstants.ENABLE_INMEMORY_MERGE_SORT_DEFAULT));
 
-    this.maxSizeAllowed = parameters.getBatchSortSizeinMb();
-    if (maxSizeAllowed <= 0) {
-      // If the user does not provide a memory size, take half of the usable memory
-      // configured for sort.
-      this.maxSizeAllowed = UnsafeMemoryManager.INSTANCE.getUsableMemory() / 2;
-    } else {
-      this.maxSizeAllowed = this.maxSizeAllowed * 1024L * 1024L;
-    }
+    // Take half of the usable memory configured for sort.
+    this.maxSizeAllowed = UnsafeMemoryManager.INSTANCE.getUsableMemory() / 2;
   }
 
   /**
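
With the batch-size knob gone, UnsafeSortDataRows always derives its in-memory limit from the configured sort memory instead of carbon.load.batch.sort.size.inmb. A sketch of the sizing rule before and after this hunk (the method name is illustrative; the constants come from the diff above):

    // Before: a positive batch size in MB overrode the default.
    static long resolveSortBufferLimit(long usableSortMemoryBytes, int batchSortSizeInMb) {
      if (batchSortSizeInMb <= 0) {
        return usableSortMemoryBytes / 2;        // default: half of usable sort memory
      }
      return batchSortSizeInMb * 1024L * 1024L;  // user-supplied size, converted to bytes
    }

    // After: the limit is always half of the usable sort memory.
    static long resolveSortBufferLimit(long usableSortMemoryBytes) {
      return usableSortMemoryBytes / 2;
    }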
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
deleted file mode 100644
index 1622060..0000000
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.processing.loading.steps;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
-import org.apache.carbondata.core.datastore.row.CarbonRow;
-import org.apache.carbondata.core.localdictionary.generator.LocalDictionaryGenerator;
-import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
-import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-import org.apache.carbondata.processing.datamap.DataMapWriterListener;
-import org.apache.carbondata.processing.loading.AbstractDataLoadProcessorStep;
-import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
-import org.apache.carbondata.processing.loading.exception.BadRecordFoundException;
-import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
-import org.apache.carbondata.processing.loading.row.CarbonRowBatch;
-import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
-import org.apache.carbondata.processing.store.CarbonFactHandler;
-import org.apache.carbondata.processing.store.CarbonFactHandlerFactory;
-import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
-
-import org.apache.log4j.Logger;
-
-/**
- * It reads data from a batch of sorted files (in-memory or disk based)
- * generated in the previous sort step, and writes the data to a carbondata file.
- * It also generates the MDK key while writing to the carbondata file.
- */
-public class DataWriterBatchProcessorStepImpl extends AbstractDataLoadProcessorStep {
-
-  private static final Logger LOGGER =
-      LogServiceFactory.getLogService(DataWriterBatchProcessorStepImpl.class.getName());
-
-  private Map<String, LocalDictionaryGenerator> localDictionaryGeneratorMap;
-
-  private CarbonFactHandler carbonFactHandler;
-
-  public DataWriterBatchProcessorStepImpl(CarbonDataLoadConfiguration configuration,
-      AbstractDataLoadProcessorStep child) {
-    super(configuration, child);
-    this.localDictionaryGeneratorMap =
-        CarbonUtil.getLocalDictionaryModel(configuration.getTableSpec().getCarbonTable());
-  }
-
-  @Override
-  public void initialize() throws IOException {
-    super.initialize();
-    child.initialize();
-  }
-
-  private String[] getStoreLocation() {
-    return CarbonDataProcessorUtil
-        .getLocalDataFolderLocation(configuration.getTableSpec().getCarbonTable(),
-            String.valueOf(configuration.getTaskNo()), configuration.getSegmentId(), false, false);
-  }
-
-  @Override
-  public Iterator<CarbonRowBatch>[] execute() throws CarbonDataLoadingException {
-    Iterator<CarbonRowBatch>[] iterators = child.execute();
-    CarbonTableIdentifier tableIdentifier =
-        configuration.getTableIdentifier().getCarbonTableIdentifier();
-    String tableName = tableIdentifier.getTableName();
-    try {
-      CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
-          .recordDictionaryValue2MdkAdd2FileTime(CarbonTablePath.DEPRECATED_PARTITION_ID,
-              System.currentTimeMillis());
-      int i = 0;
-      String[] storeLocation = getStoreLocation();
-      CarbonDataProcessorUtil.createLocations(storeLocation);
-      for (Iterator<CarbonRowBatch> iterator : iterators) {
-        int k = 0;
-        while (iterator.hasNext()) {
-          CarbonRowBatch next = iterator.next();
-          // If there are no rows from the merge sorter, don't create a file in the fact data handler
-          if (next.hasNext()) {
-            DataMapWriterListener listener = getDataMapWriterListener(0);
-            CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel
-                .createCarbonFactDataHandlerModel(configuration, storeLocation, i, k++, listener);
-            model.setColumnLocalDictGenMap(this.localDictionaryGeneratorMap);
-            this.carbonFactHandler = CarbonFactHandlerFactory
-                .createCarbonFactHandler(model);
-            carbonFactHandler.initialise();
-            processBatch(next, carbonFactHandler);
-            try {
-              finish(tableName, carbonFactHandler);
-            } finally {
-              // set carbonFactHandler = null because finish() calls closeHandler()
-              // even when finish() throws an exception;
-              // otherwise close() would call finish() again for the same handler.
-              this.carbonFactHandler = null;
-            }
-          }
-        }
-        i++;
-      }
-    } catch (Exception e) {
-      LOGGER.error("Failed for table: " + tableName + " in DataWriterBatchProcessorStepImpl", e);
-      if (e.getCause() instanceof BadRecordFoundException) {
-        throw new BadRecordFoundException(e.getCause().getMessage());
-      }
-      throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage(), e);
-    }
-    return null;
-  }
-
-  @Override
-  protected String getStepName() {
-    return "Data Batch Writer";
-  }
-
-  private void finish(String tableName, CarbonFactHandler dataHandler) {
-    CarbonDataWriterException exception = null;
-    try {
-      dataHandler.finish();
-    } catch (Exception e) {
-      // if we threw the exception from here, dataHandler would not be closed,
-      // so hold the exception and throw it after closing
-      LOGGER.error("Failed for table: " + tableName + " in finishing data handler", e);
-      exception = new CarbonDataWriterException(
-          "Failed for table: " + tableName + " in finishing data handler", e);
-    }
-    CarbonTimeStatisticsFactory.getLoadStatisticsInstance().recordTotalRecords(rowCounter.get());
-    try {
-      processingComplete(dataHandler);
-    } catch (Exception e) {
-      if (null == exception) {
-        exception = new CarbonDataWriterException(e);
-      }
-    }
-    CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
-        .recordDictionaryValue2MdkAdd2FileTime(CarbonTablePath.DEPRECATED_PARTITION_ID,
-            System.currentTimeMillis());
-    CarbonTimeStatisticsFactory.getLoadStatisticsInstance()
-        .recordMdkGenerateTotalTime(CarbonTablePath.DEPRECATED_PARTITION_ID,
-            System.currentTimeMillis());
-    if (null != exception) {
-      throw exception;
-    }
-  }
-
-  private void processingComplete(CarbonFactHandler dataHandler) {
-    if (null != dataHandler) {
-      try {
-        dataHandler.closeHandler();
-      } catch (Exception e) {
-        LOGGER.error(e.getMessage(), e);
-        throw new CarbonDataLoadingException(
-            "There is an unexpected error while closing data handler", e);
-      }
-    }
-  }
-
-  private void processBatch(CarbonRowBatch batch, CarbonFactHandler dataHandler) throws Exception {
-    int batchSize = 0;
-    while (batch.hasNext()) {
-      CarbonRow row = batch.next();
-      dataHandler.addDataToStore(row);
-      batchSize++;
-    }
-    batch.close();
-    rowCounter.getAndAdd(batchSize);
-  }
-
-  @Override
-  public void close() {
-    if (!closed) {
-      super.close();
-      if (null != this.carbonFactHandler) {
-        carbonFactHandler.finish();
-        carbonFactHandler.closeHandler();
-      }
-    }
-  }
-}
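
The step deleted above drove one CarbonFactHandler per incoming batch. Its lifecycle, distilled from the removed execute() method (a sketch using only calls visible in the deleted code; parameter names such as bucketId and batchId are illustrative, and error handling is reduced to the essential finally):

    static void writeBatch(CarbonDataLoadConfiguration configuration, String[] storeLocation,
        int bucketId, int batchId, DataMapWriterListener listener, CarbonRowBatch batch)
        throws Exception {
      CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel
          .createCarbonFactDataHandlerModel(configuration, storeLocation, bucketId, batchId,
              listener);
      CarbonFactHandler handler = CarbonFactHandlerFactory.createCarbonFactHandler(model);
      handler.initialise();
      while (batch.hasNext()) {
        handler.addDataToStore(batch.next());  // feed sorted rows to the handler
      }
      try {
        handler.finish();                      // flush pages and write the carbondata file
      } finally {
        handler.closeHandler();                // always release handler resources
      }
    }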
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortParameters.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortParameters.java
index 025fb1c..cb95226 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortParameters.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SortParameters.java
@@ -138,7 +138,6 @@ public class SortParameters implements Serializable {
 
   private int numberOfCores;
 
-  private int batchSortSizeinMb;
   private int rangeId = 0;
 
   /**
@@ -190,7 +189,6 @@ public class SortParameters implements Serializable {
     parameters.numberOfSortColumns = numberOfSortColumns;
     parameters.numberOfNoDictSortColumns = numberOfNoDictSortColumns;
     parameters.numberOfCores = numberOfCores;
-    parameters.batchSortSizeinMb = batchSortSizeinMb;
     parameters.rangeId = rangeId;
     parameters.carbonTable = carbonTable;
     parameters.isUpdateDictDims = isUpdateDictDims;
@@ -393,14 +391,6 @@ public class SortParameters implements Serializable {
     this.numberOfNoDictSortColumns = Math.min(numberOfNoDictSortColumns, noDictionaryCount);
   }
 
-  public int getBatchSortSizeinMb() {
-    return batchSortSizeinMb;
-  }
-
-  public void setBatchSortSizeinMb(int batchSortSizeinMb) {
-    this.batchSortSizeinMb = batchSortSizeinMb;
-  }
-
   public void setCarbonTable(CarbonTable carbonTable) {
     this.carbonTable = carbonTable;
   }
@@ -439,8 +429,6 @@ public class SortParameters implements Serializable {
         CarbonDataProcessorUtil.getNoDictionaryMapping(configuration.getDataFields()));
     parameters.setIsVarcharDimensionColumn(
         CarbonDataProcessorUtil.getIsVarcharColumnMapping(configuration.getDataFields()));
-    parameters.setBatchSortSizeinMb(CarbonDataProcessorUtil.getBatchSortSizeinMb(configuration));
-
     parameters.setNumberOfSortColumns(configuration.getNumberOfSortColumns());
     parameters.setNumberOfNoDictSortColumns(configuration.getNumberOfNoDictSortColumns());
     parameters.setNoDictionarySortColumn(CarbonDataProcessorUtil
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
index 6dabf20..5e5dcf5 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
@@ -527,34 +527,6 @@ public final class CarbonDataProcessorUtil {
   }
 
   /**
-   * Get the batch sort size
-   * @param configuration data load configuration
-   * @return the batch sort size in MB, or 0 if it cannot be resolved
-   */
-  public static int getBatchSortSizeinMb(CarbonDataLoadConfiguration configuration) {
-    int batchSortSizeInMb;
-    try {
-      // First try get from user input from ddl , otherwise get from carbon properties.
-      if (configuration.getDataLoadProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB)
-          == null) {
-        batchSortSizeInMb = Integer.parseInt(CarbonProperties.getInstance()
-            .getProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB,
-                CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB_DEFAULT));
-      } else {
-        batchSortSizeInMb = Integer.parseInt(
-            configuration.getDataLoadProperty(CarbonCommonConstants.LOAD_BATCH_SORT_SIZE_INMB)
-                .toString());
-      }
-      LOGGER.info("batch sort size is set to " + batchSortSizeInMb);
-    } catch (Exception e) {
-      batchSortSizeInMb = 0;
-      LOGGER.warn("Exception occured while resolving batch sort size. " +
-          "batch sort size is set to " + batchSortSizeInMb);
-    }
-    return batchSortSizeInMb;
-  }
-
-  /**
    * Get the number of partitions in global sort
    * @param globalSortPartitions
    * @return the number of partitions
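
The SDK hunk below narrows the documented sort_scope values to "local_sort" and "no_sort". A hedged usage sketch against the SDK writer (builder methods as documented in docs/sdk-guide.md; the output path and rows are placeholders, and exception handling is omitted):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.carbondata.core.metadata.datatype.DataTypes;
    import org.apache.carbondata.sdk.file.CarbonWriter;
    import org.apache.carbondata.sdk.file.Field;
    import org.apache.carbondata.sdk.file.Schema;

    Field[] fields = new Field[]{
        new Field("name", DataTypes.STRING),
        new Field("age", DataTypes.INT)};

    Map<String, String> tableProperties = new HashMap<>();
    tableProperties.put("sort_columns", "name");
    tableProperties.put("sort_scope", "local_sort");  // "batch_sort" is no longer accepted

    CarbonWriter writer = CarbonWriter.builder()
        .outputPath("/tmp/sdk_output")                // placeholder path
        .withTableProperties(tableProperties)
        .withCsvInput(new Schema(fields))
        .writtenBy("SortScopeExample")
        .build();
    writer.write(new String[]{"alice", "29"});
    writer.close();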
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index b1985de..8865303 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -294,7 +294,7 @@ public class CarbonWriterBuilder {
    * d. local_dictionary_enable -- true / false. Default is false
    * e. sort_columns -- comma separated column. "c1,c2". Default all dimensions are sorted.
    *                    If empty string "" is passed. No columns are sorted
-   * j. sort_scope -- "local_sort", "no_sort", "batch_sort". default value is "local_sort"
+   * j. sort_scope -- "local_sort", "no_sort". default value is "local_sort"
    * k. long_string_columns -- comma separated string columns which are more than 32k length.
    *                           default value is null.
    * l. inverted_index -- comma separated string columns for which inverted index needs to be