Posted to commits@carbondata.apache.org by in...@apache.org on 2022/06/02 05:35:45 UTC

[carbondata] branch master updated: [CARBONDATA-4335] Disable MV by default

This is an automated email from the ASF dual-hosted git repository.

indhumuthumurugesh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 33408be14b [CARBONDATA-4335] Disable MV by default
33408be14b is described below

commit 33408be14b218bd6a7d9b5191a158c37de6d0c81
Author: Mahesh Raju Somalaraju <ma...@huawei.com>
AuthorDate: Tue May 10 19:46:44 2022 +0530

    [CARBONDATA-4335] Disable MV by default
    
    Why is this PR needed?
    Currently, materialized views (MV) are enabled by default. In concurrent
    scenarios with MV enabled by default, every session walks the list of
    databases even when no MV is used, which increases query time.
    
    What changes were proposed in this PR?
    Disable MV by default, since MV is rarely used. Users who need it can
    enable it explicitly (a short example follows the commit message).
    
    Does this PR introduce any user interface change?
    No
    
    Is any new testcase added?
    Yes
    
    This closes #4264
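
For users who still need MV after this change, it can be enabled per session or
process-wide. The snippet below is a minimal sketch, not part of the commit: it
assumes an existing SparkSession named spark (a hypothetical variable), while
the property key carbon.enable.mv and the CARBON_ENABLE_MV constant are taken
from the diff that follows.

    // Minimal sketch: enable MV on demand (assumes a SparkSession `spark`).
    import org.apache.carbondata.core.constants.CarbonCommonConstants
    import org.apache.carbondata.core.util.CarbonProperties

    // Option 1: enable MV for the current session with a dynamic SET command,
    // mirroring the sql("set carbon.enable.mv = true") calls in the tests below.
    spark.sql("set carbon.enable.mv = true")

    // Option 2: enable MV through carbon properties for the running process,
    // mirroring the CarbonProperties usage in MVTest.scala.
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_ENABLE_MV, "true")
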
---
 .../carbondata/core/constants/CarbonCommonConstants.java     |  2 +-
 docs/configuration-parameters.md                             |  2 +-
 .../org/apache/carbon/flink/TestCarbonPartitionWriter.scala  |  5 +++--
 .../scala/org/apache/carbon/flink/TestCarbonWriter.scala     |  3 +++
 .../src/test/scala/org/apache/carbondata/view/MVTest.scala   | 12 +++++-------
 .../apache/carbondata/view/rewrite/MVCoalesceTestCase.scala  |  2 ++
 .../carbondata/view/rewrite/MVCountAndCaseTestCase.scala     |  2 ++
 .../apache/carbondata/view/rewrite/MVCreateTestCase.scala    |  2 ++
 .../apache/carbondata/view/rewrite/MVFilterAndJoinTest.scala |  2 ++
 .../view/rewrite/MVIncrementalLoadingTestcase.scala          |  3 +++
 .../apache/carbondata/view/rewrite/MVInvalidTestCase.scala   |  2 ++
 .../apache/carbondata/view/rewrite/MVMultiJoinTestCase.scala |  2 ++
 .../apache/carbondata/view/rewrite/MVRewriteTestCase.scala   |  2 ++
 .../apache/carbondata/view/rewrite/MVSampleTestCase.scala    |  3 ++-
 .../org/apache/carbondata/view/rewrite/MVTPCDSTestCase.scala |  3 ++-
 .../org/apache/carbondata/view/rewrite/MVTpchTestCase.scala  |  2 ++
 .../carbondata/view/rewrite/SelectAllColumnsSuite.scala      |  2 ++
 .../carbondata/view/rewrite/TestAllOperationsOnMV.scala      |  3 +++
 .../apache/carbondata/view/rewrite/TestPartitionWithMV.scala |  2 ++
 .../view/timeseries/TestCreateMVWithTimeSeries.scala         |  2 ++
 .../view/timeseries/TestMVTimeSeriesLoadAndQuery.scala       |  2 ++
 .../view/timeseries/TestMVTimeSeriesQueryRollUp.scala        |  2 ++
 .../spark/carbondata/register/TestRegisterCarbonTable.scala  |  2 ++
 23 files changed, 51 insertions(+), 13 deletions(-)

diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index fb45bccba6..663f7d21cb 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1231,7 +1231,7 @@ public final class CarbonCommonConstants {
   @CarbonProperty(dynamicConfigurable = true)
   public static final String CARBON_ENABLE_MV = "carbon.enable.mv";
 
-  public static final String CARBON_ENABLE_MV_DEFAULT = "true";
+  public static final String CARBON_ENABLE_MV_DEFAULT = "false";
 
   /**
    * Related mv table's map for a fact table
diff --git a/docs/configuration-parameters.md b/docs/configuration-parameters.md
index 1225190a6d..39064d4ae9 100644
--- a/docs/configuration-parameters.md
+++ b/docs/configuration-parameters.md
@@ -143,7 +143,7 @@ This section provides the details of all the configurations required for the Car
 | carbon.scheduler.min.registered.resources.ratio | 0.8 | Specifies the minimum resource (executor) ratio needed for starting the block distribution. The default value is 0.8, which indicates 80% of the requested resource is allocated for starting block distribution. The minimum value is 0.1 min and the maximum value is 1.0. |
 | carbon.detail.batch.size | 100 | The buffer size to store records, returned from the block scan. In limit scenario this parameter is very important. For example your query limit is 1000. But if we set this value to 3000 that means we get 3000 records from scan but spark will only take 1000 rows. So the 2000 remaining are useless. In one Finance test case after we set it to 100, in the limit 1000 scenario the performance increase about 2 times in comparison to if we set this value to 12 [...]
 | carbon.enable.vector.reader | true | Spark added vector processing to optimize cpu cache miss and there by increase the query performance. This configuration enables to fetch data as columnar batch of size 4*1024 rows instead of fetching data row by row and provide it to spark so that there is improvement in  select queries performance. |
-| carbon.enable.mv | true | Whether to rewrite the query plan based on the materialized views, Default value is true |
+| carbon.enable.mv | false | Whether to rewrite the query plan based on the materialized views, Default value is false |
 | carbon.task.distribution | block | CarbonData has its own scheduling algorithm to suggest to Spark on how many tasks needs to be launched and how much work each task need to do in a Spark cluster for any query on CarbonData. Each of these task distribution suggestions has its own advantages and disadvantages. Based on the customer use case, appropriate task distribution can be configured.**block**: Setting this value will launch one task per block. This setting is suggested in case of  [...]
 | carbon.custom.block.distribution | false | CarbonData has its own scheduling algorithm to suggest to Spark on how many tasks needs to be launched and how much work each task need to do in a Spark cluster for any query on CarbonData. When this configuration is true, CarbonData would distribute the available blocks to be scanned among the available number of cores. For Example:If there are 10 blocks to be scanned and only 3 tasks can be run(only 3 executor cores available in the cluster) [...]
 | enable.query.statistics | false | CarbonData has extensive logging which would be useful for debugging issues related to performance or hard to locate issues. This configuration when made ***true*** would log additional query statistics information to more accurately locate the issues being debugged. **NOTE:** Enabling this would log more debug information to log files, there by increasing the log files size significantly in short span of time. It is advised to configure the log files  [...]
diff --git a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala
index fbb71bee55..845dc45ce8 100644
--- a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala
+++ b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonPartitionWriter.scala
@@ -310,7 +310,7 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll {
     sql(s"drop materialized view if exists mv_1")
     sql("create materialized view mv_1 " +
         s"as select stringField, shortField from $tableName where intField=9")
-
+    sql("set carbon.enable.mv = true")
     try {
       val tablePath = storeLocation + "/" + tableName + "/"
       val writerProperties = newWriterProperties(dataTempPath)
@@ -335,7 +335,8 @@ class TestCarbonPartitionWriter extends QueryTest with BeforeAndAfterAll {
       }
       assert(tables.exists(_.identifier.table.equalsIgnoreCase("mv_1")))
       checkAnswer(df, Seq(Row("test9", 12345)))
-
+    } finally {
+      sql("set carbon.enable.mv = false")
     }
   }
 
diff --git a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala
index 4802640e71..747db5595e 100644
--- a/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala
+++ b/integration/flink/src/test/scala/org/apache/carbon/flink/TestCarbonWriter.scala
@@ -231,6 +231,7 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
     sql(s"drop materialized view if exists mv_1")
     sql(s"create materialized view mv_1 " +
         s"as select stringField, shortField from $tableName where intField=99 ")
+    sql("set carbon.enable.mv = true")
     try {
       val tablePath = storeLocation + "/" + tableName + "/"
       val writerProperties = newWriterProperties(dataTempPath)
@@ -252,6 +253,8 @@ class TestCarbonWriter extends QueryTest with BeforeAndAfterAll{
       assert(tables.exists(_.identifier.table.equalsIgnoreCase("mv_1")))
       checkAnswer(df, Seq(Row("test99", 12345)))
       checkIfStageFilesAreDeleted(tablePath)
+    } finally {
+      sql("set carbon.enable.mv = false")
     }
   }
 
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/MVTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/MVTest.scala
index c2ef20bf06..1678a6d034 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/MVTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/MVTest.scala
@@ -51,6 +51,7 @@ class MVTest extends QueryTest with BeforeAndAfterAll {
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
+    sql("set carbon.enable.mv = true")
   }
 
   test("test create mv on hive table") {
@@ -76,19 +77,18 @@ class MVTest extends QueryTest with BeforeAndAfterAll {
 
     // 2.  test disable mv with carbon.properties
     // 2.1 disable MV when set carbon.enable.mv = false in the carbonproperties
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_ENABLE_MV, "false")
+    sql("set carbon.enable.mv = false")
     df = sql("select empname, avg(salary) from source group by empname")
     assert(!isTableAppearedInPlan(df.queryExecution.optimizedPlan, "mv1"))
-
     // 2.2 disable MV when the configured value is invalid
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_ENABLE_MV, "invalidvalue")
     df = sql("select empname, avg(salary) from source group by empname")
-    assert(isTableAppearedInPlan(df.queryExecution.optimizedPlan, "mv1"))
+    assert(!isTableAppearedInPlan(df.queryExecution.optimizedPlan, "mv1"))
 
     // 2.3 enable mv when set carbon.enable.mv = true in the carbonproperties
+    sql("set carbon.enable.mv = true")
     df = sql("select empname, avg(salary) from source group by empname")
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_ENABLE_MV, "true")
     assert(isTableAppearedInPlan(df.queryExecution.optimizedPlan, "mv1"))
 
     // 3.  test disable mv with sessionparam
@@ -107,9 +107,6 @@ class MVTest extends QueryTest with BeforeAndAfterAll {
     sql("set carbon.enable.mv = true")
     df = sql("select empname, avg(salary) from source group by empname")
     assert(isTableAppearedInPlan(df.queryExecution.optimizedPlan, "mv1"))
-
-    ThreadLocalSessionInfo.getCarbonSessionInfo.
-      getSessionParams.removeProperty(CarbonCommonConstants.CARBON_ENABLE_MV)
   }
 
   test("test create mv on orc table") {
@@ -392,6 +389,7 @@ class MVTest extends QueryTest with BeforeAndAfterAll {
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
         CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+    sql("set carbon.enable.mv = false")
   }
 
   def drop(): Unit = {
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCoalesceTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCoalesceTestCase.scala
index 6a8f4dd0f4..7b35ea944c 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCoalesceTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCoalesceTestCase.scala
@@ -31,6 +31,7 @@ class MVCoalesceTestCase  extends QueryTest with BeforeAndAfterAll  {
     sql("insert into coalesce_test_main select 1,'tom',170,130")
     sql("insert into coalesce_test_main select 2,'tom',170,120")
     sql("insert into coalesce_test_main select 3,'lily',160,100")
+    sql("set carbon.enable.mv = true")
   }
 
   def drop(): Unit = {
@@ -86,6 +87,7 @@ class MVCoalesceTestCase  extends QueryTest with BeforeAndAfterAll  {
 
   override def afterAll(): Unit = {
     drop
+    sql("set carbon.enable.mv = false")
   }
 }
 
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCountAndCaseTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCountAndCaseTestCase.scala
index 063ca42c05..b13d43e291 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCountAndCaseTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCountAndCaseTestCase.scala
@@ -30,6 +30,7 @@ class MVCountAndCaseTestCase  extends QueryTest with BeforeAndAfterAll{
       s"""create table data_table(
          |starttime int, seq long,succ long,LAYER4ID string,tmp int)
          |using carbondata""".stripMargin)
+    sql("set carbon.enable.mv = true")
   }
 
   def drop(): Unit = {
@@ -86,5 +87,6 @@ class MVCountAndCaseTestCase  extends QueryTest with BeforeAndAfterAll{
 
   override def afterAll(): Unit = {
     drop
+    sql("set carbon.enable.mv = false")
   }
 }
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCreateTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCreateTestCase.scala
index 71fcb809a5..fde0ae82d0 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCreateTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVCreateTestCase.scala
@@ -104,6 +104,7 @@ class MVCreateTestCase extends QueryTest with BeforeAndAfterAll {
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
+    sql("set carbon.enable.mv = true")
   }
 
   test("test if partial query with group by hits mv when all columns present in mv") {
@@ -1567,6 +1568,7 @@ class MVCreateTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll {
     drop()
+    sql("set carbon.enable.mv = false")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
         CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVFilterAndJoinTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVFilterAndJoinTest.scala
index 232950f894..43c8efb090 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVFilterAndJoinTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVFilterAndJoinTest.scala
@@ -29,6 +29,7 @@ class MVFilterAndJoinTest extends QueryTest with BeforeAndAfterAll {
     sql("create table main_table (name string,age int,height int) STORED AS carbondata")
     sql("create table dim_table (name string,age int,height int) STORED AS carbondata")
     sql("create table sdr_table (name varchar(20),score int) STORED AS carbondata")
+    sql("set carbon.enable.mv = true")
   }
 
   def drop() {
@@ -71,6 +72,7 @@ class MVFilterAndJoinTest extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll(): Unit = {
     drop
+    sql("set carbon.enable.mv = false")
   }
 
 }
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVIncrementalLoadingTestcase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVIncrementalLoadingTestcase.scala
index c1ac180e9b..3e4083f5c3 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVIncrementalLoadingTestcase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVIncrementalLoadingTestcase.scala
@@ -44,6 +44,7 @@ class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists products1")
     sql("drop table if exists sales1")
     sql("drop materialized view if exists mv1")
+    sql("set carbon.enable.mv = true")
   }
 
   test("test Incremental Loading on refresh MV") {
@@ -344,6 +345,7 @@ class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
     val df = sql("select a, sum(c) from main_table  group by a")
     assert(!TestUtil.verifyMVHit(df.queryExecution.optimizedPlan, "mv1"))
     defaultConfig()
+    sql("set carbon.enable.mv = true")
     sqlContext.sparkSession.conf.unset("carbon.input.segments.default.main_table")
     checkAnswer(sql("select a, sum(c) from main_table  group by a"), Seq(Row("a", 1), Row("b", 2)))
     val df1 = sql("select a, sum(c) from main_table  group by a")
@@ -672,6 +674,7 @@ class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
     sql("drop table IF EXISTS test_table1")
     sql("drop table IF EXISTS main_table")
     sql("drop table IF EXISTS dimensiontable")
+    sql("set carbon.enable.mv = false")
   }
 
   private def createTableFactTable(tableName: String) = {
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVInvalidTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVInvalidTestCase.scala
index 5b325bd47e..c5f83bfc2b 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVInvalidTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVInvalidTestCase.scala
@@ -25,6 +25,7 @@ class MVInvalidTestCase  extends QueryTest with BeforeAndAfterAll {
   override def beforeAll(): Unit = {
     drop
     sql("create table main_table (name string,age int,height int) STORED AS carbondata")
+    sql("set carbon.enable.mv = true")
   }
 
   def drop {
@@ -45,5 +46,6 @@ class MVInvalidTestCase  extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll(): Unit = {
     drop
+    sql("set carbon.enable.mv = false")
   }
 }
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVMultiJoinTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVMultiJoinTestCase.scala
index c7931b480b..c078c7d50a 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVMultiJoinTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVMultiJoinTestCase.scala
@@ -28,10 +28,12 @@ class MVMultiJoinTestCase extends QueryTest with BeforeAndAfterAll {
     sql("create table dim_table(name string,age int,height int) using carbondata")
     sql("create table sdr_table(name varchar(20), score int) using carbondata")
     sql("create table areas(aid int, title string, pid int) using carbondata")
+    sql("set carbon.enable.mv = true")
   }
 
   override def afterAll() {
     drop
+    sql("set carbon.enable.mv = false")
   }
 
   test("test mv self join") {
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVRewriteTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVRewriteTestCase.scala
index 3f66b8be4b..3eef4fa5cf 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVRewriteTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVRewriteTestCase.scala
@@ -28,6 +28,7 @@ class MVRewriteTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""create table data_table(
         |starttime int, seq long,succ long,LAYER4ID string,tmp int)
         |using carbondata""".stripMargin)
+    sql("set carbon.enable.mv = true")
   }
 
   def drop(): Unit = {
@@ -83,5 +84,6 @@ class MVRewriteTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll(): Unit = {
     drop
+    sql("set carbon.enable.mv = false")
   }
 }
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVSampleTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVSampleTestCase.scala
index 0e57a5a3a3..6db21987ee 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVSampleTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVSampleTestCase.scala
@@ -39,7 +39,7 @@ class MVSampleTestCase extends QueryTest with BeforeAndAfterAll {
     sql("drop database if exists sample cascade")
     sql("create database sample")
     sql("use sample")
-
+    sql("set carbon.enable.mv = true")
     createTables.map(sql)
 
   }
@@ -150,6 +150,7 @@ class MVSampleTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll {
     drop()
+    sql("set carbon.enable.mv = false")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
         CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVTPCDSTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVTPCDSTestCase.scala
index 28155aaabb..75ea44e795 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVTPCDSTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVTPCDSTestCase.scala
@@ -40,7 +40,7 @@ class MVTPCDSTestCase extends QueryTest with BeforeAndAfterAll {
     sql("drop database if exists tpcds cascade")
     sql("create database tpcds")
     sql("use tpcds")
-
+    sql("set carbon.enable.mv = true")
     tpcds1_4Tables.foreach { create_table =>
       sql(create_table)
     }
@@ -126,6 +126,7 @@ class MVTPCDSTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll {
     drop()
+    sql("set carbon.enable.mv = false")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
         CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVTpchTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVTpchTestCase.scala
index 8fd485464e..268c643338 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVTpchTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVTpchTestCase.scala
@@ -60,6 +60,7 @@ class MVTpchTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""load data inpath "$resourcesPath/tpch/region.csv" into table REGION1 options('DELIMITER'='|','FILEHEADER'='R_REGIONKEY,R_NAME,R_COMMENT')""")
     sql(s"""load data inpath "$resourcesPath/tpch/nation.csv" into table NATION1 options('DELIMITER'='|','FILEHEADER'='N_NATIONKEY,N_NAME,N_REGIONKEY,N_COMMENT')""")
     sql(s"""load data inpath "$resourcesPath/tpch/supplier.csv" into table SUPPLIER1 options('DELIMITER'='|','FILEHEADER'='S_SUPPKEY,S_NAME,S_ADDRESS,S_NATIONKEY,S_PHONE,S_ACCTBAL,S_COMMENT')""")
+    sql("set carbon.enable.mv = true")
   }
 
   test("test create materialized view with tpch1") {
@@ -216,6 +217,7 @@ class MVTpchTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    sql("set carbon.enable.mv = false")
 //    drop()
   }
   // scalastyle:on lineLength
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/SelectAllColumnsSuite.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/SelectAllColumnsSuite.scala
index d444b3a31b..86d1f2ef62 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/SelectAllColumnsSuite.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/SelectAllColumnsSuite.scala
@@ -28,6 +28,7 @@ class SelectAllColumnsSuite extends QueryTest {
     sql("create table all_table(name string, age int, height int) STORED AS carbondata")
     sql("insert into all_table select 'tom',20,175")
     sql("insert into all_table select 'tom',32,180")
+    sql("set carbon.enable.mv = true")
     sql("create materialized view all_table_mv " +
         "as select avg(age),avg(height),name from all_table group by name")
     sql("refresh materialized view all_table_mv")
@@ -37,6 +38,7 @@ class SelectAllColumnsSuite extends QueryTest {
     val frame = sql("select avg(age),avg(height),name from all_table group by name")
     assert(TestUtil.verifyMVHit(frame.queryExecution.optimizedPlan, "all_table_mv"))
     sql("drop table if exists all_table")
+    sql("set carbon.enable.mv = false")
   }
 
 }
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/TestAllOperationsOnMV.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/TestAllOperationsOnMV.scala
index 8b7f678af7..20586dab88 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/TestAllOperationsOnMV.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/TestAllOperationsOnMV.scala
@@ -40,6 +40,7 @@ import org.apache.carbondata.spark.exception.ProcessMetaDataException
 class TestAllOperationsOnMV extends QueryTest with BeforeAndAfterEach {
   // scalastyle:off lineLength
   override def beforeEach(): Unit = {
+    sql("set carbon.enable.mv = true")
     sql("drop table IF EXISTS maintable")
     sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
@@ -59,6 +60,7 @@ class TestAllOperationsOnMV extends QueryTest with BeforeAndAfterEach {
   }
 
   override def afterEach(): Unit = {
+    sql("set carbon.enable.mv = false")
     sql("drop materialized view if exists dm_mv ")
     sql("drop materialized view if exists dm_pre ")
     sql("drop table IF EXISTS maintable")
@@ -727,6 +729,7 @@ class TestAllOperationsOnMV extends QueryTest with BeforeAndAfterEach {
 
   test("drop meta cache on mv materialized view table") {
     defaultConfig()
+    sql("set carbon.enable.mv = true")
     sql("drop table IF EXISTS maintable")
     sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/TestPartitionWithMV.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/TestPartitionWithMV.scala
index 139acd2373..9729fc6879 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/TestPartitionWithMV.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/TestPartitionWithMV.scala
@@ -54,9 +54,11 @@ class TestPartitionWithMV extends QueryTest with BeforeAndAfterAll with BeforeAn
         | STORED AS carbondata
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$testData' into table maintable")
+    sql("set carbon.enable.mv = true")
   }
 
   override def afterAll(): Unit = {
+    sql("set carbon.enable.mv = false")
     sql("drop database if exists partition_mv cascade")
     sql("use default")
     CarbonProperties.getInstance()
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestCreateMVWithTimeSeries.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestCreateMVWithTimeSeries.scala
index 038f7a55cc..0057cff626 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestCreateMVWithTimeSeries.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestCreateMVWithTimeSeries.scala
@@ -41,6 +41,7 @@ class TestCreateMVWithTimeSeries extends QueryTest with BeforeAndAfterAll {
         "deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int, utilization int,salary int) STORED AS carbondata")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE maintable  OPTIONS
          |('DELIMITER'= ',', 'QUOTECHAR'= '"')""".stripMargin)
+    sql("set carbon.enable.mv = true")
   }
 
   def drop(): Unit = {
@@ -235,6 +236,7 @@ class TestCreateMVWithTimeSeries extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll(): Unit = {
     drop()
+    sql("set carbon.enable.mv = false")
     if (null != timestampFormat) {
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, timestampFormat)
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestMVTimeSeriesLoadAndQuery.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestMVTimeSeriesLoadAndQuery.scala
index 0c8f90873d..9363161c76 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestMVTimeSeriesLoadAndQuery.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestMVTimeSeriesLoadAndQuery.scala
@@ -31,6 +31,7 @@ class TestMVTimeSeriesLoadAndQuery extends QueryTest with BeforeAndAfterAll {
   override def beforeAll(): Unit = {
     drop()
     createTable()
+    sql("set carbon.enable.mv = true")
   }
 
   test("create MV timeseries materialized view with simple projection and aggregation and filter") {
@@ -367,6 +368,7 @@ class TestMVTimeSeriesLoadAndQuery extends QueryTest with BeforeAndAfterAll {
 
   override def afterAll(): Unit = {
     drop()
+    sql("set carbon.enable.mv = false")
   }
 
   def drop(): Unit = {
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestMVTimeSeriesQueryRollUp.scala b/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestMVTimeSeriesQueryRollUp.scala
index 96864a47b9..1603a7cccc 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestMVTimeSeriesQueryRollUp.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/view/timeseries/TestMVTimeSeriesQueryRollUp.scala
@@ -28,10 +28,12 @@ class TestMVTimeSeriesQueryRollUp extends QueryTest with BeforeAndAfterAll {
     drop()
     createTable()
     loadData("maintable")
+    sql("set carbon.enable.mv = true")
   }
 
   override def afterAll(): Unit = {
     drop()
+    sql("set carbon.enable.mv = false")
   }
 
   test("test timeseries query rollup with simple projection") {
diff --git a/integration/spark/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala b/integration/spark/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
index 48ddec4d6d..315dc048ab 100644
--- a/integration/spark/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
+++ b/integration/spark/src/test/scala/org/apache/spark/carbondata/register/TestRegisterCarbonTable.scala
@@ -35,6 +35,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterEach {
     sql("drop database if exists carbon cascade")
     sql("drop database if exists carbon1 cascade")
     sql("drop database if exists carbon2 cascade")
+    sql("set carbon.enable.mv = true")
   }
 
   private def restoreData(dblocation: String, tableName: String) = {
@@ -271,6 +272,7 @@ class TestRegisterCarbonTable extends QueryTest with BeforeAndAfterEach {
   }
 
   override def afterEach {
+    sql("set carbon.enable.mv = false")
     sql("use default")
     sql("drop database if exists carbon cascade")
     sql("drop database if exists carbon1 cascade")