Posted to commits@hive.apache.org by kr...@apache.org on 2022/07/22 10:18:38 UTC

[hive] branch master updated: HIVE-26375: Invalid materialized view after rebuild if source table was compacted (Krisztian Kasa, reviewed by Aman Sinha, Laszlo Pinter)

This is an automated email from the ASF dual-hosted git repository.

krisztiankasa pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 4146d220bfc HIVE-26375: Invalid materialized view after rebuild if source table was compacted (Krisztian Kasa, reviewed by Aman Sinha, Laszlo Pinter)
4146d220bfc is described below

commit 4146d220bfc0a29d117676844a4c7021fcc6a30d
Author: Krisztian Kasa <ka...@gmail.com>
AuthorDate: Fri Jul 22 12:18:27 2022 +0200

    HIVE-26375: Invalid materialized view after rebuild if source table was compacted (Krisztian Kasa, reviewed by Aman Sinha, Laszlo Pinter)
---
 .../txn/compactor/TestMaterializedViewRebuild.java | 100 ++++++++++++++------
 .../org/apache/hadoop/hive/ql/metadata/Hive.java   |   5 +-
 .../hadoop/hive/metastore/txn/TestTxnHandler.java  |  10 +-
 .../hadoop/hive/metastore/HiveMetaStoreClient.java |   8 +-
 .../hadoop/hive/metastore/IMetaStoreClient.java    |   9 +-
 .../apache/hadoop/hive/metastore/HMSHandler.java   |   5 +-
 .../apache/hadoop/hive/metastore/ObjectStore.java  |  52 +++++++++--
 .../hadoop/hive/metastore/txn/TxnHandler.java      | 103 ---------------------
 .../apache/hadoop/hive/metastore/txn/TxnStore.java |  11 +--
 .../metastore/HiveMetaStoreClientPreCatalog.java   |  12 +--
 10 files changed, 133 insertions(+), 182 deletions(-)
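
At a glance, the fix removes the old single-argument getMaterializationInvalidationInfo(CreationMetadata) overload and makes every caller pass the transaction snapshot of the current query, so the metastore can tell whether a source table was compacted after the materialized view was created. Below is a minimal caller sketch; names such as msc, cm and conf are illustrative and lifted from the Hive.java hunk further down, not the exact committed code:

    import org.apache.hadoop.hive.common.ValidTxnList;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.IMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.CreationMetadata;
    import org.apache.hadoop.hive.metastore.api.Materialization;

    // Illustrative helper: decides whether an incremental MV rebuild may be attempted.
    static boolean incrementalRebuildAllowed(IMetaStoreClient msc, CreationMetadata cm, HiveConf conf)
        throws Exception {
      // The second argument is the new part: the valid-txn list of the current query snapshot.
      Materialization info = msc.getMaterializationInvalidationInfo(
          cm, conf.get(ValidTxnList.VALID_TXNS_KEY));
      // Fall back to a full rebuild if any source table saw update/delete traffic
      // or was compacted since the view was created (the behaviour the new tests assert).
      return info != null
          && !info.isSourceTablesUpdateDeleteModified()
          && !info.isSourceTablesCompacted();
    }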

diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestMaterializedViewRebuild.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestMaterializedViewRebuild.java
index 1ddf22e9201..a0bf2608bfb 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestMaterializedViewRebuild.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestMaterializedViewRebuild.java
@@ -38,42 +38,37 @@ public class TestMaterializedViewRebuild extends CompactorOnTezTest {
   private static final String MV1 = "mat1";
 
   private static final List<String> FULL_REBUILD_PLAN = Arrays.asList(
-      "CBO PLAN:",
-      "HiveProject(a=[$0], b=[$1], c=[$2])",
-      "  HiveFilter(condition=[OR(IS NULL($0), >($0, 0))])",
-      "    HiveTableScan(table=[[default, t1]], table:alias=[t1])",
-      ""
+          "CBO PLAN:",
+          "HiveProject(a=[$0], b=[$1], c=[$2])",
+          "  HiveFilter(condition=[OR(IS NULL($0), >($0, 0))])",
+          "    HiveTableScan(table=[[default, t1]], table:alias=[t1])",
+          ""
   );
 
   private static final List<String> INCREMENTAL_REBUILD_PLAN = Arrays.asList(
-      "CBO PLAN:",
-      "HiveProject(a=[$0], b=[$1], c=[$2])",
-      "  HiveFilter(condition=[AND(<(1, $5.writeid), OR(>($0, 0), IS NULL($0)))])",
-      "    HiveTableScan(table=[[default, t1]], table:alias=[t1])",
-      ""
+          "CBO PLAN:",
+          "HiveProject(a=[$0], b=[$1], c=[$2])",
+          "  HiveFilter(condition=[AND(<(2, $5.writeid), OR(>($0, 0), IS NULL($0)))])",
+          "    HiveTableScan(table=[[default, t1]], table:alias=[t1])",
+          ""
   );
 
   private static final List<String> EXPECTED_RESULT = Arrays.asList(
-      "1\tone\t1.1",
-      "2\ttwo\t2.2",
-      "3\tthree\t3.3",
-      "NULL\tNULL\tNULL"
+          "1\tone\t1.1",
+          "2\ttwo\t2.2",
+          "3\tthree\t3.3",
+          "NULL\tNULL\tNULL"
   );
 
 
   @Override
   public void setup() throws Exception {
     super.setup();
-  }
 
-  private void createTestSchema(String tblproperties) throws Exception {
-    executeStatementOnDriver("create table " + TABLE1 + "(a int, b varchar(128), c float) stored as orc " +
-        "TBLPROPERTIES ('transactional'='true'" + tblproperties + ")", driver);
-    executeStatementOnDriver("insert into " + TABLE1 + "(a,b, c) values " +
-        "(1, 'one', 1.1), (2, 'two', 2.2), (NULL, NULL, NULL)", driver);
-    executeStatementOnDriver("create materialized view " + MV1 + " stored as orc " +
-        "TBLPROPERTIES ('transactional'='true') as " +
-        "select a,b,c from " + TABLE1 + " where a > 0 or a is null", driver);
+    executeStatementOnDriver("create table " + TABLE1 + "(a int, b varchar(128), c float) stored as orc TBLPROPERTIES ('transactional'='true')", driver);
+    executeStatementOnDriver("insert into " + TABLE1 + "(a,b, c) values (1, 'one', 1.1), (2, 'two', 2.2), (NULL, NULL, NULL)", driver);
+    executeStatementOnDriver("create materialized view " + MV1 + " stored as orc TBLPROPERTIES ('transactional'='true') as " +
+            "select a,b,c from " + TABLE1 + " where a > 0 or a is null", driver);
   }
 
   @Override
@@ -85,8 +80,7 @@ public class TestMaterializedViewRebuild extends CompactorOnTezTest {
   }
 
   @Test
-  public void testWhenMajorCompactionThenIncrementalMVRebuildIsStillAvailable() throws Exception {
-    createTestSchema("");
+  public void testWhenMajorCompactionThenIncrementalMVRebuildNotUsed() throws Exception {
 
     executeStatementOnDriver("insert into " + TABLE1 + "(a,b,c) values (3, 'three', 3.3)", driver);
 
@@ -97,7 +91,7 @@ public class TestMaterializedViewRebuild extends CompactorOnTezTest {
     txnHandler.cleanTxnToWriteIdTable();
 
     List<String> result = execSelectAndDumpData("explain cbo alter materialized view " + MV1 + " rebuild", driver, "");
-    Assert.assertEquals(INCREMENTAL_REBUILD_PLAN, result);
+    Assert.assertEquals(FULL_REBUILD_PLAN, result);
     executeStatementOnDriver("alter materialized view " + MV1 + " rebuild", driver);
 
     result = execSelectAndDumpData("select * from " + MV1 , driver, "");
@@ -108,8 +102,7 @@ public class TestMaterializedViewRebuild extends CompactorOnTezTest {
   }
 
   @Test
-  public void testWhenCompactInsertOnlySourceTableThenIncrementalMVRebuildIsNotAvailable() throws Exception {
-    createTestSchema(", 'transactional_properties'='insert_only'");
+  public void testSecondRebuildCanBeIncrementalAfterMajorCompaction() throws Exception {
 
     executeStatementOnDriver("insert into " + TABLE1 + "(a,b,c) values (3, 'three', 3.3)", driver);
 
@@ -119,6 +112,30 @@ public class TestMaterializedViewRebuild extends CompactorOnTezTest {
     TxnStore txnHandler = TxnUtils.getTxnStore(conf);
     txnHandler.cleanTxnToWriteIdTable();
 
+    executeStatementOnDriver("alter materialized view " + MV1 + " rebuild", driver);
+
+    // Insert after first rebuild.
+    executeStatementOnDriver("insert into " + TABLE1 + "(a,b,c) values (4, 'four', 4.4)", driver);
+
+    List<String> result = execSelectAndDumpData("explain cbo alter materialized view " + MV1 + " rebuild", driver, "");
+    Assert.assertEquals(INCREMENTAL_REBUILD_PLAN, result);
+    executeStatementOnDriver("alter materialized view " + MV1 + " rebuild", driver);
+
+    result = execSelectAndDumpData("select * from " + MV1 , driver, "");
+    List<String> expected = new ArrayList(EXPECTED_RESULT);
+    expected.add("4\tfour\t4.4");
+    assertResult(expected, result);
+
+    result = execSelectAndDumpData("explain cbo select a,b,c from " + TABLE1 + " where a > 0 or a is null", driver, "");
+    Assert.assertEquals(Arrays.asList("CBO PLAN:", "HiveTableScan(table=[[default, " + MV1 + "]], table:alias=[default." + MV1 + "])", ""), result);
+  }
+
+  @Test
+  public void testWhenCleanUpOfMajorCompactionHasNotFinishedIncrementalMVRebuildNotUsed() throws Exception {
+    executeStatementOnDriver("insert into " + TABLE1 + "(a,b,c) values (3, 'three', 3.3)", driver);
+
+    CompactorTestUtil.runCompaction(conf, "default",  TABLE1 , CompactionType.MAJOR, true);
+
     List<String> result = execSelectAndDumpData("explain cbo alter materialized view " + MV1 + " rebuild", driver, "");
     Assert.assertEquals(FULL_REBUILD_PLAN, result);
     executeStatementOnDriver("alter materialized view " + MV1 + " rebuild", driver);
@@ -130,6 +147,33 @@ public class TestMaterializedViewRebuild extends CompactorOnTezTest {
     Assert.assertEquals(Arrays.asList("CBO PLAN:", "HiveTableScan(table=[[default, " + MV1 + "]], table:alias=[default." + MV1 + "])", ""), result);
   }
 
+  private static final List<String> EXPECTED_RESULT_AFTER_UPDATE = Arrays.asList(
+          "1\tChanged\t1.1",
+          "2\ttwo\t2.2",
+          "NULL\tNULL\tNULL"
+  );
+
+  @Test
+  public void testWhenMajorCompactionThenIncrementalMVRebuildNotUsedInPresenceOfUpdate() throws Exception {
+    executeStatementOnDriver("update " + TABLE1 + " set b = 'Changed' where a = 1", driver);
+
+    CompactorTestUtil.runCompaction(conf, "default",  TABLE1 , CompactionType.MAJOR, true);
+    CompactorTestUtil.runCleaner(conf);
+    verifySuccessfulCompaction(1);
+    TxnStore txnHandler = TxnUtils.getTxnStore(conf);
+    txnHandler.cleanTxnToWriteIdTable();
+
+    List<String> result = execSelectAndDumpData("explain cbo alter materialized view " + MV1 + " rebuild", driver, "");
+    Assert.assertEquals(FULL_REBUILD_PLAN, result);
+    executeStatementOnDriver("alter materialized view " + MV1 + " rebuild", driver);
+
+    result = execSelectAndDumpData("select * from " + MV1 , driver, "");
+    assertResult(EXPECTED_RESULT_AFTER_UPDATE, result);
+
+    result = execSelectAndDumpData("explain cbo select a,b,c from " + TABLE1 + " where a > 0 or a is null", driver, "");
+    Assert.assertEquals(Arrays.asList("CBO PLAN:", "HiveTableScan(table=[[default, " + MV1 + "]], table:alias=[default." + MV1 + "])", ""), result);
+  }
+
   private void assertResult(List<String> expected, List<String> actual) {
     Assert.assertEquals(expected.size(), actual.size());
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index d011736bcf1..f640b9778e7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -2080,7 +2080,7 @@ public class Hive {
               // Obtain additional information if we should try incremental rewriting / rebuild
               // We will not try partial rewriting if there were update/delete/compaction operations on source tables
               Materialization invalidationInfo = getMSC().getMaterializationInvalidationInfo(
-                  materializedViewTable.getMVMetadata().creationMetadata);
+                  materializedViewTable.getMVMetadata().creationMetadata, conf.get(ValidTxnList.VALID_TXNS_KEY));
               if (invalidationInfo == null || invalidationInfo.isSourceTablesUpdateDeleteModified() ||
                   invalidationInfo.isSourceTablesCompacted()) {
                 // We ignore (as it did not meet the requirements), but we do not need to update it in the
@@ -2177,7 +2177,8 @@ public class Hive {
           } else {
             // Obtain additional information if we should try incremental rewriting / rebuild
             // We will not try partial rewriting if there were update/delete/compaction operations on source tables
-            invalidationInfo = getMSC().getMaterializationInvalidationInfo(metadata.creationMetadata);
+            invalidationInfo = getMSC().getMaterializationInvalidationInfo(
+                    metadata.creationMetadata, conf.get(ValidTxnList.VALID_TXNS_KEY));
             ignore = invalidationInfo == null || invalidationInfo.isSourceTablesCompacted();
           }
           if (ignore) {
diff --git a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
index 969f0a504de..43fa5410c39 100644
--- a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
+++ b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
@@ -1879,6 +1879,7 @@ public class TestTxnHandler {
   @Test
   public void testGetMaterializationInvalidationInfo() throws MetaException {
     testGetMaterializationInvalidationInfo(
+            new ValidReadTxnList(new long[] {6, 11}, new BitSet(), 10L, 12L),
             new ValidReaderWriteIdList(TableName.getDbTable("default", "t1"), new long[] { 2 }, new BitSet(), 1)
     );
   }
@@ -1886,6 +1887,7 @@ public class TestTxnHandler {
   @Test
   public void testGetMaterializationInvalidationInfoWhenTableHasNoException() throws MetaException {
     testGetMaterializationInvalidationInfo(
+            new ValidReadTxnList(new long[] {6, 11}, new BitSet(), 10L, 12L),
             new ValidReaderWriteIdList(TableName.getDbTable("default", "t1"), new long[0], new BitSet(), 1)
     );
   }
@@ -1893,12 +1895,13 @@ public class TestTxnHandler {
   @Test
   public void testGetMaterializationInvalidationInfoWhenCurrentTxnListHasNoException() throws MetaException {
     testGetMaterializationInvalidationInfo(
+            new ValidReadTxnList(new long[0], new BitSet(), 10L, 12L),
             new ValidReaderWriteIdList(TableName.getDbTable("default", "t1"), new long[] { 2 }, new BitSet(), 1)
     );
   }
 
   private void testGetMaterializationInvalidationInfo(
-          ValidReaderWriteIdList... tableWriteIdList) throws MetaException {
+          ValidReadTxnList currentValidTxnList, ValidReaderWriteIdList... tableWriteIdList) throws MetaException {
     ValidTxnWriteIdList validTxnWriteIdList = new ValidTxnWriteIdList(5L);
     for (ValidReaderWriteIdList tableWriteId : tableWriteIdList) {
       validTxnWriteIdList.addTableValidWriteIdList(tableWriteId);
@@ -1917,10 +1920,11 @@ public class TestTxnHandler {
     CreationMetadata creationMetadata = new CreationMetadata();
     creationMetadata.setDbName("default");
     creationMetadata.setTblName("mat1");
-    creationMetadata.setSourceTables(Collections.singletonList(sourceTable));
+    creationMetadata.setTablesUsed(new HashSet<String>() {{ add("default.t1"); }});
     creationMetadata.setValidTxnList(validTxnWriteIdList.toString());
 
-    Materialization materialization = txnHandler.getMaterializationInvalidationInfo(creationMetadata);
+    Materialization materialization = txnHandler.getMaterializationInvalidationInfo(
+            creationMetadata, currentValidTxnList.toString());
     assertFalse(materialization.isSourceTablesUpdateDeleteModified());
   }
 
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 08d92f1ef25..ecf85fb817a 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -2704,13 +2704,9 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
     return deepCopyTables(FilterUtils.filterTablesIfEnabled(isClientFilterEnabled, filterHook, tabs));
   }
 
-  public Materialization getMaterializationInvalidationInfo(CreationMetadata cm)
-      throws MetaException, InvalidOperationException, UnknownDBException, TException {
-    return client.get_materialization_invalidation_info(cm, null);
-  }
-
+  @Override
   public Materialization getMaterializationInvalidationInfo(CreationMetadata cm, String validTxnList)
-      throws MetaException, InvalidOperationException, UnknownDBException, TException {
+          throws MetaException, InvalidOperationException, UnknownDBException, TException {
     return client.get_materialization_invalidation_info(cm, validTxnList);
   }
 
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index dfd06f96a2a..75e1b2c8bc8 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -808,15 +808,8 @@ public interface IMetaStoreClient {
   /**
    * Returns the invalidation information for the materialized views given as input.
    */
-  Materialization getMaterializationInvalidationInfo(CreationMetadata cm)
-      throws MetaException, InvalidOperationException, UnknownDBException, TException;
-
-  @Deprecated
-  /**
-   * Use {@link IMetaStoreClient#getMaterializationInvalidationInfo(CreationMetadata)} instead.
-   */
   Materialization getMaterializationInvalidationInfo(CreationMetadata cm, String validTxnList)
-      throws MetaException, InvalidOperationException, UnknownDBException, TException;
+          throws MetaException, InvalidOperationException, UnknownDBException, TException;
 
   /**
    * Updates the creation metadata for the materialized view.
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
index 2cde211c297..6b6f5e463c8 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java
@@ -3848,10 +3848,7 @@ public class HMSHandler extends FacebookBase implements IHMSHandler {
   }
 
   @Override
-  public Materialization get_materialization_invalidation_info(final CreationMetadata cm, String validTxnList) throws MetaException {
-    if (validTxnList == null) {
-      return getTxnHandler().getMaterializationInvalidationInfo(cm);
-    }
+  public Materialization get_materialization_invalidation_info(final CreationMetadata cm, final String validTxnList) throws MetaException {
     return getTxnHandler().getMaterializationInvalidationInfo(cm, validTxnList);
   }
 
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 83afdce22ea..435ac4fc6bd 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -2527,19 +2527,51 @@ public class ObjectStore implements RawStore, Configurable {
     }
     assert !m.isSetMaterializationTime();
     Set<MMVSource> tablesUsed = new HashSet<>();
-    for (SourceTable sourceTable : m.getSourceTables()) {
-      Table table = sourceTable.getTable();
-      MTable mtbl = getMTable(m.getCatName(), table.getDbName(), table.getTableName(), false).mtbl;
-      MMVSource source = new MMVSource();
-      source.setTable(mtbl);
-      source.setInsertedCount(sourceTable.getInsertedCount());
-      source.setUpdatedCount(sourceTable.getUpdatedCount());
-      source.setDeletedCount(sourceTable.getDeletedCount());
-      tablesUsed.add(source);
+    if (m.isSetSourceTables()) {
+      for (SourceTable sourceTable : m.getSourceTables()) {
+        tablesUsed.add(convertToSourceTable(m.getCatName(), sourceTable));
+      }
+    } else {
+      for (String fullyQualifiedName : m.getTablesUsed()) {
+        tablesUsed.add(convertToSourceTable(m.getCatName(), fullyQualifiedName));
+      }
     }
     return new MCreationMetadata(normalizeIdentifier(m.getCatName()),
             normalizeIdentifier(m.getDbName()), normalizeIdentifier(m.getTblName()),
-        tablesUsed, m.getValidTxnList(), System.currentTimeMillis());
+            tablesUsed, m.getValidTxnList(), System.currentTimeMillis());
+  }
+
+  private MMVSource convertToSourceTable(String catalog, SourceTable sourceTable) {
+    Table table = sourceTable.getTable();
+    MTable mtbl = getMTable(catalog, table.getDbName(), table.getTableName(), false).mtbl;
+    MMVSource source = new MMVSource();
+    source.setTable(mtbl);
+    source.setInsertedCount(sourceTable.getInsertedCount());
+    source.setUpdatedCount(sourceTable.getUpdatedCount());
+    source.setDeletedCount(sourceTable.getDeletedCount());
+    return source;
+  }
+
+  /**
+   * This method resets the stats to 0 and exists only for backward compatibility with clients that do
+   * not send {@link SourceTable} instances.
+   *
+   * Use {@link ObjectStore#convertToSourceTable(String, SourceTable)} instead.
+   *
+   * @param catalog Catalog name where source table is located
+   * @param fullyQualifiedTableName fully qualified name of source table
+   * @return {@link MMVSource} instance representing this source table.
+   */
+  @Deprecated
+  private MMVSource convertToSourceTable(String catalog, String fullyQualifiedTableName) {
+    String[] names = fullyQualifiedTableName.split("\\.");
+    MTable mtbl = getMTable(catalog, names[0], names[1], false).mtbl;
+    MMVSource source = new MMVSource();
+    source.setTable(mtbl);
+    source.setInsertedCount(0L);
+    source.setUpdatedCount(0L);
+    source.setDeletedCount(0L);
+    return source;
   }
 
   private CreationMetadata convertToCreationMetadata(MCreationMetadata s) throws MetaException {
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index 9e270fc731d..5ab11b1c6bc 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -2544,109 +2544,6 @@ abstract class TxnHandler implements TxnStore, TxnStore.MutexAPI {
    */
   @Override
   @RetrySemantics.ReadOnly
-  public Materialization getMaterializationInvalidationInfo(CreationMetadata creationMetadata) throws MetaException {
-    if (creationMetadata.getSourceTables().isEmpty()) {
-      // Bail out
-      LOG.warn("Materialization creation metadata does not contain any table");
-      return null;
-    }
-
-    boolean sourceTablesUpdateDeleteModified = false;
-    for (SourceTable sourceTable : creationMetadata.getSourceTables()) {
-      if (sourceTable.getDeletedCount() > 0 || sourceTable.getUpdatedCount() > 0) {
-        sourceTablesUpdateDeleteModified = true;
-        break;
-      }
-    }
-
-    Boolean sourceTablesCompacted = wasCompacted(creationMetadata);
-    if (sourceTablesCompacted == null) {
-      return null;
-    }
-    return new Materialization(sourceTablesUpdateDeleteModified, sourceTablesCompacted);
-  }
-
-  private Boolean wasCompacted(CreationMetadata creationMetadata) throws MetaException {
-    Set<String> insertOnlyTables = new HashSet<>();
-    for (SourceTable sourceTable : creationMetadata.getSourceTables()) {
-      Table table = sourceTable.getTable();
-      String transactionalProp = table.getParameters().get(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES);
-      if (!"insert_only".equalsIgnoreCase(transactionalProp)) {
-        continue;
-      }
-
-      insertOnlyTables.add(
-              TableName.getDbTable(sourceTable.getTable().getDbName(), sourceTable.getTable().getTableName()));
-    }
-
-    if (insertOnlyTables.isEmpty()) {
-      return false;
-    }
-
-    // We are composing a query that returns a single row if a compaction happened after
-    // the materialization was created. Otherwise, query returns 0 rows.
-
-    // Parse validReaderWriteIdList from creation metadata
-    final ValidTxnWriteIdList validReaderWriteIdList =
-            new ValidTxnWriteIdList(creationMetadata.getValidTxnList());
-
-
-    List<String> params = new ArrayList<>();
-    StringBuilder queryCompletedCompactions = new StringBuilder();
-    StringBuilder queryCompactionQueue = new StringBuilder();
-    // compose a query that select transactions containing an update...
-    queryCompletedCompactions.append("SELECT 1 FROM \"COMPLETED_COMPACTIONS\" WHERE (");
-    queryCompactionQueue.append("SELECT 1 FROM \"COMPACTION_QUEUE\" WHERE (");
-    int i = 0;
-    for (String fullyQualifiedName : insertOnlyTables) {
-      ValidWriteIdList tblValidWriteIdList =
-              validReaderWriteIdList.getTableValidWriteIdList(fullyQualifiedName);
-      if (tblValidWriteIdList == null) {
-        LOG.warn("ValidWriteIdList for table {} not present in creation metadata, this should not happen", fullyQualifiedName);
-        return null;
-      }
-
-      // ...for each of the tables that are part of the materialized view,
-      // where the transaction had to be committed after the materialization was created...
-      if (i != 0) {
-        queryCompletedCompactions.append("OR");
-        queryCompactionQueue.append("OR");
-      }
-      String[] names = TxnUtils.getDbTableName(fullyQualifiedName);
-      assert (names.length == 2);
-      queryCompletedCompactions.append(" (\"CC_DATABASE\"=? AND \"CC_TABLE\"=?");
-      queryCompactionQueue.append(" (\"CQ_DATABASE\"=? AND \"CQ_TABLE\"=?");
-      params.add(names[0]);
-      params.add(names[1]);
-      queryCompletedCompactions.append(" AND (\"CC_HIGHEST_WRITE_ID\" > ");
-      queryCompletedCompactions.append(tblValidWriteIdList.getHighWatermark());
-      queryCompletedCompactions.append(tblValidWriteIdList.getInvalidWriteIds().length == 0 ? ") " :
-              " OR \"CC_HIGHEST_WRITE_ID\" IN(" + StringUtils.join(",",
-                      Arrays.asList(ArrayUtils.toObject(tblValidWriteIdList.getInvalidWriteIds()))) + ") ) ");
-      queryCompletedCompactions.append(") ");
-      queryCompactionQueue.append(") ");
-      i++;
-    }
-    // ... and where the transaction has already been committed as per snapshot taken
-    // when we are running current query
-    queryCompletedCompactions.append(")");
-    queryCompactionQueue.append(") ");
-
-    // Execute query
-    queryCompletedCompactions.append(" UNION ");
-    queryCompletedCompactions.append(queryCompactionQueue);
-    List<String> paramsTwice = new ArrayList<>(params);
-    paramsTwice.addAll(params);
-    return executeBoolean(queryCompletedCompactions.toString(), paramsTwice,
-            "Unable to retrieve materialization invalidation information: compactions");
-  }
-
-  /**
-   * Use {@link TxnHandler#getMaterializationInvalidationInfo(CreationMetadata)} instead.
-   */
-  @Override
-  @Deprecated
-  @RetrySemantics.ReadOnly
   public Materialization getMaterializationInvalidationInfo(
           CreationMetadata creationMetadata, String validTxnListStr) throws MetaException {
     if (creationMetadata.getTablesUsed().isEmpty()) {
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnStore.java
index cdfd95263da..34d77f90742 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnStore.java
@@ -197,17 +197,14 @@ public interface TxnStore extends Configurable {
   /**
    * Get invalidation info for the materialization. Currently, the materialization information
    * only contains information about whether there was update/delete operations on the source
-   * tables and were any of the insert-only source tables compacted used by the materialization
-   * since it was created.
+   * tables used by the materialization since it was created.
    * @param cm creation metadata for the materialization
+   * @param validTxnList valid transaction list of the snapshot taken for the current query
    * @throws MetaException
    */
   @RetrySemantics.Idempotent
-  Materialization getMaterializationInvalidationInfo(final CreationMetadata cm)
-          throws MetaException;
-
-  @RetrySemantics.Idempotent
-  Materialization getMaterializationInvalidationInfo(final CreationMetadata cm, String validTxnList)
+  Materialization getMaterializationInvalidationInfo(
+          final CreationMetadata cm, final String validTxnList)
           throws MetaException;
 
   @RetrySemantics.ReadOnly
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java
index 222203c63bc..2a92c09c580 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClientPreCatalog.java
@@ -1587,18 +1587,8 @@ public class HiveMetaStoreClientPreCatalog implements IMetaStoreClient, AutoClos
 
   /** {@inheritDoc} */
   @Override
-  public Materialization getMaterializationInvalidationInfo(CreationMetadata cm)
-      throws MetaException, InvalidOperationException, UnknownDBException, TException {
-    return client.get_materialization_invalidation_info(cm, null);
-  }
-
-  @Deprecated
-  /**
-   * Use {@link HiveMetaStoreClientPreCatalog#getMaterializationInvalidationInfo(CreationMetadata)} instead.
-   */
-  @Override
   public Materialization getMaterializationInvalidationInfo(CreationMetadata cm, String validTxnList)
-      throws MetaException, InvalidOperationException, UnknownDBException, TException {
+          throws MetaException, InvalidOperationException, UnknownDBException, TException {
     return client.get_materialization_invalidation_info(cm, validTxnList);
   }