You are viewing a plain-text rendering of this content; the canonical (HTML) version, including its hyperlink, is available from the original mailing-list archive.
Posted to commits@hive.apache.org by kr...@apache.org on 2021/10/05 03:25:35 UTC
[hive] branch master updated: HIVE-25572: Exception while querying
materialized view invalidation info (Krisztian Kasa, reviewed by Peter Vary)
This is an automated email from the ASF dual-hosted git repository.
krisztiankasa pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 30e16fb HIVE-25572: Exception while querying materialized view invalidation info (Krisztian Kasa, reviewed by Peter Vary)
30e16fb is described below
commit 30e16fbd9812efb87bc0bf4576d56dc7ec74589c
Author: Krisztian Kasa <kk...@cloudera.com>
AuthorDate: Tue Oct 5 05:25:19 2021 +0200
HIVE-25572: Exception while querying materialized view invalidation info (Krisztian Kasa, reviewed by Peter Vary)
---
.../hadoop/hive/metastore/txn/TestTxnHandler.java | 51 ++++++++++++++++++++++
.../hadoop/hive/metastore/txn/TxnHandler.java | 4 +-
2 files changed, 53 insertions(+), 2 deletions(-)
diff --git a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
index a045666..ba5d131 100644
--- a/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
+++ b/ql/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
@@ -18,6 +18,11 @@
package org.apache.hadoop.hive.metastore.txn;
import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.common.TableName;
+import org.apache.hadoop.hive.common.ValidReadTxnList;
+import org.apache.hadoop.hive.common.ValidReaderWriteIdList;
+import org.apache.hadoop.hive.common.ValidTxnList;
+import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
import org.apache.hadoop.hive.metastore.api.AbortTxnsRequest;
@@ -29,6 +34,7 @@ import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
import org.apache.hadoop.hive.metastore.api.CompactionRequest;
import org.apache.hadoop.hive.metastore.api.CompactionResponse;
import org.apache.hadoop.hive.metastore.api.CompactionType;
+import org.apache.hadoop.hive.metastore.api.CreationMetadata;
import org.apache.hadoop.hive.metastore.api.DataOperationType;
import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
@@ -41,6 +47,7 @@ import org.apache.hadoop.hive.metastore.api.LockRequest;
import org.apache.hadoop.hive.metastore.api.LockResponse;
import org.apache.hadoop.hive.metastore.api.LockState;
import org.apache.hadoop.hive.metastore.api.LockType;
+import org.apache.hadoop.hive.metastore.api.Materialization;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
@@ -80,7 +87,9 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.BitSet;
import java.util.Collections;
+import java.util.HashSet;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
@@ -1861,6 +1870,48 @@ public class TestTxnHandler {
}
}
+ @Test
+ public void testGetMaterializationInvalidationInfo() throws MetaException {
+ testGetMaterializationInvalidationInfo(
+ new ValidReadTxnList(new long[] {6, 11}, new BitSet(), 10L, 12L),
+ new ValidReaderWriteIdList(TableName.getDbTable("default", "t1"), new long[] { 2 }, new BitSet(), 1)
+ );
+ }
+
+ @Test
+ public void testGetMaterializationInvalidationInfoWhenTableHasNoException() throws MetaException {
+ testGetMaterializationInvalidationInfo(
+ new ValidReadTxnList(new long[] {6, 11}, new BitSet(), 10L, 12L),
+ new ValidReaderWriteIdList(TableName.getDbTable("default", "t1"), new long[0], new BitSet(), 1)
+ );
+ }
+
+ @Test
+ public void testGetMaterializationInvalidationInfoWhenCurrentTxnListHasNoException() throws MetaException {
+ testGetMaterializationInvalidationInfo(
+ new ValidReadTxnList(new long[0], new BitSet(), 10L, 12L),
+ new ValidReaderWriteIdList(TableName.getDbTable("default", "t1"), new long[] { 2 }, new BitSet(), 1)
+ );
+ }
+
+ private void testGetMaterializationInvalidationInfo(
+ ValidReadTxnList currentValidTxnList, ValidReaderWriteIdList... tableWriteIdList) throws MetaException {
+ ValidTxnWriteIdList validTxnWriteIdList = new ValidTxnWriteIdList(5L);
+ for (ValidReaderWriteIdList tableWriteId : tableWriteIdList) {
+ validTxnWriteIdList.addTableValidWriteIdList(tableWriteId);
+ }
+
+ CreationMetadata creationMetadata = new CreationMetadata();
+ creationMetadata.setDbName("default");
+ creationMetadata.setTblName("mat1");
+ creationMetadata.setTablesUsed(new HashSet<String>() {{ add("default.t1"); }});
+ creationMetadata.setValidTxnList(validTxnWriteIdList.toString());
+
+ Materialization materialization = txnHandler.getMaterializationInvalidationInfo(
+ creationMetadata, currentValidTxnList.toString());
+ assertFalse(materialization.isSourceTablesUpdateDeleteModified());
+ }
+
private void updateTxns(Connection conn) throws SQLException {
Statement stmt = conn.createStatement();
stmt.executeUpdate("UPDATE \"TXNS\" SET \"TXN_LAST_HEARTBEAT\" = \"TXN_LAST_HEARTBEAT\" + 1");
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index a9a00af..639ed48 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -2554,10 +2554,10 @@ abstract class TxnHandler implements TxnStore, TxnStore.MutexAPI {
queryCompletedCompactions.append(" AND (\"CC_HIGHEST_WRITE_ID\" > " + tblValidWriteIdList.getHighWatermark());
queryUpdateDelete.append(tblValidWriteIdList.getInvalidWriteIds().length == 0 ? ") " :
" OR \"CTC_WRITEID\" IN(" + StringUtils.join(",",
- Arrays.asList(ArrayUtils.toObject(tblValidWriteIdList.getInvalidWriteIds()))) + ") ");
+ Arrays.asList(ArrayUtils.toObject(tblValidWriteIdList.getInvalidWriteIds()))) + ") ) ");
queryCompletedCompactions.append(tblValidWriteIdList.getInvalidWriteIds().length == 0 ? ") " :
" OR \"CC_HIGHEST_WRITE_ID\" IN(" + StringUtils.join(",",
- Arrays.asList(ArrayUtils.toObject(tblValidWriteIdList.getInvalidWriteIds()))) + ") ");
+ Arrays.asList(ArrayUtils.toObject(tblValidWriteIdList.getInvalidWriteIds()))) + ") ) ");
queryUpdateDelete.append(") ");
queryCompletedCompactions.append(") ");
queryCompactionQueue.append(") ");