You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ma...@apache.org on 2019/08/14 04:44:59 UTC

[hive] branch master updated: HIVE-22092 : Fetch is failing with IllegalArgumentException: No ValidTxnList when refetch is done. (Mahesh Kumar Behera reviewed by Sankar Hariappan)

This is an automated email from the ASF dual-hosted git repository.

mahesh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new fba9c20  HIVE-22092 : Fetch is failing with IllegalArgumentException: No ValidTxnList when refetch is done. (Mahesh Kumar Behera reviewed by  Sankar Hariappan)
fba9c20 is described below

commit fba9c20ce9c9de919fb5d055d796d1e39f59e93e
Author: Mahesh Kumar Behera <ma...@apache.org>
AuthorDate: Wed Aug 14 10:10:10 2019 +0530

    HIVE-22092 : Fetch is failing with IllegalArgumentException: No ValidTxnList when refetch is done. (Mahesh Kumar Behera reviewed by  Sankar Hariappan)
---
 .../java/org/apache/hive/jdbc/TestJdbcDriver2.java | 35 ++++++++++++++++++++++
 .../org/apache/hadoop/hive/ql/exec/FetchTask.java  |  7 ++++-
 2 files changed, 41 insertions(+), 1 deletion(-)

diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 02d4360..4010535 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -81,6 +81,8 @@ import java.util.regex.Pattern;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.junit.rules.TestName;
 
+import static java.sql.ResultSet.CONCUR_READ_ONLY;
+import static java.sql.ResultSet.TYPE_SCROLL_INSENSITIVE;
 import static org.apache.hadoop.hive.conf.SystemVariables.SET_COLUMN_NAME;
 import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
 import static org.junit.Assert.assertEquals;
@@ -3210,6 +3212,39 @@ public class TestJdbcDriver2 {
     stmt1.close();
   }
 
+  // HIVE-22092: verify that a scroll-insensitive ResultSet over an ACID (insert_only)
+  // table can be re-iterated after beforeFirst() without a "No ValidTxnList" failure.
+  @Test
+  public void testResultNextAcidTable() throws Exception {
+    Statement stmt = con.createStatement(TYPE_SCROLL_INSENSITIVE, CONCUR_READ_ONLY);
+    try {
+      stmt.execute("set " + ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + "=true");
+      stmt.execute("set " + ConfVars.HIVE_TXN_MANAGER.varname +
+              "=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager");
+      stmt.execute("create table tbl (fld int) tblproperties(" +
+              "'transactional'='true','transactional_properties'='insert_only')");
+      stmt.execute("insert into tbl values (1)");
+      stmt.execute("insert into tbl values (2)");
+      stmt.execute("insert into tbl values (3)");
+      ResultSet res = stmt.executeQuery("select * from tbl");
+      assertNotNull(res);
+      int numRows = 0;
+      while (res.next()) {
+        numRows++;
+      }
+      // JUnit convention: expected value first, actual second.
+      assertEquals(3, numRows);
+      res.beforeFirst();
+      while (res.next()) {
+        numRows--;
+      }
+      assertEquals(0, numRows);
+      stmt.execute("drop table tbl");
+    } finally {
+      stmt.execute("set " + ConfVars.HIVE_SUPPORT_CONCURRENCY.varname + "=false");
+      stmt.execute("set " + ConfVars.HIVE_TXN_MANAGER.varname +
+              "=org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
+      stmt.close();
+    }
+  }
+
   // Test that opening a JDBC connection to a non-existent database throws a HiveSQLException
   @Test(expected = HiveSQLException.class)
   public void testConnectInvalidDatabase() throws SQLException {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
index caa9d83..93b1158 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
@@ -51,6 +51,7 @@ public class FetchTask extends Task<FetchWork> implements Serializable {
   private ListSinkOperator sink;
   private int totalRows;
   private static transient final Logger LOG = LoggerFactory.getLogger(FetchTask.class);
+  JobConf job = null;
 
   public FetchTask() {
     super();
@@ -68,7 +69,11 @@ public class FetchTask extends Task<FetchWork> implements Serializable {
 
     try {
       // Create a file system handle
-      JobConf job = new JobConf(conf);
+      if (job == null) {
+        // The job config should be initialized once per fetch task. In case of refetch, we should use the
+        // same config.
+        job = new JobConf(conf);
+      }
 
       Operator<?> source = work.getSource();
       if (source instanceof TableScanOperator) {