You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by dm...@apache.org on 2020/04/01 13:25:34 UTC
[hive] branch master updated: HIVE-23096: Review Code Path for
getResults (David Mollitor reviewed by Naveen Gangam)
This is an automated email from the ASF dual-hosted git repository.
dmollitor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 484d182 HIVE-23096: Review Code Path for getResults (David Mollitor reviewed by Naveen Gangam)
484d182 is described below
commit 484d1823152bb3becaf5a753673d581fa405d9e5
Author: David Mollitor <dm...@apache.org>
AuthorDate: Wed Apr 1 09:25:15 2020 -0400
HIVE-23096: Review Code Path for getResults (David Mollitor reviewed by Naveen Gangam)
---
ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 11 +++++++----
.../apache/hive/service/cli/operation/SQLOperation.java | 16 +++++++++-------
2 files changed, 16 insertions(+), 11 deletions(-)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 7024910..517b0cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
@@ -888,9 +889,10 @@ public class Driver implements IDriver {
}
int numRows = 0;
- String row = null;
while (numRows < maxRows) {
+ final String row;
+
if (driverContext.getResStream() == null) {
return (numRows > 0);
}
@@ -900,16 +902,17 @@ public class Driver implements IDriver {
try {
ss = Utilities.readColumn(driverContext.getResStream(), bos);
if (bos.getLength() > 0) {
- row = new String(bos.getData(), 0, bos.getLength(), "UTF-8");
+ row = new String(bos.getData(), 0, bos.getLength(), StandardCharsets.UTF_8);
} else if (ss == Utilities.StreamStatus.TERMINATED) {
- row = new String();
+ row = "";
+ } else {
+ row = null;
}
if (row != null) {
numRows++;
res.add(row);
}
- row = null;
} catch (IOException e) {
CONSOLE.printError("FAILED: Unexpected IO exception : " + e.getMessage());
return false;
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 96770f4..eefd644 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -96,6 +96,7 @@ public class SQLOperation extends ExecuteStatementOperation {
private ScheduledExecutorService timeoutExecutor;
private final boolean runAsync;
private final long operationLogCleanupDelayMs;
+ private final ArrayList<Object> convey = new ArrayList<>();
/**
* A map to track query count running by each user
@@ -444,8 +445,6 @@ public class SQLOperation extends ExecuteStatementOperation {
return resultSchema;
}
- private transient final List<Object> convey = new ArrayList<Object>();
-
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows)
throws HiveSQLException {
@@ -461,7 +460,6 @@ public class SQLOperation extends ExecuteStatementOperation {
maxRows = 1;
isBlobBased = true;
}
- driver.setMaxRows(Math.toIntExact(maxRows));
RowSet rowSet = RowSetFactory.create(getResultSetSchema(), getProtocolVersion(), isBlobBased);
try {
/* if client is requesting fetch-from-start and its not the first time reading from this operation
@@ -471,15 +469,19 @@ public class SQLOperation extends ExecuteStatementOperation {
driver.resetFetch();
}
fetchStarted = true;
- driver.setMaxRows(Math.toIntExact(maxRows));
+
+ final int capacity = Math.toIntExact(maxRows);
+ convey.ensureCapacity(capacity);
+ driver.setMaxRows(capacity);
if (driver.getResults(convey)) {
+ if (convey.size() == capacity) {
+ LOG.info("Result set buffer filled to capacity [{}]", capacity);
+ }
return decode(convey, rowSet);
}
return rowSet;
- } catch (IOException e) {
- throw new HiveSQLException(e);
} catch (Exception e) {
- throw new HiveSQLException(e);
+ throw new HiveSQLException("Unable to get the next row set", e);
} finally {
convey.clear();
}