You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by om...@apache.org on 2017/05/31 16:49:54 UTC
[2/8] hive git commit: HIVE-14362: Support explain analyze in Hive (addendum)
HIVE-14362: Support explain analyze in Hive (addendum)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3f82447a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3f82447a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3f82447a
Branch: refs/heads/branch-2.2
Commit: 3f82447acc4939d0bbedcd14276b1f40a261b659
Parents: a046198
Author: Owen O'Malley <om...@apache.org>
Authored: Thu May 25 13:39:08 2017 -0700
Committer: Owen O'Malley <om...@apache.org>
Committed: Wed May 31 09:41:32 2017 -0700
----------------------------------------------------------------------
ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 12 ++++++------
.../hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java | 2 ++
2 files changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/3f82447a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 08bd040..c67ea86 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -396,12 +396,12 @@ public class Driver implements CommandProcessor {
}
if (ctx != null && ctx.getExplainAnalyze() != AnalyzeState.RUNNING) {
- close();
+ closeInProcess(false);
}
if (isInterrupted()) {
return handleInterruption("at beginning of compilation."); //indicate if need clean resource
}
-
+
if (resetTaskIds) {
TaskFactory.resetId();
}
@@ -444,7 +444,7 @@ public class Driver implements CommandProcessor {
if (ctx == null) {
ctx = new Context(conf);
}
-
+
ctx.setTryCount(getTryCount());
ctx.setCmd(command);
ctx.setHDFSCleanup(true);
@@ -687,7 +687,7 @@ public class Driver implements CommandProcessor {
}
// The following union operation returns a union, which traverses over the
- // first set once and then then over each element of second set, in order,
+ // first set once and then then over each element of second set, in order,
// that is not contained in first. This means it doesn't replace anything
// in first set, and would preserve the WriteType in WriteEntity in first
// set in case of outputs list.
@@ -1009,7 +1009,7 @@ public class Driver implements CommandProcessor {
conf.set(ValidTxnList.VALID_TXNS_KEY, txnStr);
if(plan.getFetchTask() != null) {
/**
- * This is needed for {@link HiveConf.ConfVars.HIVEFETCHTASKCONVERSION} optimization which
+ * This is needed for {@link HiveConf.ConfVars.HIVEFETCHTASKCONVERSION} optimization which
* initializes JobConf in FetchOperator before recordValidTxns() but this has to be done
* after locks are acquired to avoid race conditions in ACID.
*/
@@ -2348,7 +2348,7 @@ public class Driver implements CommandProcessor {
this.operationId = opId;
}
- /**
+ /**
* Resets QueryState to get new queryId on Driver reuse.
*/
public void resetQueryState() {
http://git-wip-us.apache.org/repos/asf/hive/blob/3f82447a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
index a573808..bd566e9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
@@ -268,8 +268,10 @@ public class TestUpdateDeleteSemanticAnalyzer {
// connection, which is conveniently created by the semantic analyzer.
Map<String, String> params = new HashMap<String, String>(1);
params.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
+ db.dropTable("T");
db.createTable("T", Arrays.asList("a", "b"), null, OrcInputFormat.class,
OrcOutputFormat.class, 2, Arrays.asList("a"), params);
+ db.dropTable("U");
db.createTable("U", Arrays.asList("a", "b"), Arrays.asList("ds"), OrcInputFormat.class,
OrcOutputFormat.class, 2, Arrays.asList("a"), params);
Table u = db.getTable("U");