Posted to commits@hive.apache.org by kg...@apache.org on 2018/07/02 07:45:16 UTC
hive git commit: HIVE-20008: Fix second compilation errors in ql (Zoltan Haindrich reviewed by Vineet Garg)
Repository: hive
Updated Branches:
refs/heads/master 221dbe085 -> b9bac8e64
HIVE-20008: Fix second compilation errors in ql (Zoltan Haindrich reviewed by Vineet Garg)
Signed-off-by: Zoltan Haindrich <ki...@rxd.hu>
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b9bac8e6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b9bac8e6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b9bac8e6
Branch: refs/heads/master
Commit: b9bac8e641f99e2191040b2ec43c730cb217a6bb
Parents: 221dbe0
Author: Zoltan Haindrich <ki...@rxd.hu>
Authored: Mon Jul 2 09:37:43 2018 +0200
Committer: Zoltan Haindrich <ki...@rxd.hu>
Committed: Mon Jul 2 09:37:43 2018 +0200
----------------------------------------------------------------------
.../org/apache/hadoop/hive/ql/QTestUtil.java | 3 +-
.../hive/ql/parse/BaseSemanticAnalyzer.java | 15 +--
.../hive/ql/parse/DDLSemanticAnalyzer.java | 28 +++--
.../hive/ql/parse/ExplainSemanticAnalyzer.java | 5 +-
.../hive/ql/parse/LoadSemanticAnalyzer.java | 28 ++---
.../hadoop/hive/ql/parse/SemanticAnalyzer.java | 107 ++++++++++---------
.../ql/parse/UpdateDeleteSemanticAnalyzer.java | 16 +--
.../apache/hadoop/hive/ql/plan/ExplainWork.java | 18 ++--
.../hadoop/hive/ql/reexec/ReExecDriver.java | 2 +-
.../ql/parse/TestMacroSemanticAnalyzer.java | 9 +-
.../hadoop/hive/ql/parse/TestQBCompact.java | 15 ++-
.../authorization/AuthorizationTestUtil.java | 8 +-
12 files changed, 126 insertions(+), 128 deletions(-)
----------------------------------------------------------------------
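The recurring change in this patch replaces the type `Task<? extends Serializable>` with the shorter `Task<?>` throughout ql. The two spellings admit the same set of task types because Hive's Task already bounds its type parameter by Serializable at the declaration site, so restating the bound at every use site adds nothing. A minimal sketch of that equivalence, using a hypothetical stand-in class rather than Hive's actual Task:

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in: the type parameter is already bounded by
// Serializable where the class is declared.
abstract class Task<T extends Serializable> implements Serializable {
}

class WildcardDemo {
  // Before the patch: the bound is restated at every use site.
  List<Task<? extends Serializable>> verbose = new ArrayList<>();

  // After the patch: Task<?> admits exactly the same instantiations,
  // because every Task<T> already requires T extends Serializable.
  List<Task<?>> concise = new ArrayList<>();
}

The remaining hunks are mechanical cleanups in the same spirit: import statements reordered, single-statement guard clauses wrapped in braces, and redundant casts dropped.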
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 0bbd751..2dfd2aa 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -32,7 +32,6 @@ import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
-import java.io.Serializable;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URL;
@@ -1781,7 +1780,7 @@ public class QTestUtil {
}
- public List<Task<? extends Serializable>> analyzeAST(ASTNode ast) throws Exception {
+ public List<Task<?>> analyzeAST(ASTNode ast) throws Exception {
// Do semantic analysis and plan generation
Context ctx = new Context(conf);
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index ebea31d..be43686 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.parse;
import java.io.IOException;
-import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.text.ParseException;
import java.util.ArrayList;
@@ -36,8 +35,8 @@ import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
-import org.antlr.runtime.tree.Tree;
import org.antlr.runtime.TokenRewriteStream;
+import org.antlr.runtime.tree.Tree;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.conf.Configuration;
@@ -246,7 +245,7 @@ public abstract class BaseSemanticAnalyzer {
this.queryState = queryState;
this.conf = queryState.getConf();
this.db = db;
- rootTasks = new ArrayList<Task<? extends Serializable>>();
+ rootTasks = new ArrayList<Task<?>>();
LOG = LoggerFactory.getLogger(this.getClass().getName());
console = new LogHelper(LOG);
idToTableNameMap = new HashMap<String, String>();
@@ -289,7 +288,7 @@ public abstract class BaseSemanticAnalyzer {
// Implementations may choose to override this
}
- public List<Task<? extends Serializable>> getRootTasks() {
+ public List<Task<?>> getRootTasks() {
return rootTasks;
}
@@ -309,7 +308,7 @@ public abstract class BaseSemanticAnalyzer {
}
protected void reset(boolean clearPartsCache) {
- rootTasks = new ArrayList<Task<? extends Serializable>>();
+ rootTasks = new ArrayList<Task<?>>();
}
public static String stripIdentifierQuotes(String val) {
@@ -841,7 +840,9 @@ public abstract class BaseSemanticAnalyzer {
// it throws an error.
// This method is used to validate check expression since check expression isn't allowed to have subquery
private static void validateCheckExprAST(ASTNode checkExpr) throws SemanticException {
- if(checkExpr == null) return;
+ if(checkExpr == null) {
+ return;
+ }
if(checkExpr.getType() == HiveParser.TOK_SUBQUERY_EXPR) {
throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Subqueries are not allowed "
+ "in Check Constraints"));
@@ -2241,7 +2242,7 @@ public abstract class BaseSemanticAnalyzer {
return detail == null ? message.getMsg() : message.getMsg(detail.toString());
}
- public List<Task<? extends Serializable>> getAllRootTasks() {
+ public List<Task<?>> getAllRootTasks() {
return rootTasks;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index f9d6d41..9ad4689 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -22,7 +22,6 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASELOCATION;
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASEPROPERTIES;
import java.io.FileNotFoundException;
-import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.URI;
@@ -73,7 +72,6 @@ import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
import org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask;
-import org.apache.hadoop.hive.ql.exec.DDLTask;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -110,7 +108,6 @@ import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
-import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
import org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition;
import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
import org.apache.hadoop.hive.ql.plan.AlterWMTriggerDesc;
@@ -124,6 +121,7 @@ import org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc;
import org.apache.hadoop.hive.ql.plan.CreateResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.CreateWMTriggerDesc;
import org.apache.hadoop.hive.ql.plan.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DescDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
@@ -686,7 +684,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
}
private void analyzeGrantRevokeRole(boolean grant, ASTNode ast) throws SemanticException {
- Task<? extends Serializable> task;
+ Task<?> task;
if(grant) {
task = hiveAuthorizationTaskFactory.createGrantRoleTask(ast, getInputs(), getOutputs());
} else {
@@ -698,7 +696,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
}
private void analyzeShowGrant(ASTNode ast) throws SemanticException {
- Task<? extends Serializable> task = hiveAuthorizationTaskFactory.
+ Task<?> task = hiveAuthorizationTaskFactory.
createShowGrantTask(ast, ctx.getResFile(), getInputs(), getOutputs());
if(task != null) {
rootTasks.add(task);
@@ -707,7 +705,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
}
private void analyzeGrant(ASTNode ast) throws SemanticException {
- Task<? extends Serializable> task = hiveAuthorizationTaskFactory.
+ Task<?> task = hiveAuthorizationTaskFactory.
createGrantTask(ast, getInputs(), getOutputs());
if(task != null) {
rootTasks.add(task);
@@ -715,7 +713,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
}
private void analyzeRevoke(ASTNode ast) throws SemanticException {
- Task<? extends Serializable> task = hiveAuthorizationTaskFactory.
+ Task<?> task = hiveAuthorizationTaskFactory.
createRevokeTask(ast, getInputs(), getOutputs());
if(task != null) {
rootTasks.add(task);
@@ -723,7 +721,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
}
private void analyzeCreateRole(ASTNode ast) throws SemanticException {
- Task<? extends Serializable> task = hiveAuthorizationTaskFactory.
+ Task<?> task = hiveAuthorizationTaskFactory.
createCreateRoleTask(ast, getInputs(), getOutputs());
if(task != null) {
rootTasks.add(task);
@@ -731,7 +729,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
}
private void analyzeDropRole(ASTNode ast) throws SemanticException {
- Task<? extends Serializable> task = hiveAuthorizationTaskFactory.
+ Task<?> task = hiveAuthorizationTaskFactory.
createDropRoleTask(ast, getInputs(), getOutputs());
if(task != null) {
rootTasks.add(task);
@@ -739,7 +737,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
}
private void analyzeShowRoleGrant(ASTNode ast) throws SemanticException {
- Task<? extends Serializable> task = hiveAuthorizationTaskFactory.
+ Task<?> task = hiveAuthorizationTaskFactory.
createShowRoleGrantTask(ast, ctx.getResFile(), getInputs(), getOutputs());
if(task != null) {
rootTasks.add(task);
@@ -1483,7 +1481,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
TruncateTableDesc truncateTblDesc = new TruncateTableDesc(tableName, partSpec, null);
DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), truncateTblDesc);
- Task<? extends Serializable> truncateTask = TaskFactory.get(ddlWork);
+ Task<?> truncateTask = TaskFactory.get(ddlWork);
// Is this a truncate column command
List<String> columnNames = null;
@@ -1613,7 +1611,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
basicStatsWork.setClearAggregatorStats(true);
StatsWork columnStatsWork = new StatsWork(table, basicStatsWork, conf);
- Task<? extends Serializable> statTask = TaskFactory.get(columnStatsWork);
+ Task<?> statTask = TaskFactory.get(columnStatsWork);
moveTsk.addDependentTask(statTask);
}
} catch (HiveException e) {
@@ -2075,7 +2073,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
addInputsOutputsAlterTable(tableName, partSpec, AlterTableTypes.MERGEFILES);
DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), mergeDesc);
ddlWork.setNeedLock(true);
- Task<? extends Serializable> mergeTask = TaskFactory.get(ddlWork);
+ Task<?> mergeTask = TaskFactory.get(ddlWork);
TableDesc tblDesc = Utilities.getTableDesc(tblObj);
Path queryTmpdir = ctx.getExternalTmpPath(newTblPartLoc);
mergeDesc.setOutputDir(queryTmpdir);
@@ -2100,7 +2098,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
basicStatsWork.setClearAggregatorStats(true);
StatsWork columnStatsWork = new StatsWork(tblObj, basicStatsWork, conf);
- Task<? extends Serializable> statTask = TaskFactory.get(columnStatsWork);
+ Task<?> statTask = TaskFactory.get(columnStatsWork);
moveTsk.addDependentTask(statTask);
}
@@ -3176,7 +3174,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
case HiveParser.TOK_RESTRICT:
break;
default:
- constraintChild = (ASTNode) child;
+ constraintChild = child;
}
}
List<SQLPrimaryKey> primaryKeys = null;
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
index 3a7d99d..6f0a803 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.parse;
import java.io.IOException;
-import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -165,7 +164,7 @@ public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
sem.validate();
ctx.setResFile(ctx.getLocalTmpPath());
- List<Task<? extends Serializable>> tasks = sem.getAllRootTasks();
+ List<Task<?>> tasks = sem.getAllRootTasks();
if (tasks == null) {
tasks = Collections.emptyList();
}
@@ -262,7 +261,7 @@ public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
@Override
public boolean skipAuthorization() {
- List<Task<? extends Serializable>> rootTasks = getRootTasks();
+ List<Task<?>> rootTasks = getRootTasks();
assert rootTasks != null && rootTasks.size() == 1;
Task task = rootTasks.get(0);
return task instanceof ExplainTask && ((ExplainTask)task).getWork().isAuthorize();
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
index cbacd05..8d33cf5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
@@ -18,27 +18,26 @@
package org.apache.hadoop.hive.ql.parse;
-import org.apache.commons.codec.DecoderException;
-import org.apache.commons.codec.net.URLCodec;
-import org.apache.hadoop.hive.conf.HiveConf.StrictChecks;
import java.io.IOException;
-import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
+import java.util.ArrayList;
import java.util.Arrays;
+import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-import java.util.ArrayList;
-import java.util.HashSet;
import org.antlr.runtime.tree.Tree;
+import org.apache.commons.codec.DecoderException;
+import org.apache.commons.codec.net.URLCodec;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.StrictChecks;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.Context;
@@ -54,19 +53,18 @@ import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.plan.StatsWork;
import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.BasicStatsWork;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType;
import org.apache.hadoop.hive.ql.plan.MoveWork;
-import org.apache.hadoop.hive.ql.plan.BasicStatsWork;
+import org.apache.hadoop.hive.ql.plan.StatsWork;
import org.apache.hadoop.mapred.InputFormat;
-
-import com.google.common.collect.Lists;
-import org.apache.hadoop.mapred.TextInputFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.collect.Lists;
+
/**
* LoadSemanticAnalyzer.
*
@@ -339,7 +337,9 @@ public class LoadSemanticAnalyzer extends SemanticAnalyzer {
// make sure the arguments make sense
List<FileStatus> files = applyConstraintsAndGetFiles(fromURI, ts.tableHandle);
- if (queryReWritten) return;
+ if (queryReWritten) {
+ return;
+ }
// for managed tables, make sure the file formats match
if (TableType.MANAGED_TABLE.equals(ts.tableHandle.getTableType())
@@ -407,7 +407,7 @@ public class LoadSemanticAnalyzer extends SemanticAnalyzer {
loadTableWork.setInheritTableSpecs(false);
}
- Task<? extends Serializable> childTask = TaskFactory.get(
+ Task<?> childTask = TaskFactory.get(
new MoveWork(getInputs(), getOutputs(), loadTableWork, null, true,
isLocal)
);
@@ -418,7 +418,7 @@ public class LoadSemanticAnalyzer extends SemanticAnalyzer {
// Some stats like number of rows require a scan of the data
// However, some other stats, like number of files, do not require a complete scan
// Update the stats which do not require a complete scan.
- Task<? extends Serializable> statTask = null;
+ Task<?> statTask = null;
if (conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
BasicStatsWork basicStatsWork = new BasicStatsWork(loadTableWork);
basicStatsWork.setNoStatsAggregator(true);
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index b389a9b..2731f19 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -18,13 +18,32 @@
package org.apache.hadoop.hive.ql.parse;
-import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Sets;
-import com.google.common.math.IntMath;
-import com.google.common.math.LongMath;
+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.security.AccessControlException;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Deque;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.Queue;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.function.Supplier;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+import java.util.stream.Collectors;
+
import org.antlr.runtime.ClassicToken;
import org.antlr.runtime.CommonToken;
import org.antlr.runtime.Token;
@@ -111,11 +130,11 @@ import org.apache.hadoop.hive.ql.io.AcidInputFormat;
import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.AcidUtils.Operation;
-import org.apache.hadoop.hive.ql.io.arrow.ArrowColumnarBatchSerDe;
import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.hive.ql.io.NullRowsInputFormat;
+import org.apache.hadoop.hive.ql.io.arrow.ArrowColumnarBatchSerDe;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
@@ -168,7 +187,6 @@ import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowType;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
-import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
import org.apache.hadoop.hive.ql.plan.CreateViewDesc;
@@ -254,33 +272,13 @@ import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.security.UserGroupInformation;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.Serializable;
-import java.security.AccessControlException;
-import java.util.ArrayDeque;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Deque;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Optional;
-import java.util.Queue;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.UUID;
-import java.util.function.Supplier;
-import java.util.regex.Pattern;
-import java.util.regex.PatternSyntaxException;
-import java.util.stream.Collectors;
-
-import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;
+import com.google.common.base.Splitter;
+import com.google.common.base.Strings;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Sets;
+import com.google.common.math.IntMath;
+import com.google.common.math.LongMath;
/**
* Implementation of the semantic analyzer. It generates the query plan.
@@ -667,7 +665,9 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
*/
private boolean isInsertInto(QBParseInfo qbp, String dest) {
// get the destination and check if it is TABLE
- if(qbp == null || dest == null ) return false;
+ if(qbp == null || dest == null ) {
+ return false;
+ }
ASTNode destNode = qbp.getDestForClause(dest);
if(destNode != null && destNode.getType() == HiveParser.TOK_TAB) {
return true;
@@ -680,7 +680,9 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
* e.g. VALUES(1,3..)
*/
private boolean isValueClause(ASTNode select) {
- if(select == null) return false;
+ if(select == null) {
+ return false;
+ }
if(select.getChildCount() == 1) {
ASTNode selectExpr = (ASTNode)select.getChild(0);
if(selectExpr.getChildCount() == 1 ) {
@@ -1262,7 +1264,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
private final CTEClause rootClause = new CTEClause(null, null);
@Override
- public List<Task<? extends Serializable>> getAllRootTasks() {
+ public List<Task<?>> getAllRootTasks() {
if (!rootTasksResolved) {
rootTasks = toRealRootTasks(rootClause.asExecutionOrder());
rootTasksResolved = true;
@@ -1308,7 +1310,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
Table table;
SemanticAnalyzer source;
- List<Task<? extends Serializable>> getTasks() {
+ List<Task<?>> getTasks() {
return source == null ? null : source.rootTasks;
}
@@ -1333,11 +1335,11 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
}
}
- private List<Task<? extends Serializable>> toRealRootTasks(List<CTEClause> execution) {
- List<Task<? extends Serializable>> cteRoots = new ArrayList<>();
- List<Task<? extends Serializable>> cteLeafs = new ArrayList<>();
- List<Task<? extends Serializable>> curTopRoots = null;
- List<Task<? extends Serializable>> curBottomLeafs = null;
+ private List<Task<?>> toRealRootTasks(List<CTEClause> execution) {
+ List<Task<?>> cteRoots = new ArrayList<>();
+ List<Task<?>> cteLeafs = new ArrayList<>();
+ List<Task<?>> curTopRoots = null;
+ List<Task<?>> curBottomLeafs = null;
for (int i = 0; i < execution.size(); i++) {
CTEClause current = execution.get(i);
if (current.parents.isEmpty() && curTopRoots != null) {
@@ -1345,7 +1347,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
cteLeafs.addAll(curBottomLeafs);
curTopRoots = curBottomLeafs = null;
}
- List<Task<? extends Serializable>> curTasks = current.getTasks();
+ List<Task<?>> curTasks = current.getTasks();
if (curTasks == null) {
continue;
}
@@ -6987,7 +6989,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
checkExpr.addChild(ASTBuilder.createAST(oldColChild.getType(), newColRef));
}
else {
- for(int i=0; i< ((ASTNode)checkExpr).getChildCount(); i++) {
+ for(int i=0; i< checkExpr.getChildCount(); i++) {
replaceColumnReference((ASTNode)(checkExpr.getChild(i)), col2Col, inputRR);
}
}
@@ -12808,19 +12810,19 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
HiveConf.ConfVars.HIVE_REWORK_MAPREDWORK);
// validate all tasks
- for (Task<? extends Serializable> rootTask : rootTasks) {
+ for (Task<?> rootTask : rootTasks) {
validate(rootTask, reworkMapredWork);
}
}
- private void validate(Task<? extends Serializable> task, boolean reworkMapredWork)
+ private void validate(Task<?> task, boolean reworkMapredWork)
throws SemanticException {
Utilities.reworkMapRedWork(task, reworkMapredWork, conf);
if (task.getChildTasks() == null) {
return;
}
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
validate(childTask, reworkMapredWork);
}
}
@@ -14921,10 +14923,11 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
rewrittenQueryStr.append(" partition (");
boolean first = true;
for (FieldSchema fschema : partCols) {
- if (first)
+ if (first) {
first = false;
- else
+ } else {
rewrittenQueryStr.append(", ");
+ }
//would be nice if there was a way to determine if quotes are needed
rewrittenQueryStr.append(HiveUtils.unparseIdentifier(fschema.getName(), this.conf));
}
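The large import moves in LoadSemanticAnalyzer and SemanticAnalyzer above converge on one ordering. A hypothetical file header illustrating it, built from imports that appear in this patch: static imports first, then java.*, then org.* imports alphabetically, then com.* imports, with a blank line between groups.

import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;

import java.io.IOException;
import java.util.List;

import org.antlr.runtime.tree.Tree;
import org.apache.hadoop.hive.ql.exec.Task;

import com.google.common.collect.Lists;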
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
index d9483f8..ce7e65a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
@@ -302,7 +302,7 @@ public class UpdateDeleteSemanticAnalyzer extends SemanticAnalyzer {
/**
* Makes the exportTask run after all other tasks of the "insert into T ..." are done.
*/
- private void addExportTask(List<Task<? extends Serializable>> rootTasks,
+ private void addExportTask(List<Task<?>> rootTasks,
Task<ExportWork> exportTask, Task<DDLWork> alterTable) {
for(Task<? extends Serializable> t : rootTasks) {
if(t.getNumChild() <= 0) {
@@ -315,8 +315,9 @@ public class UpdateDeleteSemanticAnalyzer extends SemanticAnalyzer {
}
}
}
- private List<Task<? extends Serializable>> findStatsTasks(
- List<Task<? extends Serializable>> rootTasks, List<Task<? extends Serializable>> statsTasks) {
+
+ private List<Task<?>> findStatsTasks(
+ List<Task<?>> rootTasks, List<Task<?>> statsTasks) {
for(Task<? extends Serializable> t : rootTasks) {
if (t instanceof StatsTask) {
if(statsTasks == null) {
@@ -330,16 +331,17 @@ public class UpdateDeleteSemanticAnalyzer extends SemanticAnalyzer {
}
return statsTasks;
}
- private void removeStatsTasks(List<Task<? extends Serializable>> rootTasks) {
- List<Task<? extends Serializable>> statsTasks = findStatsTasks(rootTasks, null);
+
+ private void removeStatsTasks(List<Task<?>> rootTasks) {
+ List<Task<?>> statsTasks = findStatsTasks(rootTasks, null);
if(statsTasks == null) {
return;
}
- for(Task<? extends Serializable> statsTask : statsTasks) {
+ for (Task<?> statsTask : statsTasks) {
if(statsTask.getParentTasks() == null) {
continue; //should never happen
}
- for(Task<? extends Serializable> t : new ArrayList<>(statsTask.getParentTasks())) {
+ for (Task<?> t : new ArrayList<>(statsTask.getParentTasks())) {
t.removeDependentTask(statsTask);
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
index cde7852..2cdf8cf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
@@ -39,8 +39,8 @@ public class ExplainWork implements Serializable {
private static final long serialVersionUID = 1L;
private Path resFile;
- private ArrayList<Task<? extends Serializable>> rootTasks;
- private Task<? extends Serializable> fetchTask;
+ private ArrayList<Task<?>> rootTasks;
+ private Task<?> fetchTask;
private HashSet<ReadEntity> inputs;
private ParseContext pCtx;
@@ -57,13 +57,13 @@ public class ExplainWork implements Serializable {
public ExplainWork(Path resFile,
ParseContext pCtx,
- List<Task<? extends Serializable>> rootTasks,
- Task<? extends Serializable> fetchTask,
+ List<Task<?>> rootTasks,
+ Task<?> fetchTask,
BaseSemanticAnalyzer analyzer,
ExplainConfiguration config,
String cboInfo) {
this.resFile = resFile;
- this.rootTasks = new ArrayList<Task<? extends Serializable>>(rootTasks);
+ this.rootTasks = new ArrayList<Task<?>>(rootTasks);
this.fetchTask = fetchTask;
this.analyzer = analyzer;
if (analyzer != null) {
@@ -82,19 +82,19 @@ public class ExplainWork implements Serializable {
this.resFile = resFile;
}
- public ArrayList<Task<? extends Serializable>> getRootTasks() {
+ public ArrayList<Task<?>> getRootTasks() {
return rootTasks;
}
- public void setRootTasks(ArrayList<Task<? extends Serializable>> rootTasks) {
+ public void setRootTasks(ArrayList<Task<?>> rootTasks) {
this.rootTasks = rootTasks;
}
- public Task<? extends Serializable> getFetchTask() {
+ public Task<?> getFetchTask() {
return fetchTask;
}
- public void setFetchTask(Task<? extends Serializable> fetchTask) {
+ public void setFetchTask(Task<?> fetchTask) {
this.fetchTask = fetchTask;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
index 501f0b4..3bc3b29 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/reexec/ReExecDriver.java
@@ -80,7 +80,7 @@ public class ReExecDriver implements IDriver {
}
@Override
- public void postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks)
+ public void postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<?>> rootTasks)
throws SemanticException {
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index 906d70d..0334cf2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -17,11 +17,8 @@
*/
package org.apache.hadoop.hive.ql.parse;
-import java.io.Serializable;
import java.util.List;
-import junit.framework.Assert;
-
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.Context;
@@ -33,6 +30,8 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMacro;
import org.junit.Before;
import org.junit.Test;
+import junit.framework.Assert;
+
public class TestMacroSemanticAnalyzer {
private MacroSemanticAnalyzer analyzer;
@@ -54,9 +53,9 @@ public class TestMacroSemanticAnalyzer {
}
private void analyze(ASTNode ast) throws Exception {
analyzer.analyze(ast, context);
- List<Task<? extends Serializable>> rootTasks = analyzer.getRootTasks();
+ List<Task<?>> rootTasks = analyzer.getRootTasks();
Assert.assertEquals(1, rootTasks.size());
- for(Task<? extends Serializable> task : rootTasks) {
+ for (Task<?> task : rootTasks) {
Assert.assertEquals(0, task.executeTask(null));
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
index 49d900b..9a45ccb 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
@@ -17,7 +17,10 @@
*/
package org.apache.hadoop.hive.ql.parse;
-import junit.framework.Assert;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
@@ -31,15 +34,11 @@ import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.session.SessionState;
-import org.junit.BeforeClass;
import org.junit.AfterClass;
+import org.junit.BeforeClass;
import org.junit.Test;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import junit.framework.Assert;
/**
* Tests for parsing and semantic analysis of ALTER TABLE ... compact.
@@ -81,7 +80,7 @@ public class TestQBCompact {
ASTNode head = (ASTNode)hd.parse(query).getChild(0);
BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(queryState, head);
a.analyze(head, new Context(conf));
- List<Task<? extends Serializable>> roots = a.getRootTasks();
+ List<Task<?>> roots = a.getRootTasks();
Assert.assertEquals(1, roots.size());
return ((DDLWork)roots.get(0).getWork()).getAlterTblSimpleDesc();
}
http://git-wip-us.apache.org/repos/asf/hive/blob/b9bac8e6/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
index a76e2ea..40753b6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/AuthorizationTestUtil.java
@@ -17,22 +17,20 @@
*/
package org.apache.hadoop.hive.ql.parse.authorization;
-import java.io.Serializable;
import java.util.List;
-import junit.framework.Assert;
-
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseUtils;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.session.SessionState;
+import junit.framework.Assert;
+
/**
* Util function for authorization tests
*/
@@ -50,7 +48,7 @@ public class AuthorizationTestUtil {
DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db);
SessionState.start(queryState.getConf());
analyzer.analyze(ast, new Context(queryState.getConf()));
- List<Task<? extends Serializable>> rootTasks = analyzer.getRootTasks();
+ List<Task<?>> rootTasks = analyzer.getRootTasks();
return (DDLWork) inList(rootTasks).ofSize(1).get(0).getWork();
}