Posted to commits@hive.apache.org by jc...@apache.org on 2019/09/30 21:48:25 UTC
[hive] branch master updated: HIVE-21975: Fix incremental compilation (Steve Carlin, reviewed by Zoltan Haindrich)
This is an automated email from the ASF dual-hosted git repository.
jcamacho pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new e06bf8d HIVE-21975: Fix incremental compilation (Steve Carlin, reviewed by Zoltan Haindrich)
e06bf8d is described below
commit e06bf8d273f3e391b269da4d3217f12bbc165cfc
Author: Steve Carlin <sc...@cloudera.com>
AuthorDate: Mon Sep 30 14:47:51 2019 -0700
HIVE-21975: Fix incremental compilation (Steve Carlin, reviewed by Zoltan Haindrich)
Close apache/hive#786
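
For readers skimming the diff below: the change is mechanical. Every use-site occurrence of Task<? extends Serializable> becomes Task<?>. The Task class already declares its type parameter with the bound (see "public abstract class Task<T extends Serializable>" in ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java further down), so restating the bound at each use site adds no type safety. A minimal standalone sketch of why the two spellings admit exactly the same tasks follows; StringTask and Demo are illustrative names, not Hive classes.

    import java.io.Serializable;
    import java.util.ArrayList;
    import java.util.List;

    // Sketch only; mirrors the shape of org.apache.hadoop.hive.ql.exec.Task.
    // The Serializable bound is declared once, on the class itself.
    abstract class Task<T extends Serializable> {
      private final List<Task<?>> children = new ArrayList<>();

      // Before this patch Hive spelled this List<Task<? extends Serializable>>.
      // Both wildcard forms accept the same set of tasks, because every
      // Task's payload type is Serializable by construction.
      List<Task<?>> getChildTasks() { return children; }
    }

    // Illustrative concrete task (String implements Serializable).
    class StringTask extends Task<String> { }

    class Demo {
      public static void main(String[] args) {
        Task<?> a = new StringTask();                       // plain wildcard
        Task<? extends Serializable> b = new StringTask();  // redundant bound
        a.getChildTasks().add(b);  // a Task<? extends Serializable> is a Task<?>
        System.out.println(a.getChildTasks().size());       // prints 1
      }
    }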
---
.../cli/SemanticAnalysis/CreateDatabaseHook.java | 2 +-
.../cli/SemanticAnalysis/CreateTableHook.java | 2 +-
.../cli/SemanticAnalysis/HCatSemanticAnalyzer.java | 2 +-
.../SemanticAnalysis/HCatSemanticAnalyzerBase.java | 6 +-
.../org/apache/hadoop/hive/hooks/TestHs2Hooks.java | 2 +-
.../hive/ql/parse/TestReplicationScenarios.java | 4 +-
.../apache/hive/jdbc/miniHS2/TestHs2Metrics.java | 2 +-
.../hooks/VerifyHiveSortedInputFormatUsedHook.java | 4 +-
.../hive/ql/hooks/VerifyHooksRunInOrder.java | 4 +-
.../ql/metadata/DummySemanticAnalyzerHook.java | 4 +-
.../ql/metadata/DummySemanticAnalyzerHook1.java | 2 +-
ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 32 ++++----
.../org/apache/hadoop/hive/ql/DriverContext.java | 12 +--
.../java/org/apache/hadoop/hive/ql/HookRunner.java | 2 +-
.../org/apache/hadoop/hive/ql/QueryDisplay.java | 6 +-
.../java/org/apache/hadoop/hive/ql/QueryPlan.java | 36 ++++-----
.../hadoop/hive/ql/exec/ConditionalTask.java | 30 ++++----
.../org/apache/hadoop/hive/ql/exec/NodeUtils.java | 4 +-
.../java/org/apache/hadoop/hive/ql/exec/Task.java | 66 ++++++++--------
.../apache/hadoop/hive/ql/exec/TaskFactory.java | 8 +-
.../org/apache/hadoop/hive/ql/exec/TaskRunner.java | 6 +-
.../org/apache/hadoop/hive/ql/exec/Utilities.java | 18 ++---
.../hive/ql/exec/mr/HadoopJobExecHelper.java | 4 +-
.../ql/exec/repl/ExternalTableCopyTaskBuilder.java | 4 +-
.../hadoop/hive/ql/exec/repl/ReplLoadTask.java | 16 ++--
.../hadoop/hive/ql/exec/repl/ReplLoadWork.java | 6 +-
.../exec/repl/bootstrap/load/LoadConstraint.java | 2 +-
.../ql/exec/repl/bootstrap/load/LoadDatabase.java | 10 +--
.../ql/exec/repl/bootstrap/load/LoadFunction.java | 4 +-
.../repl/bootstrap/load/table/LoadPartitions.java | 4 +-
.../exec/repl/bootstrap/load/table/LoadTable.java | 2 +-
.../incremental/IncrementalLoadTasksBuilder.java | 48 ++++++------
.../ql/exec/repl/util/AddDependencyToLeaves.java | 10 +--
.../hadoop/hive/ql/exec/repl/util/ReplUtils.java | 16 ++--
.../hadoop/hive/ql/exec/repl/util/TaskTracker.java | 26 +++----
.../hadoop/hive/ql/exec/util/DAGTraversal.java | 12 +--
.../apache/hadoop/hive/ql/history/HiveHistory.java | 6 +-
.../hadoop/hive/ql/history/HiveHistoryImpl.java | 6 +-
.../hive/ql/hooks/NoOperatorReuseCheckerHook.java | 4 +-
.../apache/hadoop/hive/ql/lib/TaskGraphWalker.java | 16 ++--
.../hadoop/hive/ql/optimizer/GenMRFileSink1.java | 20 ++---
.../hadoop/hive/ql/optimizer/GenMRProcContext.java | 46 +++++------
.../hadoop/hive/ql/optimizer/GenMRRedSink1.java | 4 +-
.../hadoop/hive/ql/optimizer/GenMRRedSink2.java | 6 +-
.../hadoop/hive/ql/optimizer/GenMRRedSink3.java | 6 +-
.../hadoop/hive/ql/optimizer/GenMRUnion1.java | 16 ++--
.../hadoop/hive/ql/optimizer/GenMapRedUtils.java | 88 +++++++++++-----------
.../hadoop/hive/ql/optimizer/MapJoinFactory.java | 10 +--
.../physical/AbstractJoinTaskDispatcher.java | 34 ++++-----
.../physical/AnnotateRunTimeStatsOptimizer.java | 2 +-
.../physical/CommonJoinTaskDispatcher.java | 14 ++--
.../ql/optimizer/physical/CrossProductHandler.java | 6 +-
.../optimizer/physical/GenMRSkewJoinProcessor.java | 22 +++---
.../physical/GenSparkSkewJoinProcessor.java | 26 +++----
.../hive/ql/optimizer/physical/LlapDecider.java | 2 +-
.../physical/LlapPreVectorizationPass.java | 2 +-
.../ql/optimizer/physical/MapJoinResolver.java | 40 +++++-----
.../hive/ql/optimizer/physical/MemoryDecider.java | 2 +-
.../optimizer/physical/NullScanTaskDispatcher.java | 2 +-
.../ql/optimizer/physical/PhysicalContext.java | 20 ++---
.../ql/optimizer/physical/SerializeFilter.java | 2 +-
.../ql/optimizer/physical/SkewJoinProcFactory.java | 2 +-
.../ql/optimizer/physical/SkewJoinResolver.java | 10 +--
.../physical/SortMergeJoinTaskDispatcher.java | 8 +-
.../optimizer/physical/SparkCrossProductCheck.java | 6 +-
.../SparkDynamicPartitionPruningResolver.java | 6 +-
.../optimizer/physical/SparkMapJoinResolver.java | 28 +++----
.../ql/optimizer/physical/StageIDsRearranger.java | 6 +-
.../hive/ql/optimizer/physical/Vectorizer.java | 2 +-
.../optimizer/spark/SparkSkewJoinProcFactory.java | 12 +--
.../ql/optimizer/spark/SparkSkewJoinResolver.java | 4 +-
.../ql/optimizer/spark/SplitSparkWorkResolver.java | 2 +-
.../ql/parse/AbstractSemanticAnalyzerHook.java | 2 +-
.../hadoop/hive/ql/parse/BaseSemanticAnalyzer.java | 2 +-
.../org/apache/hadoop/hive/ql/parse/EximUtil.java | 12 +--
.../hadoop/hive/ql/parse/GenTezProcContext.java | 4 +-
.../hive/ql/parse/HiveSemanticAnalyzerHook.java | 2 +-
.../hive/ql/parse/ImportSemanticAnalyzer.java | 4 +-
.../hadoop/hive/ql/parse/MapReduceCompiler.java | 26 +++----
.../apache/hadoop/hive/ql/parse/ParseContext.java | 8 +-
.../apache/hadoop/hive/ql/parse/TaskCompiler.java | 70 ++++++++---------
.../apache/hadoop/hive/ql/parse/TezCompiler.java | 14 ++--
.../HiveAuthorizationTaskFactory.java | 24 +++---
.../HiveAuthorizationTaskFactoryImpl.java | 26 +++----
.../parse/repl/load/message/AbortTxnHandler.java | 2 +-
.../repl/load/message/AddForeignKeyHandler.java | 4 +-
.../load/message/AddNotNullConstraintHandler.java | 4 +-
.../repl/load/message/AddPrimaryKeyHandler.java | 4 +-
.../load/message/AddUniqueConstraintHandler.java | 4 +-
.../repl/load/message/AllocWriteIdHandler.java | 4 +-
.../repl/load/message/AlterDatabaseHandler.java | 2 +-
.../parse/repl/load/message/CommitTxnHandler.java | 4 +-
.../repl/load/message/CreateDatabaseHandler.java | 4 +-
.../repl/load/message/CreateFunctionHandler.java | 6 +-
.../ql/parse/repl/load/message/DefaultHandler.java | 2 +-
.../load/message/DeletePartColStatHandler.java | 2 +-
.../load/message/DeleteTableColStatHandler.java | 2 +-
.../repl/load/message/DropConstraintHandler.java | 2 +-
.../repl/load/message/DropDatabaseHandler.java | 4 +-
.../repl/load/message/DropFunctionHandler.java | 2 +-
.../repl/load/message/DropPartitionHandler.java | 2 +-
.../parse/repl/load/message/DropTableHandler.java | 2 +-
.../ql/parse/repl/load/message/InsertHandler.java | 4 +-
.../ql/parse/repl/load/message/MessageHandler.java | 6 +-
.../ql/parse/repl/load/message/OpenTxnHandler.java | 2 +-
.../repl/load/message/RenamePartitionHandler.java | 2 +-
.../repl/load/message/RenameTableHandler.java | 2 +-
.../ql/parse/repl/load/message/TableHandler.java | 8 +-
.../load/message/TruncatePartitionHandler.java | 2 +-
.../repl/load/message/TruncateTableHandler.java | 2 +-
.../load/message/UpdatePartColStatHandler.java | 2 +-
.../load/message/UpdateTableColStatHandler.java | 2 +-
.../hive/ql/parse/spark/GenSparkProcContext.java | 4 +-
.../hadoop/hive/ql/parse/spark/GenSparkUtils.java | 2 +-
.../hadoop/hive/ql/parse/spark/SparkCompiler.java | 14 ++--
.../hadoop/hive/ql/plan/ConditionalResolver.java | 2 +-
.../ql/plan/ConditionalResolverCommonJoin.java | 28 +++----
.../ql/plan/ConditionalResolverMergeFiles.java | 26 +++----
.../hive/ql/plan/ConditionalResolverSkewJoin.java | 32 ++++----
.../hadoop/hive/ql/plan/ImportTableDesc.java | 2 +-
.../apache/hadoop/hive/ql/exec/TestUtilities.java | 26 +++----
.../repl/bootstrap/AddDependencyToLeavesTest.java | 12 +--
.../exec/repl/bootstrap/load/TestTaskTracker.java | 4 +-
.../hadoop/hive/ql/exec/util/DAGTraversalTest.java | 20 ++---
.../TestGenMapRedUtilsCreateConditionalTask.java | 22 +++---
.../hadoop/hive/ql/parse/TestGenTezWork.java | 2 +-
.../ql/plan/TestConditionalResolverCommonJoin.java | 6 +-
.../hadoop/hive/ql/plan/TestReadEntityDirect.java | 2 +-
.../apache/hadoop/hive/ql/plan/TestViewEntity.java | 2 +-
.../apache/hive/service/cli/CLIServiceTest.java | 2 +-
130 files changed, 698 insertions(+), 698 deletions(-)
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
index 34157d9..369c663 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
@@ -79,7 +79,7 @@ final class CreateDatabaseHook extends HCatSemanticAnalyzerBase {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
context.getConf().set(HCatConstants.HCAT_CREATE_DB_NAME, databaseName);
super.postAnalyze(context, rootTasks);
}
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
index 3dad6d2..540ecd1 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
@@ -132,7 +132,7 @@ final class CreateTableHook extends HCatSemanticAnalyzerBase {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks)
+ List<Task<?>> rootTasks)
throws SemanticException {
if (rootTasks.size() == 0) {
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
index 2a96e05..b86a65f 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
@@ -152,7 +152,7 @@ public class HCatSemanticAnalyzer extends HCatSemanticAnalyzerBase {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
try {
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
index 970733c..8487e3a 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
@@ -55,7 +55,7 @@ public class HCatSemanticAnalyzerBase extends AbstractSemanticAnalyzerHook {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
super.postAnalyze(context, rootTasks);
//Authorize the operation.
@@ -86,7 +86,7 @@ public class HCatSemanticAnalyzerBase extends AbstractSemanticAnalyzerHook {
* @see https://issues.apache.org/jira/browse/HCATALOG-245
*/
protected void authorizeDDL(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
if (!HCatAuthUtil.isAuthorizationEnabled(context.getConf())) {
return;
@@ -96,7 +96,7 @@ public class HCatSemanticAnalyzerBase extends AbstractSemanticAnalyzerHook {
try {
hive = context.getHive();
- for (Task<? extends Serializable> task : rootTasks) {
+ for (Task<?> task : rootTasks) {
if (task.getWork() instanceof DDLWork) {
DDLWork work = (DDLWork) task.getWork();
if (work != null) {
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
index 509b178..7d441b6 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
@@ -121,7 +121,7 @@ public class TestHs2Hooks {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
try {
userName = context.getUserName();
ipAddress = context.getIpAddress();
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index ce70952..36e0fcf 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -356,11 +356,11 @@ public class TestReplicationScenarios {
if (validate(rootTask)) {
return true;
}
- List<Task<? extends Serializable>> childTasks = rootTask.getChildTasks();
+ List<Task<?>> childTasks = rootTask.getChildTasks();
if (childTasks == null) {
return false;
}
- for (Task<? extends Serializable> childTask : childTasks) {
+ for (Task<?> childTask : childTasks) {
if (hasTask(childTask)) {
return true;
}
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java
index 9686445..d663172 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java
@@ -67,7 +67,7 @@ public class TestHs2Metrics {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
}
}
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java
index 6a16108..1e0af9c 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHiveSortedInputFormatUsedHook.java
@@ -33,8 +33,8 @@ public class VerifyHiveSortedInputFormatUsedHook implements ExecuteWithHookConte
// Go through the root tasks, and verify the input format of the map reduce task(s) is
// HiveSortedInputFormat
- List<Task<? extends Serializable>> rootTasks = hookContext.getQueryPlan().getRootTasks();
- for (Task<? extends Serializable> rootTask : rootTasks) {
+ List<Task<?>> rootTasks = hookContext.getQueryPlan().getRootTasks();
+ for (Task<?> rootTask : rootTasks) {
if (rootTask.getWork() instanceof MapredWork) {
Assert.assertTrue("The root map reduce task's input was not marked as sorted.",
((MapredWork)rootTask.getWork()).getMapWork().isInputFormatSorted());
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHooksRunInOrder.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHooksRunInOrder.java
index 623d845..f272a15 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHooksRunInOrder.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/hooks/VerifyHooksRunInOrder.java
@@ -110,7 +110,7 @@ public class VerifyHooksRunInOrder {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
LogHelper console = SessionState.getConsole();
if (console == null) {
@@ -145,7 +145,7 @@ public class VerifyHooksRunInOrder {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
LogHelper console = SessionState.getConsole();
if (console == null) {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
index 88c3bd1..8ccbf97 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
@@ -61,7 +61,7 @@ public class DummySemanticAnalyzerHook extends AbstractSemanticAnalyzerHook{
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
if(hook != null) {
hook.postAnalyze(context, rootTasks);
@@ -91,7 +91,7 @@ class DummyCreateTableHook extends AbstractSemanticAnalyzerHook{
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
CreateTableDesc desc = (CreateTableDesc) ((DDLTask)rootTasks.get(rootTasks.size()-1)).getWork().getDDLDesc();
Map<String,String> tblProps = desc.getTblProps();
if(tblProps == null) {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
index 59c3406..00e7582 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
@@ -56,7 +56,7 @@ public class DummySemanticAnalyzerHook1 extends AbstractSemanticAnalyzerHook {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
count = 0;
if (!isCreateTable) {
return;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 00b21d5..bcd4600 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -1552,10 +1552,10 @@ public class Driver implements IDriver {
if (!HiveConf.getBoolVar(conf, ConfVars.HIVE_LOCK_MAPRED_ONLY)) {
return true;
}
- Queue<Task<? extends Serializable>> taskQueue = new LinkedList<Task<? extends Serializable>>();
+ Queue<Task<?>> taskQueue = new LinkedList<Task<?>>();
taskQueue.addAll(plan.getRootTasks());
while (taskQueue.peek() != null) {
- Task<? extends Serializable> tsk = taskQueue.remove();
+ Task<?> tsk = taskQueue.remove();
if (tsk.requireLock()) {
return true;
}
@@ -1768,7 +1768,7 @@ public class Driver implements IDriver {
SessionState.get().setLocalMapRedErrors(new HashMap<>());
// Add root Tasks to runnable
- for (Task<? extends Serializable> tsk : plan.getRootTasks()) {
+ for (Task<?> tsk : plan.getRootTasks()) {
// This should never happen, if it does, it's a bug with the potential to produce
// incorrect results.
assert tsk.getParentTasks() == null || tsk.getParentTasks().isEmpty();
@@ -1785,7 +1785,7 @@ public class Driver implements IDriver {
// Loop while you either have tasks running, or tasks queued up
while (driverCxt.isRunning()) {
// Launch upto maxthreads tasks
- Task<? extends Serializable> task;
+ Task<?> task;
while ((task = driverCxt.getRunnable(maxthreads)) != null) {
TaskRunner runner = launchTask(task, queryId, noName, jobname, jobs, driverCxt);
if (!runner.isRunning()) {
@@ -1813,14 +1813,14 @@ public class Driver implements IDriver {
queryDisplay.setTaskResult(tskRun.getTask().getId(), tskRun.getTaskResult());
- Task<? extends Serializable> tsk = tskRun.getTask();
+ Task<?> tsk = tskRun.getTask();
TaskResult result = tskRun.getTaskResult();
int exitVal = result.getExitVal();
checkInterrupted("when checking the execution result.", hookContext, perfLogger);
if (exitVal != 0) {
- Task<? extends Serializable> backupTask = tsk.getAndInitBackupTask();
+ Task<?> backupTask = tsk.getAndInitBackupTask();
if (backupTask != null) {
String errorMessage = getErrorMsgAndDetail(exitVal, result.getTaskError(), tsk);
console.printError(errorMessage);
@@ -1870,7 +1870,7 @@ public class Driver implements IDriver {
}
if (tsk.getChildTasks() != null) {
- for (Task<? extends Serializable> child : tsk.getChildTasks()) {
+ for (Task<?> child : tsk.getChildTasks()) {
if (DriverContext.isLaunchable(child)) {
driverCxt.addToRunnable(child);
}
@@ -2023,20 +2023,20 @@ public class Driver implements IDriver {
}
}
- private void setQueryDisplays(List<Task<? extends Serializable>> tasks) {
+ private void setQueryDisplays(List<Task<?>> tasks) {
if (tasks != null) {
- Set<Task<? extends Serializable>> visited = new HashSet<Task<? extends Serializable>>();
+ Set<Task<?>> visited = new HashSet<Task<?>>();
while (!tasks.isEmpty()) {
tasks = setQueryDisplays(tasks, visited);
}
}
}
- private List<Task<? extends Serializable>> setQueryDisplays(
- List<Task<? extends Serializable>> tasks,
- Set<Task<? extends Serializable>> visited) {
- List<Task<? extends Serializable>> childTasks = new ArrayList<>();
- for (Task<? extends Serializable> task : tasks) {
+ private List<Task<?>> setQueryDisplays(
+ List<Task<?>> tasks,
+ Set<Task<?>> visited) {
+ List<Task<?>> childTasks = new ArrayList<>();
+ for (Task<?> task : tasks) {
if (visited.contains(task)) {
continue;
}
@@ -2103,7 +2103,7 @@ public class Driver implements IDriver {
* @param cxt
* the driver context
*/
- private TaskRunner launchTask(Task<? extends Serializable> tsk, String queryId, boolean noName,
+ private TaskRunner launchTask(Task<?> tsk, String queryId, boolean noName,
String jobname, int jobs, DriverContext cxt) throws HiveException {
if (SessionState.get() != null) {
SessionState.get().getHiveHistory().startTask(queryId, tsk, tsk.getClass().getName());
@@ -2437,7 +2437,7 @@ public class Driver implements IDriver {
public boolean hasResultSet() {
// TODO explain should use a FetchTask for reading
- for (Task<? extends Serializable> task : plan.getRootTasks()) {
+ for (Task<?> task : plan.getRootTasks()) {
if (task.getClass() == ExplainTask.class) {
return true;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
index d5392ab..1b8260a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
@@ -54,7 +54,7 @@ public class DriverContext {
private static final int SLEEP_TIME = 2000;
- private Queue<Task<? extends Serializable>> runnable;
+ private Queue<Task<?>> runnable;
private Queue<TaskRunner> running;
// how many jobs have been started
@@ -69,7 +69,7 @@ public class DriverContext {
}
public DriverContext(Context ctx) {
- this.runnable = new ConcurrentLinkedQueue<Task<? extends Serializable>>();
+ this.runnable = new ConcurrentLinkedQueue<Task<?>>();
this.running = new LinkedBlockingQueue<TaskRunner>();
this.ctx = ctx;
}
@@ -82,7 +82,7 @@ public class DriverContext {
return !shutdown && (!running.isEmpty() || !runnable.isEmpty());
}
- public synchronized void remove(Task<? extends Serializable> task) {
+ public synchronized void remove(Task<?> task) {
runnable.remove(task);
}
@@ -91,7 +91,7 @@ public class DriverContext {
running.add(runner);
}
- public synchronized Task<? extends Serializable> getRunnable(int maxthreads) throws HiveException {
+ public synchronized Task<?> getRunnable(int maxthreads) throws HiveException {
checkShutdown();
if (runnable.peek() != null && running.size() < maxthreads) {
return runnable.remove();
@@ -161,13 +161,13 @@ public class DriverContext {
* @return true if the task is launchable, false otherwise
*/
- public static boolean isLaunchable(Task<? extends Serializable> tsk) {
+ public static boolean isLaunchable(Task<?> tsk) {
// A launchable task is one that hasn't been queued, hasn't been
// initialized, and is runnable.
return tsk.isNotInitialized() && tsk.isRunnable();
}
- public synchronized boolean addToRunnable(Task<? extends Serializable> tsk) throws HiveException {
+ public synchronized boolean addToRunnable(Task<?> tsk) throws HiveException {
if (runnable.contains(tsk)) {
return false;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/HookRunner.java b/ql/src/java/org/apache/hadoop/hive/ql/HookRunner.java
index a3105b6..2ba170b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/HookRunner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/HookRunner.java
@@ -228,7 +228,7 @@ public class HookRunner {
}
public void runPostAnalyzeHooks(HiveSemanticAnalyzerHookContext hookCtx,
- List<Task<? extends Serializable>> allRootTasks) throws HiveException {
+ List<Task<?>> allRootTasks) throws HiveException {
initialize();
try {
for (HiveSemanticAnalyzerHook hook : saHooks) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
index 79cfd84..ddeb954 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
@@ -55,14 +55,14 @@ public class QueryDisplay {
private final LinkedHashMap<String, TaskDisplay> tasks = new LinkedHashMap<String, TaskDisplay>();
- public synchronized <T extends Serializable> void updateTaskStatus(Task<T> tTask) {
+ public void updateTaskStatus(Task<?> tTask) {
if (!tasks.containsKey(tTask.getId())) {
tasks.put(tTask.getId(), new TaskDisplay(tTask));
}
tasks.get(tTask.getId()).updateStatus(tTask);
}
- public synchronized <T extends Serializable> void updateTaskStatistics(MapRedStats mapRedStats,
+ public synchronized void updateTaskStatistics(MapRedStats mapRedStats,
RunningJob rj, String taskId) throws IOException, JSONException {
if (tasks.containsKey(taskId)) {
tasks.get(taskId).updateMapRedStatsJson(mapRedStats, rj);
@@ -232,7 +232,7 @@ public class QueryDisplay {
return externalHandle;
}
- public synchronized <T extends Serializable> void updateStatus(Task<T> tTask) {
+ public void updateStatus(Task<?> tTask) {
this.taskState = tTask.getTaskState();
if (externalHandle == null && tTask.getExternalHandle() != null) {
this.externalHandle = tTask.getExternalHandle();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
index 387fe61..0d7b92d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
@@ -76,7 +76,7 @@ public class QueryPlan implements Serializable {
private String optimizedCBOPlan;
private String optimizedQueryString;
- private List<Task<? extends Serializable>> rootTasks;
+ private List<Task<?>> rootTasks;
private FetchTask fetchTask;
private final List<ReducerTimeStatsPerJob> reducerTimeStatsPerJobList;
@@ -134,7 +134,7 @@ public class QueryPlan implements Serializable {
HiveOperation operation, Schema resultSchema) {
this.queryString = queryString;
- rootTasks = new ArrayList<Task<? extends Serializable>>(sem.getAllRootTasks());
+ rootTasks = new ArrayList<Task<?>>(sem.getAllRootTasks());
reducerTimeStatsPerJobList = new ArrayList<ReducerTimeStatsPerJob>();
fetchTask = sem.getFetchTask();
// Note that inputs and outputs can be changed when the query gets executed
@@ -264,12 +264,12 @@ public class QueryPlan implements Serializable {
query.setStageGraph(new org.apache.hadoop.hive.ql.plan.api.Graph());
query.getStageGraph().setNodeType(NodeType.STAGE);
- Queue<Task<? extends Serializable>> tasksToVisit =
- new LinkedList<Task<? extends Serializable>>();
- Set<Task<? extends Serializable>> tasksVisited = new HashSet<Task<? extends Serializable>>();
+ Queue<Task<?>> tasksToVisit =
+ new LinkedList<Task<?>>();
+ Set<Task<?>> tasksVisited = new HashSet<Task<?>>();
tasksToVisit.addAll(rootTasks);
while (tasksToVisit.size() != 0) {
- Task<? extends Serializable> task = tasksToVisit.remove();
+ Task<?> task = tasksToVisit.remove();
tasksVisited.add(task);
// populate stage
org.apache.hadoop.hive.ql.plan.api.Stage stage =
@@ -315,14 +315,14 @@ public class QueryPlan implements Serializable {
listEntry.setNode(task.getId());
ConditionalTask t = (ConditionalTask) task;
- for (Task<? extends Serializable> listTask : t.getListTasks()) {
+ for (Task<?> listTask : t.getListTasks()) {
if (t.getChildTasks() != null) {
org.apache.hadoop.hive.ql.plan.api.Adjacency childEntry =
new org.apache.hadoop.hive.ql.plan.api.Adjacency();
childEntry.setAdjacencyType(AdjacencyType.DISJUNCTIVE);
childEntry.setNode(listTask.getId());
// done processing the task
- for (Task<? extends Serializable> childTask : t.getChildTasks()) {
+ for (Task<?> childTask : t.getChildTasks()) {
childEntry.addToChildren(childTask.getId());
if (!tasksVisited.contains(childTask)) {
tasksToVisit.add(childTask);
@@ -343,7 +343,7 @@ public class QueryPlan implements Serializable {
entry.setAdjacencyType(AdjacencyType.CONJUNCTIVE);
entry.setNode(task.getId());
// done processing the task
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
entry.addToChildren(childTask.getId());
if (!tasksVisited.contains(childTask)) {
tasksToVisit.add(childTask);
@@ -399,17 +399,17 @@ public class QueryPlan implements Serializable {
* Extract all the counters from tasks and operators.
*/
private void extractCounters() throws IOException {
- Queue<Task<? extends Serializable>> tasksToVisit =
- new LinkedList<Task<? extends Serializable>>();
- Set<Task<? extends Serializable>> tasksVisited =
- new HashSet<Task<? extends Serializable>>();
+ Queue<Task<?>> tasksToVisit =
+ new LinkedList<Task<?>>();
+ Set<Task<?>> tasksVisited =
+ new HashSet<Task<?>>();
tasksToVisit.addAll(rootTasks);
while (tasksToVisit.peek() != null) {
- Task<? extends Serializable> task = tasksToVisit.remove();
+ Task<?> task = tasksToVisit.remove();
tasksVisited.add(task);
// add children to tasksToVisit
if (task.getChildTasks() != null) {
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
if (!tasksVisited.contains(childTask)) {
tasksToVisit.add(childTask);
}
@@ -450,7 +450,7 @@ public class QueryPlan implements Serializable {
}
} else if (task instanceof ConditionalTask) {
ConditionalTask cTask = (ConditionalTask) task;
- for (Task<? extends Serializable> listTask : cTask.getListTasks()) {
+ for (Task<?> listTask : cTask.getListTasks()) {
if (!tasksVisited.contains(listTask)) {
tasksToVisit.add(listTask);
}
@@ -696,11 +696,11 @@ public class QueryPlan implements Serializable {
return done;
}
- public List<Task<? extends Serializable>> getRootTasks() {
+ public List<Task<?>> getRootTasks() {
return rootTasks;
}
- public void setRootTasks(List<Task<? extends Serializable>> rootTasks) {
+ public void setRootTasks(List<Task<?>> rootTasks) {
this.rootTasks = rootTasks;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java
index c2b058a..ecdf368 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java
@@ -33,11 +33,11 @@ import org.apache.hadoop.hive.ql.plan.api.StageType;
public class ConditionalTask extends Task<ConditionalWork> implements Serializable {
private static final long serialVersionUID = 1L;
- private List<Task<? extends Serializable>> listTasks;
+ private List<Task<?>> listTasks;
private boolean resolved = false;
- private List<Task<? extends Serializable>> resTasks;
+ private List<Task<?>> resTasks;
private ConditionalResolver resolver;
private Object resolverCtx;
@@ -49,7 +49,7 @@ public class ConditionalTask extends Task<ConditionalWork> implements Serializab
@Override
public boolean isMapRedTask() {
- for (Task<? extends Serializable> task : listTasks) {
+ for (Task<?> task : listTasks) {
if (task.isMapRedTask()) {
return true;
}
@@ -65,7 +65,7 @@ public class ConditionalTask extends Task<ConditionalWork> implements Serializab
@Override
public boolean hasReduce() {
- for (Task<? extends Serializable> task : listTasks) {
+ for (Task<?> task : listTasks) {
if (task.hasReduce()) {
return true;
}
@@ -89,7 +89,7 @@ public class ConditionalTask extends Task<ConditionalWork> implements Serializab
}
private void resolveTask(DriverContext driverContext) throws HiveException {
- for (Task<? extends Serializable> tsk : getListTasks()) {
+ for (Task<?> tsk : getListTasks()) {
if (!resTasks.contains(tsk)) {
driverContext.remove(tsk);
console.printInfo(tsk.getId() + " is filtered out by condition resolver.");
@@ -101,7 +101,7 @@ public class ConditionalTask extends Task<ConditionalWork> implements Serializab
} else {
if (getParentTasks() != null) {
// This makes it so that we can go back up the tree later
- for (Task<? extends Serializable> task : getParentTasks()) {
+ for (Task<?> task : getParentTasks()) {
task.addDependentTask(tsk);
}
}
@@ -140,20 +140,20 @@ public class ConditionalTask extends Task<ConditionalWork> implements Serializab
@Override
public boolean done() {
boolean ret = true;
- List<Task<? extends Serializable>> parentTasks = getParentTasks();
+ List<Task<?>> parentTasks = getParentTasks();
if (parentTasks != null) {
- for (Task<? extends Serializable> par : parentTasks) {
+ for (Task<?> par : parentTasks) {
ret = ret && par.done();
}
}
- List<Task<? extends Serializable>> retTasks;
+ List<Task<?>> retTasks;
if (resolved) {
retTasks = resTasks;
} else {
retTasks = getListTasks();
}
if (ret && retTasks != null) {
- for (Task<? extends Serializable> tsk : retTasks) {
+ for (Task<?> tsk : retTasks) {
ret = ret && tsk.done();
}
}
@@ -171,7 +171,7 @@ public class ConditionalTask extends Task<ConditionalWork> implements Serializab
/**
* @return the listTasks
*/
- public List<Task<? extends Serializable>> getListTasks() {
+ public List<Task<?>> getListTasks() {
return listTasks;
}
@@ -179,7 +179,7 @@ public class ConditionalTask extends Task<ConditionalWork> implements Serializab
* @param listTasks
* the listTasks to set
*/
- public void setListTasks(List<Task<? extends Serializable>> listTasks) {
+ public void setListTasks(List<Task<?>> listTasks) {
this.listTasks = listTasks;
}
@@ -200,11 +200,11 @@ public class ConditionalTask extends Task<ConditionalWork> implements Serializab
* @return true if the task got added false if it already existed
*/
@Override
- public boolean addDependentTask(Task<? extends Serializable> dependent) {
+ public boolean addDependentTask(Task<?> dependent) {
boolean ret = false;
if (getListTasks() != null) {
ret = true;
- for (Task<? extends Serializable> tsk : getListTasks()) {
+ for (Task<?> tsk : getListTasks()) {
ret = ret & tsk.addDependentTask(dependent);
}
}
@@ -212,7 +212,7 @@ public class ConditionalTask extends Task<ConditionalWork> implements Serializab
}
@Override
- public List<Task<? extends Serializable>> getDependentTasks() {
+ public List<Task<?>> getDependentTasks() {
return listTasks;
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/NodeUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/NodeUtils.java
index f76bfdd..fc05837 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/NodeUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/NodeUtils.java
@@ -34,7 +34,7 @@ import java.util.Set;
public class NodeUtils {
- public static <T> void iterateTask(Collection<Task<? extends Serializable>> tasks, Class<T> clazz, Function<T> function) {
+ public static <T> void iterateTask(Collection<Task<?>> tasks, Class<T> clazz, Function<T> function) {
// Does a breadth first traversal of the tasks
Set<Task> visited = new HashSet<Task>();
while (!tasks.isEmpty()) {
@@ -43,7 +43,7 @@ public class NodeUtils {
return;
}
- private static <T> Collection<Task<? extends Serializable>> iterateTask(Collection<Task<?>> tasks,
+ private static <T> Collection<Task<?>> iterateTask(Collection<Task<?>> tasks,
Class<T> clazz,
Function<T> function,
Set<Task> visited) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
index f542105..8eea9cf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
@@ -64,8 +64,8 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
protected transient DriverContext driverContext;
protected transient boolean clonedConf = false;
protected transient String jobID;
- protected Task<? extends Serializable> backupTask;
- protected List<Task<? extends Serializable>> backupChildrenTasks = new ArrayList<Task<? extends Serializable>>();
+ protected Task<?> backupTask;
+ protected List<Task<?>> backupChildrenTasks = new ArrayList<Task<?>>();
protected static transient Logger LOG = LoggerFactory.getLogger(Task.class);
protected int taskTag;
private boolean isLocalMode =false;
@@ -84,7 +84,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
public static final int CONVERTED_SORTMERGEJOIN = 8;
public QueryDisplay queryDisplay = null;
// Descendants tasks who subscribe feeds from this task
- protected transient List<Task<? extends Serializable>> feedSubscribers;
+ protected transient List<Task<?>> feedSubscribers;
protected String id;
protected T work;
@@ -138,8 +138,8 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
protected boolean rootTask;
- protected List<Task<? extends Serializable>> childTasks;
- protected List<Task<? extends Serializable>> parentTasks;
+ protected List<Task<?>> childTasks;
+ protected List<Task<?>> parentTasks;
/**
* this can be set by the Task, to provide more info about the failure in TaskResult
* where the Driver can find it. This is checked if {@link Task#execute(org.apache.hadoop.hive.ql.DriverContext)}
@@ -235,7 +235,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
this.rootTask = rootTask;
}
- public void setChildTasks(List<Task<? extends Serializable>> childTasks) {
+ public void setChildTasks(List<Task<?>> childTasks) {
this.childTasks = childTasks;
}
@@ -244,7 +244,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
return getChildTasks();
}
- public List<Task<? extends Serializable>> getChildTasks() {
+ public List<Task<?>> getChildTasks() {
return childTasks;
}
@@ -252,11 +252,11 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
return childTasks == null ? 0 : childTasks.size();
}
- public void setParentTasks(List<Task<? extends Serializable>> parentTasks) {
+ public void setParentTasks(List<Task<?>> parentTasks) {
this.parentTasks = parentTasks;
}
- public List<Task<? extends Serializable>> getParentTasks() {
+ public List<Task<?>> getParentTasks() {
return parentTasks;
}
@@ -264,27 +264,27 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
return parentTasks == null ? 0 : parentTasks.size();
}
- public Task<? extends Serializable> getBackupTask() {
+ public Task<?> getBackupTask() {
return backupTask;
}
- public void setBackupTask(Task<? extends Serializable> backupTask) {
+ public void setBackupTask(Task<?> backupTask) {
this.backupTask = backupTask;
}
- public List<Task<? extends Serializable>> getBackupChildrenTasks() {
+ public List<Task<?>> getBackupChildrenTasks() {
return backupChildrenTasks;
}
- public void setBackupChildrenTasks(List<Task<? extends Serializable>> backupChildrenTasks) {
+ public void setBackupChildrenTasks(List<Task<?>> backupChildrenTasks) {
this.backupChildrenTasks = backupChildrenTasks;
}
- public Task<? extends Serializable> getAndInitBackupTask() {
+ public Task<?> getAndInitBackupTask() {
if (backupTask != null) {
// first set back the backup task with its children task.
if( backupChildrenTasks!= null) {
- for (Task<? extends Serializable> backupChild : backupChildrenTasks) {
+ for (Task<?> backupChild : backupChildrenTasks) {
backupChild.getParentTasks().add(backupTask);
}
}
@@ -297,17 +297,17 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
public void removeFromChildrenTasks() {
- List<Task<? extends Serializable>> childrenTasks = this.getChildTasks();
+ List<Task<?>> childrenTasks = this.getChildTasks();
if (childrenTasks == null) {
return;
}
- for (Task<? extends Serializable> childTsk : childrenTasks) {
+ for (Task<?> childTsk : childrenTasks) {
// remove this task from its children tasks
childTsk.getParentTasks().remove(this);
// recursively remove non-parent task from its children
- List<Task<? extends Serializable>> siblingTasks = childTsk.getParentTasks();
+ List<Task<?>> siblingTasks = childTsk.getParentTasks();
if (siblingTasks == null || siblingTasks.size() == 0) {
childTsk.removeFromChildrenTasks();
}
@@ -321,7 +321,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
*
* @return a list of tasks that are dependent on this task.
*/
- public List<Task<? extends Serializable>> getDependentTasks() {
+ public List<Task<?>> getDependentTasks() {
return getChildTasks();
}
@@ -331,16 +331,16 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
*
* @return true if the task got added false if it already existed
*/
- public boolean addDependentTask(Task<? extends Serializable> dependent) {
+ public boolean addDependentTask(Task<?> dependent) {
boolean ret = false;
if (getChildTasks() == null) {
- setChildTasks(new ArrayList<Task<? extends Serializable>>());
+ setChildTasks(new ArrayList<Task<?>>());
}
if (!getChildTasks().contains(dependent)) {
ret = true;
getChildTasks().add(dependent);
if (dependent.getParentTasks() == null) {
- dependent.setParentTasks(new ArrayList<Task<? extends Serializable>>());
+ dependent.setParentTasks(new ArrayList<Task<?>>());
}
if (!dependent.getParentTasks().contains(this)) {
dependent.getParentTasks().add(this);
@@ -350,7 +350,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
}
@SuppressWarnings({"unchecked", "rawtypes"})
- public static List<Task<? extends Serializable>>
+ public static List<Task<?>>
findLeafs(List<Task<? extends Serializable>> rootTasks) {
final List<Task<? extends Serializable>> leafTasks = new ArrayList<Task<?>>();
@@ -372,7 +372,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
* @param dependent
* the task to remove
*/
- public void removeDependentTask(Task<? extends Serializable> dependent) {
+ public void removeDependentTask(Task<?> dependent) {
if ((getChildTasks() != null) && (getChildTasks().contains(dependent))) {
getChildTasks().remove(dependent);
if ((dependent.getParentTasks() != null) && (dependent.getParentTasks().contains(this))) {
@@ -421,7 +421,7 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
public boolean isRunnable() {
boolean isrunnable = true;
if (parentTasks != null) {
- for (Task<? extends Serializable> parent : parentTasks) {
+ for (Task<?> parent : parentTasks) {
if (!parent.done()) {
isrunnable = false;
break;
@@ -509,23 +509,23 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
* @param publisher
* this feed provider.
*/
- public void subscribeFeed(Task<? extends Serializable> publisher) {
+ public void subscribeFeed(Task<?> publisher) {
if (publisher != this && publisher.ancestorOrSelf(this)) {
if (publisher.getFeedSubscribers() == null) {
- publisher.setFeedSubscribers(new LinkedList<Task<? extends Serializable>>());
+ publisher.setFeedSubscribers(new LinkedList<Task<?>>());
}
publisher.getFeedSubscribers().add(this);
}
}
// return true if this task is an ancestor of itself of parameter desc
- private boolean ancestorOrSelf(Task<? extends Serializable> desc) {
+ private boolean ancestorOrSelf(Task<?> desc) {
if (this == desc) {
return true;
}
- List<Task<? extends Serializable>> deps = getDependentTasks();
+ List<Task<?>> deps = getDependentTasks();
if (deps != null) {
- for (Task<? extends Serializable> d : deps) {
+ for (Task<?> d : deps) {
if (d.ancestorOrSelf(desc)) {
return true;
}
@@ -534,18 +534,18 @@ public abstract class Task<T extends Serializable> implements Serializable, Node
return false;
}
- public List<Task<? extends Serializable>> getFeedSubscribers() {
+ public List<Task<?>> getFeedSubscribers() {
return feedSubscribers;
}
- public void setFeedSubscribers(List<Task<? extends Serializable>> s) {
+ public void setFeedSubscribers(List<Task<?>> s) {
feedSubscribers = s;
}
// push the feed to its subscribers
protected void pushFeed(FeedType feedType, Object feedValue) {
if (feedSubscribers != null) {
- for (Task<? extends Serializable> s : feedSubscribers) {
+ for (Task<?> s : feedSubscribers) {
s.receiveFeed(feedType, feedValue);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
index f707264..d6b266e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
@@ -181,12 +181,12 @@ public final class TaskFactory {
@SafeVarargs
public static void makeChild(Task<?> ret,
- Task<? extends Serializable>... tasklist) {
+ Task<?>... tasklist) {
// Add the new task as child of each of the passed in tasks
- for (Task<? extends Serializable> tsk : tasklist) {
- List<Task<? extends Serializable>> children = tsk.getChildTasks();
+ for (Task<?> tsk : tasklist) {
+ List<Task<?>> children = tsk.getChildTasks();
if (children == null) {
- children = new ArrayList<Task<? extends Serializable>>();
+ children = new ArrayList<Task<?>>();
}
children.add(ret);
tsk.setChildTasks(children);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
index 13010ae..a5554c3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskRunner.java
@@ -33,7 +33,7 @@ import org.slf4j.LoggerFactory;
**/
public class TaskRunner extends Thread {
- protected Task<? extends Serializable> tsk;
+ protected Task<?> tsk;
protected TaskResult result;
protected SessionState ss;
private static AtomicLong taskCounter = new AtomicLong(0);
@@ -50,14 +50,14 @@ public class TaskRunner extends Thread {
private final DriverContext driverCtx;
- public TaskRunner(Task<? extends Serializable> tsk, DriverContext ctx) {
+ public TaskRunner(Task<?> tsk, DriverContext ctx) {
this.tsk = tsk;
this.result = new TaskResult();
ss = SessionState.get();
driverCtx = ctx;
}
- public Task<? extends Serializable> getTask() {
+ public Task<?> getTask() {
return tsk;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 47ee9d8..7fd42c1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -2599,24 +2599,24 @@ public final class Utilities {
return true;
}
- public static List<TezTask> getTezTasks(List<Task<? extends Serializable>> tasks) {
+ public static List<TezTask> getTezTasks(List<Task<?>> tasks) {
return getTasks(tasks, new TaskFilterFunction<>(TezTask.class));
}
- public static List<SparkTask> getSparkTasks(List<Task<? extends Serializable>> tasks) {
+ public static List<SparkTask> getSparkTasks(List<Task<?>> tasks) {
return getTasks(tasks, new TaskFilterFunction<>(SparkTask.class));
}
- public static List<ExecDriver> getMRTasks(List<Task<? extends Serializable>> tasks) {
+ public static List<ExecDriver> getMRTasks(List<Task<?>> tasks) {
return getTasks(tasks, new TaskFilterFunction<>(ExecDriver.class));
}
- public static int getNumClusterJobs(List<Task<? extends Serializable>> tasks) {
+ public static int getNumClusterJobs(List<Task<?>> tasks) {
return getMRTasks(tasks).size() + getTezTasks(tasks).size() + getSparkTasks(tasks).size();
}
static class TaskFilterFunction<T> implements DAGTraversal.Function {
- private Set<Task<? extends Serializable>> visited = new HashSet<>();
+ private Set<Task<?>> visited = new HashSet<>();
private Class<T> requiredType;
private List<T> typeSpecificTasks = new ArrayList<>();
@@ -2625,7 +2625,7 @@ public final class Utilities {
}
@Override
- public void process(Task<? extends Serializable> task) {
+ public void process(Task<?> task) {
if (requiredType.isInstance(task) && !typeSpecificTasks.contains(task)) {
typeSpecificTasks.add((T) task);
}
@@ -2637,12 +2637,12 @@ public final class Utilities {
}
@Override
- public boolean skipProcessing(Task<? extends Serializable> task) {
+ public boolean skipProcessing(Task<?> task) {
return visited.contains(task);
}
}
- private static <T> List<T> getTasks(List<Task<? extends Serializable>> tasks,
+ private static <T> List<T> getTasks(List<Task<?>> tasks,
TaskFilterFunction<T> function) {
DAGTraversal.traverse(tasks, function);
return function.getTasks();
@@ -2831,7 +2831,7 @@ public final class Utilities {
* @param conf
* @throws SemanticException
*/
- public static void reworkMapRedWork(Task<? extends Serializable> task,
+ public static void reworkMapRedWork(Task<?> task,
boolean reworkMapredWork, HiveConf conf) throws SemanticException {
if (reworkMapredWork && (task instanceof MapRedTask)) {
try {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
index d2ca33d..c365d41 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
@@ -64,7 +64,7 @@ public class HadoopJobExecHelper {
static final private org.slf4j.Logger LOG = LoggerFactory.getLogger(HadoopJobExecHelper.class.getName());
protected transient JobConf job;
- protected Task<? extends Serializable> task;
+ protected Task<?> task;
protected transient int mapProgress = -1;
protected transient int reduceProgress = -1;
@@ -142,7 +142,7 @@ public class HadoopJobExecHelper {
}
public HadoopJobExecHelper(JobConf job, LogHelper console,
- Task<? extends Serializable> task, HadoopJobExecHook hookCallBack) {
+ Task<?> task, HadoopJobExecHook hookCallBack) {
this.queryId = HiveConf.getVar(job, HiveConf.ConfVars.HIVEQUERYID, "unknown-" + System.currentTimeMillis());
this.job = job;
this.console = console;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ExternalTableCopyTaskBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ExternalTableCopyTaskBuilder.java
index 0ee7425..1af9227 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ExternalTableCopyTaskBuilder.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ExternalTableCopyTaskBuilder.java
@@ -56,8 +56,8 @@ public class ExternalTableCopyTaskBuilder {
this.conf = conf;
}
- List<Task<? extends Serializable>> tasks(TaskTracker tracker) {
- List<Task<? extends Serializable>> tasks = new ArrayList<>();
+ List<Task<?>> tasks(TaskTracker tracker) {
+ List<Task<?>> tasks = new ArrayList<>();
Iterator<DirCopyWork> itr = work.getPathsToCopyIterator();
while (tracker.canAddMoreTasks() && itr.hasNext()) {
DirCopyWork dirCopyWork = itr.next();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java
index 4e6be1c..9a541d2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java
@@ -93,12 +93,12 @@ public class ReplLoadTask extends Task<ReplLoadWork> implements Serializable {
*/
private static class Scope {
boolean database = false, table = false, partition = false;
- List<Task<? extends Serializable>> rootTasks = new ArrayList<>();
+ List<Task<?>> rootTasks = new ArrayList<>();
}
@Override
public int execute(DriverContext driverContext) {
- Task<? extends Serializable> rootTask = work.getRootTask();
+ Task<?> rootTask = work.getRootTask();
if (rootTask != null) {
rootTask.setChildTasks(null);
}
@@ -466,19 +466,19 @@ public class ReplLoadTask extends Task<ReplLoadWork> implements Serializable {
*/
private void setUpDependencies(TaskTracker parentTasks, TaskTracker childTasks) {
if (parentTasks.hasTasks()) {
- for (Task<? extends Serializable> parentTask : parentTasks.tasks()) {
- for (Task<? extends Serializable> childTask : childTasks.tasks()) {
+ for (Task<?> parentTask : parentTasks.tasks()) {
+ for (Task<?> childTask : childTasks.tasks()) {
parentTask.addDependentTask(childTask);
}
}
} else {
- for (Task<? extends Serializable> childTask : childTasks.tasks()) {
+ for (Task<?> childTask : childTasks.tasks()) {
parentTasks.addTask(childTask);
}
}
}
- private void createBuilderTask(List<Task<? extends Serializable>> rootTasks) {
+ private void createBuilderTask(List<Task<?>> rootTasks) {
// Use loadTask as dependencyCollection
Task<ReplLoadWork> loadTask = TaskFactory.get(work, conf);
DAGTraversal.traverse(rootTasks, new AddDependencyToLeaves(loadTask));
@@ -507,7 +507,7 @@ public class ReplLoadTask extends Task<ReplLoadWork> implements Serializable {
}
}
- List<Task<? extends Serializable>> childTasks = new ArrayList<>();
+ List<Task<?>> childTasks = new ArrayList<>();
int maxTasks = conf.getIntVar(HiveConf.ConfVars.REPL_APPROX_MAX_LOAD_TASKS);
// First start the distcp tasks to copy the files related to external table. The distcp tasks should be
@@ -548,7 +548,7 @@ public class ReplLoadTask extends Task<ReplLoadWork> implements Serializable {
AlterDatabaseSetPropertiesDesc alterDbDesc =
new AlterDatabaseSetPropertiesDesc(dbName, mapProp,
new ReplicationSpec(lastEventid, lastEventid));
- Task<? extends Serializable> updateReplIdTask =
+ Task<?> updateReplIdTask =
TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc), conf);
DAGTraversal.traverse(childTasks, new AddDependencyToLeaves(updateReplIdTask));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadWork.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadWork.java
index 1d63cd8..44d2535 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadWork.java
@@ -53,7 +53,7 @@ public class ReplLoadWork implements Serializable {
private DatabaseEvent.State state = null;
private final transient BootstrapEventsIterator bootstrapIterator;
private transient IncrementalLoadTasksBuilder incrementalLoadTasksBuilder;
- private transient Task<? extends Serializable> rootTask;
+ private transient Task<?> rootTask;
private final transient Iterator<DirCopyWork> pathsToCopyIterator;
/*
@@ -143,11 +143,11 @@ public class ReplLoadWork implements Serializable {
return incrementalLoadTasksBuilder;
}
- public Task<? extends Serializable> getRootTask() {
+ public Task<?> getRootTask() {
return rootTask;
}
- public void setRootTask(Task<? extends Serializable> rootTask) {
+ public void setRootTask(Task<?> rootTask) {
this.rootTask = rootTask;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadConstraint.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadConstraint.java
index d603e69..bc12b26 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadConstraint.java
@@ -85,7 +85,7 @@ public class LoadConstraint {
String fksString = json.getString("fks");
String uksString = json.getString("uks");
String nnsString = json.getString("nns");
- List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> tasks = new ArrayList<Task<?>>();
if (pksString != null && !pksString.isEmpty() && !isPrimaryKeysAlreadyLoaded(pksString)) {
AddPrimaryKeyHandler pkHandler = new AddPrimaryKeyHandler();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
index c5378b4..52777f3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
@@ -60,7 +60,7 @@ public class LoadDatabase {
public TaskTracker tasks() throws Exception {
Database dbInMetadata = readDbMetadata();
String dbName = dbInMetadata.getName();
- Task<? extends Serializable> dbRootTask = null;
+ Task<?> dbRootTask = null;
ReplLoadOpType loadDbType = getLoadDbType(dbName);
switch (loadDbType) {
case LOAD_NEW:
@@ -115,7 +115,7 @@ public class LoadDatabase {
return allTables.isEmpty() && allFunctions.isEmpty();
}
- private Task<? extends Serializable> createDbTask(Database dbObj) {
+ private Task<?> createDbTask(Database dbObj) {
// note that we do not set the location - for repl load, we want it auto-created.
CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc(dbObj.getName(), dbObj.getDescription(), null, false,
updateDbProps(dbObj, context.dumpDirectory));
@@ -126,12 +126,12 @@ public class LoadDatabase {
return TaskFactory.get(work, context.hiveConf);
}
- private Task<? extends Serializable> alterDbTask(Database dbObj) {
+ private Task<?> alterDbTask(Database dbObj) {
return alterDbTask(dbObj.getName(), updateDbProps(dbObj, context.dumpDirectory),
context.hiveConf);
}
- private Task<? extends Serializable> setOwnerInfoTask(Database dbObj) {
+ private Task<?> setOwnerInfoTask(Database dbObj) {
AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(dbObj.getName(),
new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()), null);
DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
@@ -160,7 +160,7 @@ public class LoadDatabase {
return parameters;
}
- private static Task<? extends Serializable> alterDbTask(String dbName, Map<String, String> props,
+ private static Task<?> alterDbTask(String dbName, Map<String, String> props,
HiveConf hiveConf) {
AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(dbName, props, null);
DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadFunction.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadFunction.java
index 2c053ef..19e1a8b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadFunction.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadFunction.java
@@ -65,7 +65,7 @@ public class LoadFunction {
this.tracker = new TaskTracker(existingTracker);
}
- private void createFunctionReplLogTask(List<Task<? extends Serializable>> functionTasks,
+ private void createFunctionReplLogTask(List<Task<?>> functionTasks,
String functionName) {
ReplStateLogWork replLogWork = new ReplStateLogWork(replLogger, functionName);
Task<ReplStateLogWork> replLogTask = TaskFactory.get(replLogWork, context.hiveConf);
@@ -82,7 +82,7 @@ public class LoadFunction {
return tracker;
}
CreateFunctionHandler handler = new CreateFunctionHandler();
- List<Task<? extends Serializable>> tasks = handler.handle(
+ List<Task<?>> tasks = handler.handle(
new MessageHandler.Context(
dbNameToLoadIn, fromPath.toString(), null, null, context.hiveConf,
context.hiveDb, context.nestedContext, LOG)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java
index 40020ed..e451161 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java
@@ -117,7 +117,7 @@ public class LoadPartitions {
updateReplicationState(initialReplicationState());
if (!forNewTable().hasReplicationState()) {
// Add ReplStateLogTask only if no pending table load tasks are left for the next cycle
- Task<? extends Serializable> replLogTask
+ Task<?> replLogTask
= ReplUtils.getTableReplLogTask(tableDesc, replLogger, context.hiveConf);
tracker.addDependentTask(replLogTask);
}
@@ -131,7 +131,7 @@ public class LoadPartitions {
updateReplicationState(initialReplicationState());
if (!forExistingTable(lastReplicatedPartition).hasReplicationState()) {
// Add ReplStateLogTask only if no pending table load tasks are left for the next cycle
- Task<? extends Serializable> replLogTask
+ Task<?> replLogTask
= ReplUtils.getTableReplLogTask(tableDesc, replLogger, context.hiveConf);
tracker.addDependentTask(replLogTask);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
index 02993fc..8da2b2e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
@@ -151,7 +151,7 @@ public class LoadTable {
context.hiveConf
);
if (!isPartitioned(tableDesc)) {
- Task<? extends Serializable> replLogTask
+ Task<?> replLogTask
= ReplUtils.getTableReplLogTask(tableDesc, replLogger, context.hiveConf);
ckptTask.addDependentTask(replLogTask);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
index 964b792..ed75df8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java
@@ -89,10 +89,10 @@ public class IncrementalLoadTasksBuilder {
numIteration = 0;
}
- public Task<? extends Serializable> build(DriverContext driverContext, Hive hive, Logger log,
+ public Task<?> build(DriverContext driverContext, Hive hive, Logger log,
TaskTracker tracker) throws Exception {
- Task<? extends Serializable> evTaskRoot = TaskFactory.get(new DependencyCollectionWork());
- Task<? extends Serializable> taskChainTail = evTaskRoot;
+ Task<?> evTaskRoot = TaskFactory.get(new DependencyCollectionWork());
+ Task<?> taskChainTail = evTaskRoot;
Long lastReplayedEvent = null;
this.log = log;
numIteration++;
@@ -133,13 +133,13 @@ public class IncrementalLoadTasksBuilder {
MessageHandler.Context context = new MessageHandler.Context(dbName, location,
taskChainTail, eventDmd, conf, hive, driverContext.getCtx(), this.log);
- List<Task<? extends Serializable>> evTasks = analyzeEventLoad(context);
+ List<Task<?>> evTasks = analyzeEventLoad(context);
if ((evTasks != null) && (!evTasks.isEmpty())) {
ReplStateLogWork replStateLogWork = new ReplStateLogWork(replLogger,
dir.getPath().getName(),
eventDmd.getDumpType().toString());
- Task<? extends Serializable> barrierTask = TaskFactory.get(replStateLogWork, conf);
+ Task<?> barrierTask = TaskFactory.get(replStateLogWork, conf);
AddDependencyToLeaves function = new AddDependencyToLeaves(barrierTask);
DAGTraversal.traverse(evTasks, function);
this.log.debug("Updated taskChainTail from {}:{} to {}:{}",
@@ -152,14 +152,14 @@ public class IncrementalLoadTasksBuilder {
if (!hasMoreWork()) {
ReplRemoveFirstIncLoadPendFlagDesc desc = new ReplRemoveFirstIncLoadPendFlagDesc(dbName);
- Task<? extends Serializable> updateIncPendTask = TaskFactory.get(new DDLWork(inputs, outputs, desc), conf);
+ Task<?> updateIncPendTask = TaskFactory.get(new DDLWork(inputs, outputs, desc), conf);
taskChainTail.addDependentTask(updateIncPendTask);
taskChainTail = updateIncPendTask;
Map<String, String> dbProps = new HashMap<>();
dbProps.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), String.valueOf(lastReplayedEvent));
ReplStateLogWork replStateLogWork = new ReplStateLogWork(replLogger, dbProps);
- Task<? extends Serializable> barrierTask = TaskFactory.get(replStateLogWork, conf);
+ Task<?> barrierTask = TaskFactory.get(replStateLogWork, conf);
taskChainTail.addDependentTask(barrierTask);
this.log.debug("Added {}:{} as a precursor of barrier task {}:{}",
taskChainTail.getClass(), taskChainTail.getId(),
@@ -200,12 +200,12 @@ public class IncrementalLoadTasksBuilder {
}
}
- private List<Task<? extends Serializable>> analyzeEventLoad(MessageHandler.Context context) throws SemanticException {
+ private List<Task<?>> analyzeEventLoad(MessageHandler.Context context) throws SemanticException {
MessageHandler messageHandler = context.dmd.getDumpType().handler();
- List<Task<? extends Serializable>> tasks = messageHandler.handle(context);
+ List<Task<?>> tasks = messageHandler.handle(context);
if (context.precursor != null) {
- for (Task<? extends Serializable> t : tasks) {
+ for (Task<?> t : tasks) {
context.precursor.addDependentTask(t);
log.debug("Added {}:{} as a precursor of {}:{}",
context.precursor.getClass(), context.precursor.getId(), t.getClass(), t.getId());
@@ -217,9 +217,9 @@ public class IncrementalLoadTasksBuilder {
return addUpdateReplStateTasks(messageHandler.getUpdatedMetadata(), tasks);
}
- private Task<? extends Serializable> getMigrationCommitTxnTask(String dbName, String tableName,
+ private Task<?> getMigrationCommitTxnTask(String dbName, String tableName,
List<Map <String, String>> partSpec, String replState,
- Task<? extends Serializable> preCursor) throws SemanticException {
+ Task<?> preCursor) throws SemanticException {
ReplLastIdInfo replLastIdInfo = new ReplLastIdInfo(dbName, Long.parseLong(replState));
replLastIdInfo.setTable(tableName);
if (partSpec != null && !partSpec.isEmpty()) {
@@ -234,7 +234,7 @@ public class IncrementalLoadTasksBuilder {
replLastIdInfo.setPartitionList(partitionList);
}
- Task<? extends Serializable> updateReplIdTxnTask = TaskFactory.get(new ReplTxnWork(replLastIdInfo, ReplTxnWork
+ Task<?> updateReplIdTxnTask = TaskFactory.get(new ReplTxnWork(replLastIdInfo, ReplTxnWork
.OperationType.REPL_MIGRATION_COMMIT_TXN), conf);
if (preCursor != null) {
@@ -245,9 +245,9 @@ public class IncrementalLoadTasksBuilder {
return updateReplIdTxnTask;
}
- private Task<? extends Serializable> tableUpdateReplStateTask(String dbName, String tableName,
+ private Task<?> tableUpdateReplStateTask(String dbName, String tableName,
Map<String, String> partSpec, String replState,
- Task<? extends Serializable> preCursor) throws SemanticException {
+ Task<?> preCursor) throws SemanticException {
HashMap<String, String> mapProp = new HashMap<>();
mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replState);
@@ -255,7 +255,7 @@ public class IncrementalLoadTasksBuilder {
AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(fqTableName, partSpec,
new ReplicationSpec(replState, replState), false, mapProp, false, false, null);
- Task<? extends Serializable> updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterTblDesc), conf);
+ Task<?> updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterTblDesc), conf);
// Link the update repl state task with the dependency collection task
if (preCursor != null) {
@@ -266,14 +266,14 @@ public class IncrementalLoadTasksBuilder {
return updateReplIdTask;
}
- private Task<? extends Serializable> dbUpdateReplStateTask(String dbName, String replState,
- Task<? extends Serializable> preCursor) {
+ private Task<?> dbUpdateReplStateTask(String dbName, String replState,
+ Task<?> preCursor) {
HashMap<String, String> mapProp = new HashMap<>();
mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replState);
AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(dbName, mapProp,
new ReplicationSpec(replState, replState));
- Task<? extends Serializable> updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterDbDesc), conf);
+ Task<?> updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterDbDesc), conf);
// Link the update repl state task with the dependency collection task
if (preCursor != null) {
@@ -284,9 +284,9 @@ public class IncrementalLoadTasksBuilder {
return updateReplIdTask;
}
- private List<Task<? extends Serializable>> addUpdateReplStateTasks(
+ private List<Task<?>> addUpdateReplStateTasks(
UpdatedMetaDataTracker updatedMetaDataTracker,
- List<Task<? extends Serializable>> importTasks) throws SemanticException {
+ List<Task<?>> importTasks) throws SemanticException {
// If no import tasks generated by the event then no need to update the repl state to any object.
if (importTasks.isEmpty()) {
log.debug("No objects need update of repl state: 0 import tasks");
@@ -302,10 +302,10 @@ public class IncrementalLoadTasksBuilder {
}
// Create a barrier task for dependency collection of import tasks
- Task<? extends Serializable> barrierTask = TaskFactory.get(new DependencyCollectionWork(), conf);
+ Task<?> barrierTask = TaskFactory.get(new DependencyCollectionWork(), conf);
- List<Task<? extends Serializable>> tasks = new ArrayList<>();
- Task<? extends Serializable> updateReplIdTask;
+ List<Task<?>> tasks = new ArrayList<>();
+ Task<?> updateReplIdTask;
for (UpdatedMetaDataTracker.UpdateMetaData updateMetaData : updatedMetaDataTracker.getUpdateMetaDataList()) {
String replState = updateMetaData.getReplState();
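The build() changes above all follow one pattern: each per-event task group is hooked behind a moving taskChainTail, so incremental events replay strictly in order. A sketch of that chaining, assuming only Task.addDependentTask from this diff (TaskChain is an illustrative name):

import org.apache.hadoop.hive.ql.exec.Task;

class TaskChain {
  private Task<?> tail;

  TaskChain(Task<?> root) {
    this.tail = root;
  }

  // Run `next` only after the current tail, then make it the new tail.
  void append(Task<?> next) {
    tail.addDependentTask(next);
    tail = next;
  }
}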
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/AddDependencyToLeaves.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/AddDependencyToLeaves.java
index 284796f..f0a6cfb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/AddDependencyToLeaves.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/AddDependencyToLeaves.java
@@ -26,26 +26,26 @@ import java.util.Collections;
import java.util.List;
public class AddDependencyToLeaves implements DAGTraversal.Function {
- private List<Task<? extends Serializable>> postDependencyCollectionTasks;
+ private List<Task<?>> postDependencyCollectionTasks;
- public AddDependencyToLeaves(List<Task<? extends Serializable>> postDependencyCollectionTasks) {
+ public AddDependencyToLeaves(List<Task<?>> postDependencyCollectionTasks) {
this.postDependencyCollectionTasks = postDependencyCollectionTasks;
}
- public AddDependencyToLeaves(Task<? extends Serializable> postDependencyTask) {
+ public AddDependencyToLeaves(Task<?> postDependencyTask) {
this(Collections.singletonList(postDependencyTask));
}
@Override
- public void process(Task<? extends Serializable> task) {
+ public void process(Task<?> task) {
if (task.getChildTasks() == null) {
postDependencyCollectionTasks.forEach(task::addDependentTask);
}
}
@Override
- public boolean skipProcessing(Task<? extends Serializable> task) {
+ public boolean skipProcessing(Task<?> task) {
return postDependencyCollectionTasks.contains(task);
}
}
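AddDependencyToLeaves is just one DAGTraversal.Function; the interface change above ripples to every implementor. A hypothetical implementor following the same process/skipProcessing contract, shown here only to illustrate the rewritten signatures:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.util.DAGTraversal;

class CollectLeaves implements DAGTraversal.Function {
  final List<Task<?>> leaves = new ArrayList<>();

  @Override
  public void process(Task<?> task) {
    if (task.getChildTasks() == null) {
      leaves.add(task); // a task with no children is a leaf
    }
  }

  @Override
  public boolean skipProcessing(Task<?> task) {
    return false; // visit every task
  }
}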
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java
index 23127c9..98a0fa6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/ReplUtils.java
@@ -186,22 +186,22 @@ public class ReplUtils {
private static void addOpenTxnTaskForMigration(String actualDbName, String actualTblName,
HiveConf conf,
UpdatedMetaDataTracker updatedMetaDataTracker,
- List<Task<? extends Serializable>> taskList,
- Task<? extends Serializable> childTask) {
- Task<? extends Serializable> replTxnTask = TaskFactory.get(new ReplTxnWork(actualDbName, actualTblName,
+ List<Task<?>> taskList,
+ Task<?> childTask) {
+ Task<?> replTxnTask = TaskFactory.get(new ReplTxnWork(actualDbName, actualTblName,
ReplTxnWork.OperationType.REPL_MIGRATION_OPEN_TXN), conf);
replTxnTask.addDependentTask(childTask);
updatedMetaDataTracker.setNeedCommitTxn(true);
taskList.add(replTxnTask);
}
- public static List<Task<? extends Serializable>> addOpenTxnTaskForMigration(String actualDbName,
+ public static List<Task<?>> addOpenTxnTaskForMigration(String actualDbName,
String actualTblName, HiveConf conf,
UpdatedMetaDataTracker updatedMetaDataTracker,
- Task<? extends Serializable> childTask,
+ Task<?> childTask,
org.apache.hadoop.hive.metastore.api.Table tableObj)
throws IOException, TException {
- List<Task<? extends Serializable>> taskList = new ArrayList<>();
+ List<Task<?>> taskList = new ArrayList<>();
taskList.add(childTask);
if (isTableMigratingToTransactional(conf, tableObj) && updatedMetaDataTracker != null) {
addOpenTxnTaskForMigration(actualDbName, actualTblName, conf, updatedMetaDataTracker,
@@ -210,13 +210,13 @@ public class ReplUtils {
return taskList;
}
- public static List<Task<? extends Serializable>> addTasksForLoadingColStats(ColumnStatistics colStats,
+ public static List<Task<?>> addTasksForLoadingColStats(ColumnStatistics colStats,
HiveConf conf,
UpdatedMetaDataTracker updatedMetadata,
org.apache.hadoop.hive.metastore.api.Table tableObj,
long writeId)
throws IOException, TException {
- List<Task<? extends Serializable>> taskList = new ArrayList<>();
+ List<Task<?>> taskList = new ArrayList<>();
boolean isMigratingToTxn = ReplUtils.isTableMigratingToTransactional(conf, tableObj);
ColumnStatsUpdateWork work = new ColumnStatsUpdateWork(colStats, isMigratingToTxn);
work.setWriteId(writeId);
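The migration helpers above always order the open-transaction task ahead of the write it guards. A reduced sketch of that ordering, using only Task.addDependentTask from this diff (names are illustrative):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;

class MigrationOrdering {
  // The open-txn task becomes the parent, so the transaction is open
  // before the migrated child task executes.
  static List<Task<?>> openTxnFirst(Task<?> openTxnTask, Task<?> childTask) {
    openTxnTask.addDependentTask(childTask);
    List<Task<?>> roots = new ArrayList<>();
    roots.add(openTxnTask);
    return roots;
  }
}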
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/TaskTracker.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/TaskTracker.java
index 20ede9c..83aea8b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/TaskTracker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/util/TaskTracker.java
@@ -39,7 +39,7 @@ public class TaskTracker {
* used to identify the list of tasks at root level for a given level like table / db / partition.
* This does not include the task dependency notion of "table tasks <---- partition task".
*/
- private final List<Task<? extends Serializable>> tasks = new ArrayList<>();
+ private final List<Task<?>> tasks = new ArrayList<>();
private ReplicationState replicationState = null;
// since tasks themselves can be graphs we want to limit the number of created
// tasks, including all of their dependencies.
@@ -59,16 +59,16 @@ public class TaskTracker {
* the graph, however, might get created in a disjoint fashion, in which case we can just update
* the number of tasks using the "update" method.
*/
- public void addTask(Task<? extends Serializable> task) {
+ public void addTask(Task<?> task) {
tasks.add(task);
- List <Task<? extends Serializable>> visited = new ArrayList<>();
+ List <Task<?>> visited = new ArrayList<>();
updateTaskCount(task, visited);
}
- public void addTaskList(List <Task<? extends Serializable>> taskList) {
- List <Task<? extends Serializable>> visited = new ArrayList<>();
- for (Task<? extends Serializable> task : taskList) {
+ public void addTaskList(List <Task<?>> taskList) {
+ List <Task<?>> visited = new ArrayList<>();
+ for (Task<?> task : taskList) {
if (!visited.contains(task)) {
tasks.add(task);
updateTaskCount(task, visited);
@@ -78,23 +78,23 @@ public class TaskTracker {
// This method is used to traverse the DAG created in the tasks list and add the dependent task to
// the tail of each task chain.
- public void addDependentTask(Task<? extends Serializable> dependent) {
+ public void addDependentTask(Task<?> dependent) {
if (tasks.isEmpty()) {
addTask(dependent);
} else {
DAGTraversal.traverse(tasks, new AddDependencyToLeaves(dependent));
- List<Task<? extends Serializable>> visited = new ArrayList<>();
+ List<Task<?>> visited = new ArrayList<>();
updateTaskCount(dependent, visited);
}
}
- private void updateTaskCount(Task<? extends Serializable> task,
- List <Task<? extends Serializable>> visited) {
+ private void updateTaskCount(Task<?> task,
+ List <Task<?>> visited) {
numberOfTasks += 1;
visited.add(task);
if (task.getChildTasks() != null) {
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
if (visited.contains(childTask)) {
continue;
}
@@ -130,7 +130,7 @@ public class TaskTracker {
return replicationState;
}
- public List<Task<? extends Serializable>> tasks() {
+ public List<Task<?>> tasks() {
return tasks;
}
@@ -142,4 +142,4 @@ public class TaskTracker {
public int numberOfTasks() {
return numberOfTasks;
}
-}
\ No newline at end of file
+}
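TaskTracker's count is maintained by walking each added task and its children while remembering what was already visited, so shared subgraphs are counted once. A simplified, self-contained model of updateTaskCount (assuming only Task.getChildTasks from this diff):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;

class TaskCounter {
  static int count(Task<?> root) {
    return count(root, new ArrayList<>());
  }

  private static int count(Task<?> task, List<Task<?>> visited) {
    visited.add(task);
    int n = 1; // count this task
    if (task.getChildTasks() != null) {
      for (Task<?> child : task.getChildTasks()) {
        if (!visited.contains(child)) {
          n += count(child, visited); // shared children counted once
        }
      }
    }
    return n;
  }
}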
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/util/DAGTraversal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/util/DAGTraversal.java
index 40f5f55..ae49def 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/util/DAGTraversal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/util/DAGTraversal.java
@@ -32,8 +32,8 @@ import java.util.Set;
* stack overflows, hence iteration based.
*/
public class DAGTraversal {
- public static void traverse(List<Task<? extends Serializable>> tasks, Function function) {
- List<Task<? extends Serializable>> listOfTasks = new ArrayList<>(tasks);
+ public static void traverse(List<Task<?>> tasks, Function function) {
+ List<Task<?>> listOfTasks = new ArrayList<>(tasks);
while (!listOfTasks.isEmpty()) {
// HashSet will make sure that no duplicate children are added. If a task is added multiple
// times to the children list then it may cause the list to grow exponentially. Let's take an example of
@@ -49,8 +49,8 @@ public class DAGTraversal {
// the children list and in the next iteration ev2.task1 will be added 3 times and ev2.task2 will be added
// 3 times. So in the next iteration ev2.barrierTask will be added 6 times. As it goes like this, the next barrier
// task will be added 12-15 times and may reach millions with a large number of events.
- Set<Task<? extends Serializable>> children = new HashSet<>();
- for (Task<? extends Serializable> task : listOfTasks) {
+ Set<Task<?>> children = new HashSet<>();
+ for (Task<?> task : listOfTasks) {
// the skipProcessing check has to be done before continuing
if (function.skipProcessing(task)) {
continue;
@@ -69,8 +69,8 @@ public class DAGTraversal {
}
public interface Function {
- void process(Task<? extends Serializable> task);
+ void process(Task<?> task);
- boolean skipProcessing(Task<? extends Serializable> task);
+ boolean skipProcessing(Task<?> task);
}
}
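The comment block above describes why traverse() collects each level's children into a HashSet: with fan-in, enqueuing per path instead of per level grows the frontier exponentially. A stand-alone sketch of just that frontier pattern (the function callbacks are omitted; only Task.getChildTasks from this diff is assumed):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.hive.ql.exec.Task;

class FrontierWalk {
  static void walk(List<Task<?>> roots) {
    List<Task<?>> frontier = new ArrayList<>(roots);
    while (!frontier.isEmpty()) {
      // A Set deduplicates children reachable through several parents,
      // so each child is enqueued once per level, not once per path.
      Set<Task<?>> children = new HashSet<>();
      for (Task<?> task : frontier) {
        if (task.getChildTasks() != null) {
          children.addAll(task.getChildTasks());
        }
      }
      frontier = new ArrayList<>(children);
    }
  }
}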
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java
index 327628f..33bf6a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java
@@ -172,7 +172,7 @@ public interface HiveHistory {
*
* @param task
*/
- public void startTask(String queryId, Task<? extends Serializable> task,
+ public void startTask(String queryId, Task<?> task,
String taskName);
/**
@@ -180,7 +180,7 @@ public interface HiveHistory {
*
* @param task
*/
- public void endTask(String queryId, Task<? extends Serializable> task);
+ public void endTask(String queryId, Task<?> task);
/**
* Logs progress of a task if ConfVars.HIVE_LOG_INCREMENTAL_PLAN_PROGRESS is
@@ -188,7 +188,7 @@ public interface HiveHistory {
*
* @param task
*/
- public void progressTask(String queryId, Task<? extends Serializable> task);
+ public void progressTask(String queryId, Task<?> task);
/**
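The three hooks above bracket a task's lifetime. A hypothetical caller-side sequence (only the HiveHistory method signatures come from this diff; the wrapper is illustrative):

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.history.HiveHistory;

class HistoryLifecycle {
  static void run(HiveHistory history, String queryId, Task<?> task) {
    history.startTask(queryId, task, task.getClass().getName());
    history.progressTask(queryId, task); // optional, conf-gated
    history.endTask(queryId, task);
  }
}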
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
index 0bfa78d..80eaf00 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistoryImpl.java
@@ -263,7 +263,7 @@ public class HiveHistoryImpl implements HiveHistory{
}
@Override
- public void startTask(String queryId, Task<? extends Serializable> task,
+ public void startTask(String queryId, Task<?> task,
String taskName) {
TaskInfo ti = new TaskInfo();
@@ -279,7 +279,7 @@ public class HiveHistoryImpl implements HiveHistory{
}
@Override
- public void endTask(String queryId, Task<? extends Serializable> task) {
+ public void endTask(String queryId, Task<?> task) {
String id = queryId + ":" + task.getId();
TaskInfo ti = taskInfoMap.get(id);
@@ -291,7 +291,7 @@ public class HiveHistoryImpl implements HiveHistory{
}
@Override
- public void progressTask(String queryId, Task<? extends Serializable> task) {
+ public void progressTask(String queryId, Task<?> task) {
String id = queryId + ":" + task.getId();
TaskInfo ti = taskInfoMap.get(id);
if (ti == null) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/NoOperatorReuseCheckerHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/NoOperatorReuseCheckerHook.java
index 82b8c68..9a6a2e9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/NoOperatorReuseCheckerHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/NoOperatorReuseCheckerHook.java
@@ -69,8 +69,8 @@ public class NoOperatorReuseCheckerHook implements ExecuteWithHookContext {
List<Node> rootOps = Lists.newArrayList();
- List<Task<? extends Serializable>> roots = hookContext.getQueryPlan().getRootTasks();
- for (Task<? extends Serializable> task : roots) {
+ List<Task<?>> roots = hookContext.getQueryPlan().getRootTasks();
+ for (Task<?> task : roots) {
Object work = task.getWork();
if (work instanceof MapredWork) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java b/ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java
index edb7716..23cdb62 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lib/TaskGraphWalker.java
@@ -148,31 +148,31 @@ public class TaskGraphWalker implements GraphWalker {
opStack.push(nd);
}
- List<Task<? extends Serializable>> nextTaskList = null;
- Set<Task<? extends Serializable>> nextTaskSet = new HashSet<Task<? extends Serializable>>();
- List<Task<? extends Serializable>> taskListInConditionalTask = null;
+ List<Task<?>> nextTaskList = null;
+ Set<Task<?>> nextTaskSet = new HashSet<Task<?>>();
+ List<Task<?>> taskListInConditionalTask = null;
if(nd instanceof ConditionalTask ){
//for a conditional task, the next task list should contain the children of each task
//contained in the conditional task.
taskListInConditionalTask = ((ConditionalTask) nd).getListTasks();
- for(Task<? extends Serializable> tsk: taskListInConditionalTask){
- List<Task<? extends Serializable>> childTask = tsk.getChildTasks();
+ for(Task<?> tsk: taskListInConditionalTask){
+ List<Task<?>> childTask = tsk.getChildTasks();
if(childTask != null){
nextTaskSet.addAll(tsk.getChildTasks());
}
}
//convert the set into a list
if(nextTaskSet.size()>0){
- nextTaskList = new ArrayList<Task<? extends Serializable>>();
- for(Task<? extends Serializable> tsk:nextTaskSet ){
+ nextTaskList = new ArrayList<Task<?>>();
+ for(Task<?> tsk:nextTaskSet ){
nextTaskList.add(tsk);
}
}
}else{
//for other tasks, just return their child tasks
- nextTaskList = ((Task<? extends Serializable>)nd).getChildTasks();
+ nextTaskList = ((Task<?>)nd).getChildTasks();
}
if ((nextTaskList == null)
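For ConditionalTask nodes the walker computes successors as the union of every branch task's children, deduplicated through a Set. The same computation, pulled out into a hypothetical helper for readability:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.hive.ql.exec.ConditionalTask;
import org.apache.hadoop.hive.ql.exec.Task;

class Successors {
  static List<Task<?>> of(Task<?> task) {
    if (task instanceof ConditionalTask) {
      Set<Task<?>> next = new HashSet<>();
      for (Task<?> branch : ((ConditionalTask) task).getListTasks()) {
        if (branch.getChildTasks() != null) {
          next.addAll(branch.getChildTasks()); // dedupe shared children
        }
      }
      return next.isEmpty() ? null : new ArrayList<>(next);
    }
    return task.getChildTasks(); // ordinary tasks: just their children
  }
}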
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
index 25c6b24..10a0405 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
@@ -74,7 +74,7 @@ public class GenMRFileSink1 implements NodeProcessor {
Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(fsOp.getParentOperators().get(0));
- Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
+ Task<?> currTask = mapredCtx.getCurrTask();
ctx.setCurrTask(currTask);
ctx.addRootIfPossible(currTask);
@@ -88,9 +88,9 @@ public class GenMRFileSink1 implements NodeProcessor {
// If this file sink desc has been processed due to a linked file sink desc,
// use that task
- Map<FileSinkDesc, Task<? extends Serializable>> fileSinkDescs = ctx.getLinkedFileDescTasks();
+ Map<FileSinkDesc, Task<?>> fileSinkDescs = ctx.getLinkedFileDescTasks();
if (fileSinkDescs != null) {
- Task<? extends Serializable> childTask = fileSinkDescs.get(fsOp.getConf());
+ Task<?> childTask = fileSinkDescs.get(fsOp.getConf());
processLinkedFileDesc(ctx, childTask);
return true;
}
@@ -119,10 +119,10 @@ public class GenMRFileSink1 implements NodeProcessor {
// There are linked file sink operators and child tasks are present
if (fileSinkDesc.isLinkedFileSink() && (currTask.getChildTasks() != null) &&
(currTask.getChildTasks().size() == 1)) {
- Map<FileSinkDesc, Task<? extends Serializable>> linkedFileDescTasks =
+ Map<FileSinkDesc, Task<?>> linkedFileDescTasks =
ctx.getLinkedFileDescTasks();
if (linkedFileDescTasks == null) {
- linkedFileDescTasks = new HashMap<FileSinkDesc, Task<? extends Serializable>>();
+ linkedFileDescTasks = new HashMap<FileSinkDesc, Task<?>>();
ctx.setLinkedFileDescTasks(linkedFileDescTasks);
}
@@ -145,8 +145,8 @@ public class GenMRFileSink1 implements NodeProcessor {
* Use the task created by the first linked file descriptor
*/
private void processLinkedFileDesc(GenMRProcContext ctx,
- Task<? extends Serializable> childTask) throws SemanticException {
- Task<? extends Serializable> currTask = ctx.getCurrTask();
+ Task<?> childTask) throws SemanticException {
+ Task<?> currTask = ctx.getCurrTask();
TableScanOperator currTopOp = ctx.getCurrTopOp();
if (currTopOp != null && !ctx.isSeenOp(currTask, currTopOp)) {
String currAliasId = ctx.getCurrAliasId();
@@ -176,7 +176,7 @@ public class GenMRFileSink1 implements NodeProcessor {
NodeProcessorCtx opProcCtx, boolean chDir) throws SemanticException {
GenMRProcContext ctx = (GenMRProcContext) opProcCtx;
- Task<? extends Serializable> currTask = ctx.getCurrTask();
+ Task<?> currTask = ctx.getCurrTask();
// If the directory needs to be changed, send the new directory
Path dest = null;
@@ -195,7 +195,7 @@ public class GenMRFileSink1 implements NodeProcessor {
TableScanOperator currTopOp = ctx.getCurrTopOp();
String currAliasId = ctx.getCurrAliasId();
- HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
+ HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap =
ctx.getOpTaskMap();
// In case of multi-table insert, the path to alias mapping is needed for
@@ -203,7 +203,7 @@ public class GenMRFileSink1 implements NodeProcessor {
// reducer, treat it as a plan with null reducer
// If it is a map-only job, the task needs to be processed
if (currTopOp != null) {
- Task<? extends Serializable> mapTask = opTaskMap.get(null);
+ Task<?> mapTask = opTaskMap.get(null);
if (mapTask == null) {
if (!ctx.isSeenOp(currTask, currTopOp)) {
GenMapRedUtils.setTaskPlan(currAliasId, currTopOp, currTask, false, ctx);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
index a6e2f53..bbda668 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
@@ -56,7 +56,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
* GenMapRedCtx is used to keep track of the current state.
*/
public static class GenMapRedCtx {
- Task<? extends Serializable> currTask;
+ Task<?> currTask;
String currAliasId;
public GenMapRedCtx() {
@@ -67,7 +67,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
* the current task
* @param currAliasId
*/
- public GenMapRedCtx(Task<? extends Serializable> currTask, String currAliasId) {
+ public GenMapRedCtx(Task<?> currTask, String currAliasId) {
this.currTask = currTask;
this.currAliasId = currAliasId;
}
@@ -75,7 +75,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
/**
* @return current task
*/
- public Task<? extends Serializable> getCurrTask() {
+ public Task<?> getCurrTask() {
return currTask;
}
@@ -92,19 +92,19 @@ public class GenMRProcContext implements NodeProcessorCtx {
*
*/
public static class GenMRUnionCtx {
- final Task<? extends Serializable> uTask;
+ final Task<?> uTask;
List<String> taskTmpDir;
List<TableDesc> tt_desc;
List<TableScanOperator> listTopOperators;
- public GenMRUnionCtx(Task<? extends Serializable> uTask) {
+ public GenMRUnionCtx(Task<?> uTask) {
this.uTask = uTask;
taskTmpDir = new ArrayList<String>();
tt_desc = new ArrayList<TableDesc>();
listTopOperators = new ArrayList<>();
}
- public Task<? extends Serializable> getUTask() {
+ public Task<?> getUTask() {
return uTask;
}
@@ -135,19 +135,19 @@ public class GenMRProcContext implements NodeProcessorCtx {
private HiveConf conf;
private
- HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap;
+ HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap;
private
- HashMap<Task<? extends Serializable>, List<Operator<? extends OperatorDesc>>> taskToSeenOps;
+ HashMap<Task<?>, List<Operator<? extends OperatorDesc>>> taskToSeenOps;
private HashMap<UnionOperator, GenMRUnionCtx> unionTaskMap;
private List<FileSinkOperator> seenFileSinkOps;
private ParseContext parseCtx;
private List<Task<MoveWork>> mvTask;
- private List<Task<? extends Serializable>> rootTasks;
+ private List<Task<?>> rootTasks;
private LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx;
- private Task<? extends Serializable> currTask;
+ private Task<?> currTask;
private TableScanOperator currTopOp;
private UnionOperator currUnionOp;
private String currAliasId;
@@ -155,7 +155,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
// If many fileSinkDescs are linked to each other, it is a good idea to keep track of
// the tasks for the first fileSinkDesc. Others can use it.
- private Map<FileSinkDesc, Task<? extends Serializable>> linkedFileDescTasks;
+ private Map<FileSinkDesc, Task<?>> linkedFileDescTasks;
/**
* Set of read entities. This list is generated by the walker and is passed to
@@ -191,10 +191,10 @@ public class GenMRProcContext implements NodeProcessorCtx {
*/
public GenMRProcContext(
HiveConf conf,
- HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap,
+ HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap,
ParseContext parseCtx,
List<Task<MoveWork>> mvTask,
- List<Task<? extends Serializable>> rootTasks,
+ List<Task<?>> rootTasks,
LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx,
Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
this.conf = conf;
@@ -210,7 +210,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
currUnionOp = null;
currAliasId = null;
unionTaskMap = new HashMap<UnionOperator, GenMRUnionCtx>();
- taskToSeenOps = new HashMap<Task<? extends Serializable>,
+ taskToSeenOps = new HashMap<Task<?>,
List<Operator<? extends OperatorDesc>>>();
dependencyTaskForMultiInsert = null;
linkedFileDescTasks = null;
@@ -231,7 +231,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
* @return reducer to task mapping
*/
public HashMap<Operator<? extends OperatorDesc>,
- Task<? extends Serializable>> getOpTaskMap() {
+ Task<?>> getOpTaskMap() {
return opTaskMap;
}
@@ -240,7 +240,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
* reducer to task mapping
*/
public void setOpTaskMap(
- HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap) {
+ HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap) {
this.opTaskMap = opTaskMap;
}
@@ -305,7 +305,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
/**
* @return root tasks for the plan
*/
- public List<Task<? extends Serializable>> getRootTasks() {
+ public List<Task<?>> getRootTasks() {
return rootTasks;
}
@@ -313,11 +313,11 @@ public class GenMRProcContext implements NodeProcessorCtx {
* @param rootTasks
* root tasks for the plan
*/
- public void setRootTasks(List<Task<? extends Serializable>> rootTasks) {
+ public void setRootTasks(List<Task<?>> rootTasks) {
this.rootTasks = rootTasks;
}
- public boolean addRootIfPossible(Task<? extends Serializable> task) {
+ public boolean addRootIfPossible(Task<?> task) {
if (task.getParentTasks() == null || task.getParentTasks().isEmpty()) {
if (!rootTasks.contains(task)) {
return rootTasks.add(task);
@@ -345,7 +345,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
/**
* @return current task
*/
- public Task<? extends Serializable> getCurrTask() {
+ public Task<?> getCurrTask() {
return currTask;
}
@@ -353,7 +353,7 @@ public class GenMRProcContext implements NodeProcessorCtx {
* @param currTask
* current task
*/
- public void setCurrTask(Task<? extends Serializable> currTask) {
+ public void setCurrTask(Task<?> currTask) {
this.currTask = currTask;
}
@@ -454,12 +454,12 @@ public class GenMRProcContext implements NodeProcessorCtx {
return dependencyTaskForMultiInsert;
}
- public Map<FileSinkDesc, Task<? extends Serializable>> getLinkedFileDescTasks() {
+ public Map<FileSinkDesc, Task<?>> getLinkedFileDescTasks() {
return linkedFileDescTasks;
}
public void setLinkedFileDescTasks(
- Map<FileSinkDesc, Task<? extends Serializable>> linkedFileDescTasks) {
+ Map<FileSinkDesc, Task<?>> linkedFileDescTasks) {
this.linkedFileDescTasks = linkedFileDescTasks;
}
}
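The central structure touched here is opTaskMap, which maps a reducer operator to the task already generating it; a later sink feeding the same reducer reuses that task instead of creating another. A hypothetical lookup helper showing the intended use of the rewritten map type:

import java.util.HashMap;

import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;

class OpTaskMapUsage {
  static Task<?> taskFor(
      HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap,
      Operator<? extends OperatorDesc> reducer,
      Task<?> freshTask) {
    Task<?> existing = opTaskMap.get(reducer);
    if (existing != null) {
      return existing; // reuse the task already driving this reducer
    }
    opTaskMap.put(reducer, freshTask);
    return freshTask;
  }
}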
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
index 8b30c82..033cbdc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
@@ -61,7 +61,7 @@ public class GenMRRedSink1 implements NodeProcessor {
Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(stack.get(stack.size() - 2));
- Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
+ Task<?> currTask = mapredCtx.getCurrTask();
MapredWork currPlan = (MapredWork) currTask.getWork();
String currAliasId = mapredCtx.getCurrAliasId();
@@ -70,7 +70,7 @@ public class GenMRRedSink1 implements NodeProcessor {
"But found multiple children : " + op.getChildOperators());
}
Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
- Task<? extends Serializable> oldTask = ctx.getOpTaskMap().get(reducer);
+ Task<?> oldTask = ctx.getOpTaskMap().get(reducer);
ctx.setCurrAliasId(currAliasId);
ctx.setCurrTask(currTask);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
index 35cdc04..8c997c7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink2.java
@@ -56,12 +56,12 @@ public class GenMRRedSink2 implements NodeProcessor {
Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
- Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
+ Task<?> currTask = mapredCtx.getCurrTask();
String currAliasId = mapredCtx.getCurrAliasId();
Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
- Map<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap = ctx
+ Map<Operator<? extends OperatorDesc>, Task<?>> opTaskMap = ctx
.getOpTaskMap();
- Task<? extends Serializable> oldTask = opTaskMap.get(reducer);
+ Task<?> oldTask = opTaskMap.get(reducer);
ctx.setCurrAliasId(currAliasId);
ctx.setCurrTask(currTask);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
index fbab075..ed4bb30 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
@@ -67,7 +67,7 @@ public class GenMRRedSink3 implements NodeProcessor {
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(union);
- Task<? extends Serializable> unionTask = null;
+ Task<?> unionTask = null;
if(mapredCtx != null) {
unionTask = mapredCtx.getCurrTask();
} else {
@@ -76,9 +76,9 @@ public class GenMRRedSink3 implements NodeProcessor {
MapredWork plan = (MapredWork) unionTask.getWork();
- HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap = ctx
+ HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap = ctx
.getOpTaskMap();
- Task<? extends Serializable> reducerTask = opTaskMap.get(reducer);
+ Task<?> reducerTask = opTaskMap.get(reducer);
ctx.setCurrTask(unionTask);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
index 8f01507..abf363a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
@@ -65,7 +65,7 @@ public class GenMRUnion1 implements NodeProcessor {
GenMRUnionCtx uCtxTask = ctx.getUnionTask(union);
if (uCtxTask != null) {
// get task associated with this union
- Task<? extends Serializable> uTask = ctx.getUnionTask(union).getUTask();
+ Task<?> uTask = ctx.getUnionTask(union).getUTask();
if (uTask != null) {
if (ctx.getCurrTask() != null && ctx.getCurrTask() != uTask) {
// if ctx.getCurrTask() is in rootTasks, should be removed
@@ -88,7 +88,7 @@ public class GenMRUnion1 implements NodeProcessor {
ctx.setUnionTask(union, uCtxTask);
}
- Task<? extends Serializable> uTask = ctx.getCurrTask();
+ Task<?> uTask = ctx.getCurrTask();
if (uTask.getParentTasks() == null
|| uTask.getParentTasks().isEmpty()) {
if (!ctx.getRootTasks().contains(uTask)) {
@@ -115,7 +115,7 @@ public class GenMRUnion1 implements NodeProcessor {
private void processSubQueryUnionCreateIntermediate(
Operator<? extends OperatorDesc> parent,
Operator<? extends OperatorDesc> child,
- Task<? extends Serializable> uTask, GenMRProcContext ctx,
+ Task<?> uTask, GenMRProcContext ctx,
GenMRUnionCtx uCtxTask) {
ParseContext parseCtx = ctx.getParseCtx();
@@ -141,7 +141,7 @@ public class GenMRUnion1 implements NodeProcessor {
// assembled in the union context and later used to initialize the union
// plan
- Task<? extends Serializable> currTask = ctx.getCurrTask();
+ Task<?> currTask = ctx.getCurrTask();
currTask.addDependentTask(uTask);
if (ctx.getRootTasks().contains(uTask)) {
ctx.getRootTasks().remove(uTask);
@@ -168,7 +168,7 @@ public class GenMRUnion1 implements NodeProcessor {
throws SemanticException {
// The current plan can be thrown away after being merged with the union
// plan
- Task<? extends Serializable> uTask = uCtxTask.getUTask();
+ Task<?> uTask = uCtxTask.getUTask();
ctx.setCurrTask(uTask);
TableScanOperator topOp = ctx.getCurrTopOp();
if (topOp != null && !ctx.isSeenOp(uTask, topOp)) {
@@ -220,10 +220,10 @@ public class GenMRUnion1 implements NodeProcessor {
assert uPrsCtx != null;
- Task<? extends Serializable> currTask = ctx.getCurrTask();
+ Task<?> currTask = ctx.getCurrTask();
int pos = UnionProcFactory.getPositionParent(union, stack);
- Task<? extends Serializable> uTask = null;
+ Task<?> uTask = null;
MapredWork uPlan = null;
// union is encountered for the first time
@@ -272,7 +272,7 @@ public class GenMRUnion1 implements NodeProcessor {
}
private boolean shouldBeRootTask(
- Task<? extends Serializable> currTask) {
+ Task<?> currTask) {
return currTask.getParentTasks() == null
|| (currTask.getParentTasks().size() == 0);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
index 408ab08..73ca658 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
@@ -152,9 +152,9 @@ public final class GenMapRedUtils {
Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx =
opProcCtx.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
- Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
+ Task<?> currTask = mapredCtx.getCurrTask();
MapredWork plan = (MapredWork) currTask.getWork();
- HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
+ HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap =
opProcCtx.getOpTaskMap();
TableScanOperator currTopOp = opProcCtx.getCurrTopOp();
@@ -195,11 +195,11 @@ public final class GenMapRedUtils {
*/
public static void initUnionPlan(ReduceSinkOperator op, UnionOperator currUnionOp,
GenMRProcContext opProcCtx,
- Task<? extends Serializable> unionTask) throws SemanticException {
+ Task<?> unionTask) throws SemanticException {
Operator<? extends OperatorDesc> reducer = op.getChildOperators().get(0);
MapredWork plan = (MapredWork) unionTask.getWork();
- HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
+ HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap =
opProcCtx.getOpTaskMap();
opTaskMap.put(reducer, unionTask);
@@ -219,7 +219,7 @@ public final class GenMapRedUtils {
}
private static void setUnionPlan(GenMRProcContext opProcCtx,
- boolean local, Task<? extends Serializable> currTask, GenMRUnionCtx uCtx,
+ boolean local, Task<?> currTask, GenMRUnionCtx uCtx,
boolean mergeTask) throws SemanticException {
TableScanOperator currTopOp = opProcCtx.getCurrTopOp();
@@ -265,7 +265,7 @@ public final class GenMapRedUtils {
* for the union. The plan has already been created.
*/
public static void initUnionPlan(GenMRProcContext opProcCtx, UnionOperator currUnionOp,
- Task<? extends Serializable> currTask, boolean local)
+ Task<?> currTask, boolean local)
throws SemanticException {
// In case of lateral views followed by a join, the same tree
// can be traversed more than once
@@ -281,8 +281,8 @@ public final class GenMapRedUtils {
*/
public static void joinUnionPlan(GenMRProcContext opProcCtx,
UnionOperator currUnionOp,
- Task<? extends Serializable> currentUnionTask,
- Task<? extends Serializable> existingTask, boolean local)
+ Task<?> currentUnionTask,
+ Task<?> existingTask, boolean local)
throws SemanticException {
assert currUnionOp != null;
GenMRUnionCtx uCtx = opProcCtx.getUnionTask(currUnionOp);
@@ -290,7 +290,7 @@ public final class GenMapRedUtils {
setUnionPlan(opProcCtx, local, existingTask, uCtx, true);
- List<Task<? extends Serializable>> parTasks = null;
+ List<Task<?>> parTasks = null;
if (opProcCtx.getRootTasks().contains(currentUnionTask)) {
opProcCtx.getRootTasks().remove(currentUnionTask);
if (!opProcCtx.getRootTasks().contains(existingTask) &&
@@ -301,17 +301,17 @@ public final class GenMapRedUtils {
if ((currentUnionTask != null) && (currentUnionTask.getParentTasks() != null)
&& !currentUnionTask.getParentTasks().isEmpty()) {
- parTasks = new ArrayList<Task<? extends Serializable>>();
+ parTasks = new ArrayList<Task<?>>();
parTasks.addAll(currentUnionTask.getParentTasks());
Object[] parTaskArr = parTasks.toArray();
for (Object parTask : parTaskArr) {
- ((Task<? extends Serializable>) parTask)
+ ((Task<?>) parTask)
.removeDependentTask(currentUnionTask);
}
}
if ((currentUnionTask != null) && (parTasks != null)) {
- for (Task<? extends Serializable> parTask : parTasks) {
+ for (Task<?> parTask : parTasks) {
parTask.addDependentTask(existingTask);
if (opProcCtx.getRootTasks().contains(existingTask)) {
opProcCtx.getRootTasks().remove(existingTask);
@@ -332,22 +332,22 @@ public final class GenMapRedUtils {
* @param opProcCtx
* processing context
*/
- public static void joinPlan(Task<? extends Serializable> currTask,
- Task<? extends Serializable> oldTask, GenMRProcContext opProcCtx)
+ public static void joinPlan(Task<?> currTask,
+ Task<?> oldTask, GenMRProcContext opProcCtx)
throws SemanticException {
assert currTask != null && oldTask != null;
TableScanOperator currTopOp = opProcCtx.getCurrTopOp();
- List<Task<? extends Serializable>> parTasks = null;
+ List<Task<?>> parTasks = null;
// terminate the old task and make current task dependent on it
if (currTask.getParentTasks() != null
&& !currTask.getParentTasks().isEmpty()) {
- parTasks = new ArrayList<Task<? extends Serializable>>();
+ parTasks = new ArrayList<Task<?>>();
parTasks.addAll(currTask.getParentTasks());
Object[] parTaskArr = parTasks.toArray();
for (Object element : parTaskArr) {
- ((Task<? extends Serializable>) element).removeDependentTask(currTask);
+ ((Task<?>) element).removeDependentTask(currTask);
}
}
@@ -356,7 +356,7 @@ public final class GenMapRedUtils {
}
if (parTasks != null) {
- for (Task<? extends Serializable> parTask : parTasks) {
+ for (Task<?> parTask : parTasks) {
parTask.addDependentTask(oldTask);
}
}
@@ -374,7 +374,7 @@ public final class GenMapRedUtils {
* If currTopOp is not set for input of the task, add input for to the task
*/
static boolean mergeInput(TableScanOperator currTopOp,
- GenMRProcContext opProcCtx, Task<? extends Serializable> task, boolean local)
+ GenMRProcContext opProcCtx, Task<?> task, boolean local)
throws SemanticException {
if (!opProcCtx.isSeenOp(task, currTopOp)) {
String currAliasId = opProcCtx.getCurrAliasId();
@@ -389,7 +389,7 @@ public final class GenMapRedUtils {
* Split and link two tasks by temporary file : pRS-FS / TS-cRS-OP
*/
static void splitPlan(ReduceSinkOperator cRS,
- Task<? extends Serializable> parentTask, Task<? extends Serializable> childTask,
+ Task<?> parentTask, Task<?> childTask,
GenMRProcContext opProcCtx) throws SemanticException {
assert parentTask != null && childTask != null;
splitTasks(cRS, parentTask, childTask, opProcCtx);
@@ -408,10 +408,10 @@ public final class GenMapRedUtils {
throws SemanticException {
// Generate a new task
ParseContext parseCtx = opProcCtx.getParseCtx();
- Task<? extends Serializable> parentTask = opProcCtx.getCurrTask();
+ Task<?> parentTask = opProcCtx.getCurrTask();
MapredWork childPlan = getMapRedWork(parseCtx);
- Task<? extends Serializable> childTask = TaskFactory.get(childPlan);
+ Task<?> childTask = TaskFactory.get(childPlan);
Operator<? extends OperatorDesc> reducer = cRS.getChildOperators().get(0);
// Add the reducer
@@ -850,12 +850,12 @@ public final class GenMapRedUtils {
*
* @param task
*/
- public static void setKeyAndValueDescForTaskTree(Task<? extends Serializable> task) {
+ public static void setKeyAndValueDescForTaskTree(Task<?> task) {
if (task instanceof ConditionalTask) {
- List<Task<? extends Serializable>> listTasks = ((ConditionalTask) task)
+ List<Task<?>> listTasks = ((ConditionalTask) task)
.getListTasks();
- for (Task<? extends Serializable> tsk : listTasks) {
+ for (Task<?> tsk : listTasks) {
setKeyAndValueDescForTaskTree(tsk);
}
} else if (task instanceof ExecDriver) {
@@ -887,7 +887,7 @@ public final class GenMapRedUtils {
return;
}
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
setKeyAndValueDescForTaskTree(childTask);
}
}
@@ -919,7 +919,7 @@ public final class GenMapRedUtils {
* for an older release will also require picking HIVE-17195 at the least.
*/
public static void finalMapWorkChores(
- List<Task<? extends Serializable>> tasks, Configuration conf,
+ List<Task<?>> tasks, Configuration conf,
Interner<TableDesc> interner) {
List<ExecDriver> mrTasks = Utilities.getMRTasks(tasks);
if (!mrTasks.isEmpty()) {
@@ -1062,7 +1062,7 @@ public final class GenMapRedUtils {
* @param opProcCtx context
**/
private static void splitTasks(ReduceSinkOperator op,
- Task<? extends Serializable> parentTask, Task<? extends Serializable> childTask,
+ Task<?> parentTask, Task<?> childTask,
GenMRProcContext opProcCtx) throws SemanticException {
if (op.getNumParent() != 1) {
throw new IllegalStateException("Expecting operator " + op + " to have one parent. " +
@@ -1074,7 +1074,7 @@ public final class GenMapRedUtils {
// Root Task cannot depend on any other task, therefore childTask cannot be
// a root Task
- List<Task<? extends Serializable>> rootTasks = opProcCtx.getRootTasks();
+ List<Task<?>> rootTasks = opProcCtx.getRootTasks();
if (rootTasks.contains(childTask)) {
rootTasks.remove(childTask);
}
@@ -1262,7 +1262,7 @@ public final class GenMapRedUtils {
public static void createMRWorkForMergingFiles(FileSinkOperator fsInput,
Path finalName, DependencyCollectionTask dependencyTask,
List<Task<MoveWork>> mvTasks, HiveConf conf,
- Task<? extends Serializable> currTask, LineageState lineageState)
+ Task<?> currTask, LineageState lineageState)
throws SemanticException {
//
@@ -1404,7 +1404,7 @@ public final class GenMapRedUtils {
* @param dependencyTask
*/
private static void linkMoveTask(Task<MoveWork> mvTask,
- Task<? extends Serializable> task, HiveConf hconf,
+ Task<?> task, HiveConf hconf,
DependencyCollectionTask dependencyTask) {
if (task.getDependentTasks() == null || task.getDependentTasks().isEmpty()) {
@@ -1412,7 +1412,7 @@ public final class GenMapRedUtils {
addDependentMoveTasks(mvTask, hconf, task, dependencyTask);
} else {
// Otherwise, for each child run this method recursively
- for (Task<? extends Serializable> childTask : task.getDependentTasks()) {
+ for (Task<?> childTask : task.getDependentTasks()) {
linkMoveTask(mvTask, childTask, hconf, dependencyTask);
}
}
@@ -1430,7 +1430,7 @@ public final class GenMapRedUtils {
* @param dependencyTask
*/
public static void addDependentMoveTasks(Task<MoveWork> mvTask, HiveConf hconf,
- Task<? extends Serializable> parentTask, DependencyCollectionTask dependencyTask) {
+ Task<?> parentTask, DependencyCollectionTask dependencyTask) {
if (mvTask != null) {
if (dependencyTask != null) {
@@ -1483,7 +1483,7 @@ public final class GenMapRedUtils {
* HiveConf
*/
public static void addStatsTask(FileSinkOperator nd, MoveTask mvTask,
- Task<? extends Serializable> currTask, HiveConf hconf) {
+ Task<?> currTask, HiveConf hconf) {
MoveWork mvWork = mvTask.getWork();
BasicStatsWork statsWork = null;
@@ -1552,7 +1552,7 @@ public final class GenMapRedUtils {
columnStatsWork.truncateExisting(truncate);
columnStatsWork.setSourceTask(currTask);
- Task<? extends Serializable> statsTask = TaskFactory.get(columnStatsWork);
+ Task<?> statsTask = TaskFactory.get(columnStatsWork);
// subscribe feeds from the MoveTask so that MoveTask can forward the list
// of dynamic partition list to the StatsTask
@@ -1780,7 +1780,7 @@ public final class GenMapRedUtils {
*/
@SuppressWarnings("unchecked")
private static ConditionalTask createCondTask(HiveConf conf,
- Task<? extends Serializable> currTask, MoveWork mvWork, Serializable mergeWork,
+ Task<?> currTask, MoveWork mvWork, Serializable mergeWork,
Path condInputPath, Path condOutputPath, Task<MoveWork> moveTaskToLink,
DependencyCollectionTask dependencyTask, LineageState lineageState) {
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
@@ -1807,10 +1807,10 @@ public final class GenMapRedUtils {
// conflicts.
// TODO: if we are not dealing with concatenate DDL, we should not create a merge+move path
// because it should be impossible to get incompatible outputs.
- Task<? extends Serializable> mergeOnlyMergeTask = TaskFactory.get(mergeWork);
- Task<? extends Serializable> moveOnlyMoveTask = TaskFactory.get(workForMoveOnlyTask);
- Task<? extends Serializable> mergeAndMoveMergeTask = TaskFactory.get(mergeWork);
- Task<? extends Serializable> mergeAndMoveMoveTask = TaskFactory.get(moveWork);
+ Task<?> mergeOnlyMergeTask = TaskFactory.get(mergeWork);
+ Task<?> moveOnlyMoveTask = TaskFactory.get(workForMoveOnlyTask);
+ Task<?> mergeAndMoveMergeTask = TaskFactory.get(mergeWork);
+ Task<?> mergeAndMoveMoveTask = TaskFactory.get(moveWork);
// NOTE! It is necessary that the merge task is the parent of the move task, and not
// the other way around, for the proper execution of the execute method of
@@ -1823,7 +1823,7 @@ public final class GenMapRedUtils {
ConditionalWork cndWork = new ConditionalWork(listWorks);
- List<Task<? extends Serializable>> listTasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> listTasks = new ArrayList<Task<?>>();
listTasks.add(moveOnlyMoveTask);
listTasks.add(mergeOnlyMergeTask);
listTasks.add(mergeAndMoveMergeTask);
@@ -1902,7 +1902,7 @@ public final class GenMapRedUtils {
* Returns true iff the fsOp requires a merge
*/
public static boolean isMergeRequired(List<Task<MoveWork>> mvTasks, HiveConf hconf,
- FileSinkOperator fsOp, Task<? extends Serializable> currTask, boolean isInsertTable) {
+ FileSinkOperator fsOp, Task<?> currTask, boolean isInsertTable) {
// Has the user enabled merging of files for map-only jobs or for all jobs
if (mvTasks == null || mvTasks.isEmpty()) {
return false;
@@ -1939,7 +1939,7 @@ public final class GenMapRedUtils {
}
private static boolean isMergeRequiredForMr(HiveConf hconf,
- FileSinkOperator fsOp, Task<? extends Serializable> currTask) {
+ FileSinkOperator fsOp, Task<?> currTask) {
if (fsOp.getConf().isLinkedFileSink()) {
// If the user has HIVEMERGEMAPREDFILES set to false, the idea was that the
// number of reducers is few, so the number of files is small anyway.
@@ -1977,7 +1977,7 @@ public final class GenMapRedUtils {
* @param dependencyTask
* @return
*/
- public static Path createMoveTask(Task<? extends Serializable> currTask, boolean chDir,
+ public static Path createMoveTask(Task<?> currTask, boolean chDir,
FileSinkOperator fsOp, ParseContext parseCtx, List<Task<MoveWork>> mvTasks,
HiveConf hconf, DependencyCollectionTask dependencyTask) {
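These GenMapRedUtils hunks set the pattern for the whole patch: every Task<? extends Serializable> in a signature becomes Task<?>. Because Task already bounds its type parameter by Serializable where the class is declared, the two spellings admit exactly the same tasks at use sites, and dropping the bound removes the Serializable dependency from dozens of signatures. A minimal sketch of the equivalence, using made-up Work/Task stand-ins rather than Hive's real classes:

  import java.io.Serializable;
  import java.util.ArrayList;
  import java.util.List;

  class Work implements Serializable {}

  class Task<T extends Serializable> {
    private final T work;
    Task(T work) { this.work = work; }
    T getWork() { return work; }
  }

  public class WildcardDemo {
    public static void main(String[] args) {
      // Both wildcard forms accept the same tasks: T is already bounded
      // by Serializable in Task's own declaration, so repeating the
      // bound at every use site adds nothing.
      Task<? extends Serializable> oldStyle = new Task<>(new Work());
      Task<?> newStyle = new Task<>(new Work());

      List<Task<?>> tasks = new ArrayList<>();
      tasks.add(oldStyle); // a Task<? extends Serializable> is also a Task<?>
      tasks.add(newStyle);
      System.out.println(tasks.size()); // 2
    }
  }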
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java
index 6bf4deb..21d792e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java
@@ -146,7 +146,7 @@ public final class MapJoinFactory {
* position of the parent
*/
private static void initMapJoinPlan(AbstractMapJoinOperator<? extends MapJoinDesc> op,
- Task<? extends Serializable> currTask,
+ Task<?> currTask,
GenMRProcContext opProcCtx, boolean local)
throws SemanticException {
@@ -171,7 +171,7 @@ public final class MapJoinFactory {
* @param pos
* position of the parent in the stack
*/
- private static void joinMapJoinPlan(Task<? extends Serializable> oldTask,
+ private static void joinMapJoinPlan(Task<?> oldTask,
GenMRProcContext opProcCtx, boolean local)
throws SemanticException {
TableScanOperator currTopOp = opProcCtx.getCurrTopOp();
@@ -199,12 +199,12 @@ public final class MapJoinFactory {
Map<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx = ctx
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(mapJoin.getParentOperators().get(pos));
- Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
+ Task<?> currTask = mapredCtx.getCurrTask();
MapredWork currPlan = (MapredWork) currTask.getWork();
String currAliasId = mapredCtx.getCurrAliasId();
- HashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>> opTaskMap =
+ HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap =
ctx.getOpTaskMap();
- Task<? extends Serializable> oldTask = opTaskMap.get(mapJoin);
+ Task<?> oldTask = opTaskMap.get(mapJoin);
ctx.setCurrAliasId(currAliasId);
ctx.setCurrTask(currTask);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java
index 4ac2567..ba05ec7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java
@@ -46,18 +46,18 @@ public abstract class AbstractJoinTaskDispatcher implements Dispatcher {
physicalContext = context;
}
- public abstract Task<? extends Serializable> processCurrentTask(MapRedTask currTask,
+ public abstract Task<?> processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask, Context context)
throws SemanticException;
protected void replaceTaskWithConditionalTask(
- Task<? extends Serializable> currTask, ConditionalTask cndTsk) {
+ Task<?> currTask, ConditionalTask cndTsk) {
// add this task into task tree
// set all parent tasks
- List<Task<? extends Serializable>> parentTasks = currTask.getParentTasks();
+ List<Task<?>> parentTasks = currTask.getParentTasks();
currTask.setParentTasks(null);
if (parentTasks != null) {
- for (Task<? extends Serializable> tsk : parentTasks) {
+ for (Task<?> tsk : parentTasks) {
// make the newly generated task depend on all the parent tasks of the current task.
tsk.addDependentTask(cndTsk);
// remove the current task from its original parent task's dependent task list
@@ -69,13 +69,13 @@ public abstract class AbstractJoinTaskDispatcher implements Dispatcher {
physicalContext.addToRootTask(cndTsk);
}
// set all child tasks
- List<Task<? extends Serializable>> oldChildTasks = currTask.getChildTasks();
+ List<Task<?>> oldChildTasks = currTask.getChildTasks();
if (oldChildTasks != null) {
- for (Task<? extends Serializable> tsk : cndTsk.getListTasks()) {
+ for (Task<?> tsk : cndTsk.getListTasks()) {
if (tsk.equals(currTask)) {
continue;
}
- for (Task<? extends Serializable> oldChild : oldChildTasks) {
+ for (Task<?> oldChild : oldChildTasks) {
tsk.addDependentTask(oldChild);
}
}
@@ -85,13 +85,13 @@ public abstract class AbstractJoinTaskDispatcher implements Dispatcher {
// Replace the task with the new task. Copy the children and parents of the old
// task to the new task.
protected void replaceTask(
- Task<? extends Serializable> currTask, Task<? extends Serializable> newTask) {
+ Task<?> currTask, Task<?> newTask) {
// add this task into task tree
// set all parent tasks
- List<Task<? extends Serializable>> parentTasks = currTask.getParentTasks();
+ List<Task<?>> parentTasks = currTask.getParentTasks();
currTask.setParentTasks(null);
if (parentTasks != null) {
- for (Task<? extends Serializable> tsk : parentTasks) {
+ for (Task<?> tsk : parentTasks) {
// remove the current task from its original parent task's dependent task list
tsk.removeDependentTask(currTask);
// make the newly generated task depend on all the parent tasks of the current task.
@@ -104,10 +104,10 @@ public abstract class AbstractJoinTaskDispatcher implements Dispatcher {
}
// set all child tasks
- List<Task<? extends Serializable>> oldChildTasks = currTask.getChildTasks();
+ List<Task<?>> oldChildTasks = currTask.getChildTasks();
currTask.setChildTasks(null);
if (oldChildTasks != null) {
- for (Task<? extends Serializable> tsk : oldChildTasks) {
+ for (Task<?> tsk : oldChildTasks) {
// remove the current task from its original parent task's dependent task list
tsk.getParentTasks().remove(currTask);
// make the newly generated task depend on all the parent tasks of the current task.
@@ -160,21 +160,21 @@ public abstract class AbstractJoinTaskDispatcher implements Dispatcher {
TaskGraphWalkerContext walkerCtx = (TaskGraphWalkerContext) nodeOutputs[0];
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
// if it is not a map reduce task or a conditional task, just skip
if (currTask.isMapRedTask()) {
if (currTask instanceof ConditionalTask) {
// get the list of tasks
- List<Task<? extends Serializable>> taskList = ((ConditionalTask) currTask).getListTasks();
- for (Task<? extends Serializable> tsk : taskList) {
+ List<Task<?>> taskList = ((ConditionalTask) currTask).getListTasks();
+ for (Task<?> tsk : taskList) {
if (tsk.isMapRedTask()) {
- Task<? extends Serializable> newTask = this.processCurrentTask((MapRedTask) tsk,
+ Task<?> newTask = this.processCurrentTask((MapRedTask) tsk,
((ConditionalTask) currTask), physicalContext.getContext());
walkerCtx.addToDispatchList(newTask);
}
}
} else {
- Task<? extends Serializable> newTask =
+ Task<?> newTask =
this.processCurrentTask((MapRedTask) currTask, null, physicalContext.getContext());
walkerCtx.addToDispatchList(newTask);
}
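replaceTaskWithConditionalTask and replaceTask above share one recipe: detach the old task from every parent and child list, then splice the replacement into both sides. A self-contained sketch of that recipe (Node is an illustrative stand-in, not Hive's Task API):

  import java.util.ArrayList;
  import java.util.List;

  public class ReplaceTaskDemo {
    static class Node {
      final String name;
      final List<Node> parents = new ArrayList<>();
      final List<Node> children = new ArrayList<>();
      Node(String name) { this.name = name; }
      void addChild(Node c) { children.add(c); c.parents.add(this); }
    }

    // Detach oldNode from every parent and child, then splice newNode in.
    static void replace(Node oldNode, Node newNode) {
      for (Node p : new ArrayList<>(oldNode.parents)) {
        p.children.remove(oldNode);
        p.addChild(newNode);
      }
      for (Node c : new ArrayList<>(oldNode.children)) {
        c.parents.remove(oldNode);
        newNode.addChild(c);
      }
      oldNode.parents.clear();
      oldNode.children.clear();
    }

    public static void main(String[] args) {
      Node a = new Node("a"), b = new Node("b"), c = new Node("c"), r = new Node("r");
      a.addChild(b);
      b.addChild(c);
      replace(b, r);
      System.out.println(a.children.get(0).name + " -> " + r.children.get(0).name); // r -> c
    }
  }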
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java
index c04b471..2727e80 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AnnotateRunTimeStatsOptimizer.java
@@ -68,7 +68,7 @@ public class AnnotateRunTimeStatsOptimizer implements PhysicalPlanResolver {
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
Set<Operator<? extends OperatorDesc>> ops = new HashSet<>();
if (currTask instanceof MapRedTask) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
index 0d9d5e0..9c4a0c2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java
@@ -197,7 +197,7 @@ public class CommonJoinTaskDispatcher extends AbstractJoinTaskDispatcher impleme
return;
}
- Task<? extends Serializable> childTask = mapJoinTask.getChildTasks().get(0);
+ Task<?> childTask = mapJoinTask.getChildTasks().get(0);
if (!(childTask instanceof MapRedTask)) {
// Nothing to do if it is not a MapReduce task.
return;
@@ -334,13 +334,13 @@ public class CommonJoinTaskDispatcher extends AbstractJoinTaskDispatcher impleme
}
// Step 2.4: Remove this MapJoin task
- List<Task<? extends Serializable>> parentTasks = mapJoinTask.getParentTasks();
+ List<Task<?>> parentTasks = mapJoinTask.getParentTasks();
mapJoinTask.setParentTasks(null);
mapJoinTask.setChildTasks(null);
childMapRedTask.getParentTasks().remove(mapJoinTask);
if (parentTasks != null) {
childMapRedTask.getParentTasks().addAll(parentTasks);
- for (Task<? extends Serializable> parentTask : parentTasks) {
+ for (Task<?> parentTask : parentTasks) {
parentTask.getChildTasks().remove(mapJoinTask);
if (!parentTask.getChildTasks().contains(childMapRedTask)) {
parentTask.getChildTasks().add(childMapRedTask);
@@ -371,7 +371,7 @@ public class CommonJoinTaskDispatcher extends AbstractJoinTaskDispatcher impleme
}
@Override
- public Task<? extends Serializable> processCurrentTask(MapRedTask currTask,
+ public Task<?> processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask, Context context)
throws SemanticException {
@@ -386,12 +386,12 @@ public class CommonJoinTaskDispatcher extends AbstractJoinTaskDispatcher impleme
// create conditional work list and task list
List<Serializable> listWorks = new ArrayList<Serializable>();
- List<Task<? extends Serializable>> listTasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> listTasks = new ArrayList<Task<?>>();
// create task to aliases mapping and alias to input file mapping for resolver
// Must be deterministic order map for consistent q-test output across Java versions
- HashMap<Task<? extends Serializable>, Set<String>> taskToAliases =
- new LinkedHashMap<Task<? extends Serializable>, Set<String>>();
+ HashMap<Task<?>, Set<String>> taskToAliases =
+ new LinkedHashMap<Task<?>, Set<String>>();
Map<Path, List<String>> pathToAliases = currWork.getPathToAliases();
Map<String, Operator<? extends OperatorDesc>> aliasToWork = currWork.getAliasToWork();
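The "deterministic order map" comment above is about textual q-test comparison: HashMap iteration order can differ across Java versions, while LinkedHashMap always iterates in insertion order. A quick illustration:

  import java.util.HashMap;
  import java.util.LinkedHashMap;
  import java.util.Map;

  public class MapOrderDemo {
    public static void main(String[] args) {
      Map<String, Integer> hashed = new HashMap<>();
      Map<String, Integer> linked = new LinkedHashMap<>();
      for (String k : new String[] {"stage-3", "stage-1", "stage-2"}) {
        hashed.put(k, k.length());
        linked.put(k, k.length());
      }
      System.out.println(hashed.keySet()); // order depends on hashing internals
      System.out.println(linked.keySet()); // always [stage-3, stage-1, stage-2]
    }
  }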
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductHandler.java
index 71d060a..50b9c2b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductHandler.java
@@ -101,16 +101,16 @@ public class CrossProductHandler implements PhysicalPlanResolver, Dispatcher {
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
@SuppressWarnings("unchecked")
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
if (currTask instanceof MapRedTask) {
MapRedTask mrTsk = (MapRedTask)currTask;
MapredWork mrWrk = mrTsk.getWork();
checkMapJoins(mrTsk);
checkMRReducer(currTask.toString(), mrWrk);
} else if (currTask instanceof ConditionalTask ) {
- List<Task<? extends Serializable>> taskListInConditionalTask =
+ List<Task<?>> taskListInConditionalTask =
((ConditionalTask) currTask).getListTasks();
- for(Task<? extends Serializable> tsk: taskListInConditionalTask){
+ for(Task<?> tsk: taskListInConditionalTask){
dispatch(tsk, stack, nodeOutputs);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
index f7cedfe..5be1329 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
@@ -108,7 +108,7 @@ public final class GenMRSkewJoinProcessor {
*/
@SuppressWarnings("unchecked")
public static void processSkewJoin(JoinOperator joinOp,
- Task<? extends Serializable> currTask, ParseContext parseCtx)
+ Task<?> currTask, ParseContext parseCtx)
throws SemanticException {
// We are trying to add map joins to handle skew keys, and map join right
@@ -117,7 +117,7 @@ public final class GenMRSkewJoinProcessor {
return;
}
- List<Task<? extends Serializable>> children = currTask.getChildTasks();
+ List<Task<?>> children = currTask.getChildTasks();
Path baseTmpDir = parseCtx.getContext().getMRTmpPath();
@@ -149,10 +149,10 @@ public final class GenMRSkewJoinProcessor {
joinDescriptor.setSkewKeyDefinition(HiveConf.getIntVar(parseCtx.getConf(),
HiveConf.ConfVars.HIVESKEWJOINKEY));
- HashMap<Path, Task<? extends Serializable>> bigKeysDirToTaskMap =
- new HashMap<Path, Task<? extends Serializable>>();
+ HashMap<Path, Task<?>> bigKeysDirToTaskMap =
+ new HashMap<Path, Task<?>>();
List<Serializable> listWorks = new ArrayList<Serializable>();
- List<Task<? extends Serializable>> listTasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> listTasks = new ArrayList<Task<?>>();
MapredWork currPlan = (MapredWork) currTask.getWork();
TableDesc keyTblDesc = (TableDesc) currPlan.getReduceWork().getKeyDesc().clone();
@@ -331,7 +331,7 @@ public final class GenMRSkewJoinProcessor {
MapredWork w = new MapredWork();
w.setMapWork(newPlan);
- Task<? extends Serializable> skewJoinMapJoinTask = TaskFactory.get(w);
+ Task<?> skewJoinMapJoinTask = TaskFactory.get(w);
skewJoinMapJoinTask.setFetchSource(currTask.isFetchSource());
bigKeysDirToTaskMap.put(bigKeyDirPath, skewJoinMapJoinTask);
@@ -339,13 +339,13 @@ public final class GenMRSkewJoinProcessor {
listTasks.add(skewJoinMapJoinTask);
}
if (children != null) {
- for (Task<? extends Serializable> tsk : listTasks) {
- for (Task<? extends Serializable> oldChild : children) {
+ for (Task<?> tsk : listTasks) {
+ for (Task<?> oldChild : children) {
tsk.addDependentTask(oldChild);
}
}
- currTask.setChildTasks(new ArrayList<Task<? extends Serializable>>());
- for (Task<? extends Serializable> oldChild : children) {
+ currTask.setChildTasks(new ArrayList<Task<?>>());
+ for (Task<?> oldChild : children) {
oldChild.getParentTasks().remove(currTask);
}
listTasks.addAll(children);
@@ -358,7 +358,7 @@ public final class GenMRSkewJoinProcessor {
cndTsk.setListTasks(listTasks);
cndTsk.setResolver(new ConditionalResolverSkewJoin());
cndTsk.setResolverCtx(context);
- currTask.setChildTasks(new ArrayList<Task<? extends Serializable>>());
+ currTask.setChildTasks(new ArrayList<Task<?>>());
currTask.addDependentTask(cndTsk);
return;
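The skew-join rewrite above ends by handing all candidate plans to a ConditionalTask whose ConditionalResolverSkewJoin picks one at run time, once actual key sizes are known. A toy version of that compile-time-candidates, run-time-choice shape (Plan and the byte threshold are invented for illustration):

  import java.util.List;
  import java.util.function.Predicate;

  public class ResolverDemo {
    static class Plan {
      final String name;
      final long inputBytes;
      Plan(String name, long inputBytes) { this.name = name; this.inputBytes = inputBytes; }
    }

    // Pick the first candidate whose input fits; otherwise fall back to
    // the last (safe) plan in the list.
    static Plan resolve(List<Plan> candidates, Predicate<Plan> fits) {
      return candidates.stream().filter(fits)
          .findFirst().orElse(candidates.get(candidates.size() - 1));
    }

    public static void main(String[] args) {
      List<Plan> plans = List.of(new Plan("skew-mapjoin", 5_000_000_000L),
                                 new Plan("common-join", 5_000_000_000L));
      Plan chosen = resolve(plans, p -> p.inputBytes < 1_000_000_000L);
      System.out.println(chosen.name); // common-join: big keys do not fit in memory
    }
  }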
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
index 7f7f49b..8f96fd6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenSparkSkewJoinProcessor.java
@@ -82,7 +82,7 @@ public class GenSparkSkewJoinProcessor {
}
@SuppressWarnings("unchecked")
- public static void processSkewJoin(JoinOperator joinOp, Task<? extends Serializable> currTask,
+ public static void processSkewJoin(JoinOperator joinOp, Task<?> currTask,
ReduceWork reduceWork, ParseContext parseCtx) throws SemanticException {
SparkWork currentWork = ((SparkTask) currTask).getWork();
@@ -91,7 +91,7 @@ public class GenSparkSkewJoinProcessor {
return;
}
- List<Task<? extends Serializable>> children = currTask.getChildTasks();
+ List<Task<?>> children = currTask.getChildTasks();
Path baseTmpDir = parseCtx.getContext().getMRTmpPath();
@@ -214,16 +214,16 @@ public class GenSparkSkewJoinProcessor {
joinDescriptor.setKeyTableDesc(keyTblDesc);
// create N-1 map join tasks
- HashMap<Path, Task<? extends Serializable>> bigKeysDirToTaskMap =
- new HashMap<Path, Task<? extends Serializable>>();
+ HashMap<Path, Task<?>> bigKeysDirToTaskMap =
+ new HashMap<Path, Task<?>>();
List<Serializable> listWorks = new ArrayList<Serializable>();
- List<Task<? extends Serializable>> listTasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> listTasks = new ArrayList<Task<?>>();
for (int i = 0; i < numAliases - 1; i++) {
Byte src = tags[i];
HiveConf hiveConf = new HiveConf(parseCtx.getConf(),
GenSparkSkewJoinProcessor.class);
SparkWork sparkWork = new SparkWork(parseCtx.getConf().getVar(HiveConf.ConfVars.HIVEQUERYID));
- Task<? extends Serializable> skewJoinMapJoinTask = TaskFactory.get(sparkWork);
+ Task<?> skewJoinMapJoinTask = TaskFactory.get(sparkWork);
skewJoinMapJoinTask.setFetchSource(currTask.isFetchSource());
// create N TableScans
@@ -328,17 +328,17 @@ public class GenSparkSkewJoinProcessor {
listTasks.add(skewJoinMapJoinTask);
}
if (children != null) {
- for (Task<? extends Serializable> tsk : listTasks) {
- for (Task<? extends Serializable> oldChild : children) {
+ for (Task<?> tsk : listTasks) {
+ for (Task<?> oldChild : children) {
tsk.addDependentTask(oldChild);
}
}
- currTask.setChildTasks(new ArrayList<Task<? extends Serializable>>());
- for (Task<? extends Serializable> oldChild : children) {
+ currTask.setChildTasks(new ArrayList<Task<?>>());
+ for (Task<?> oldChild : children) {
oldChild.getParentTasks().remove(currTask);
}
listTasks.addAll(children);
- for (Task<? extends Serializable> oldChild : children) {
+ for (Task<?> oldChild : children) {
listWorks.add(oldChild.getWork());
}
}
@@ -350,7 +350,7 @@ public class GenSparkSkewJoinProcessor {
cndTsk.setListTasks(listTasks);
cndTsk.setResolver(new ConditionalResolverSkewJoin());
cndTsk.setResolverCtx(context);
- currTask.setChildTasks(new ArrayList<Task<? extends Serializable>>());
+ currTask.setChildTasks(new ArrayList<Task<?>>());
currTask.addDependentTask(cndTsk);
}
@@ -397,7 +397,7 @@ public class GenSparkSkewJoinProcessor {
hashTableSinkOp.getConf().setTag(tag);
}
- private static void setMemUsage(MapJoinOperator mapJoinOp, Task<? extends Serializable> task,
+ private static void setMemUsage(MapJoinOperator mapJoinOp, Task<?> task,
ParseContext parseContext) {
MapJoinResolver.LocalMapJoinProcCtx context =
new MapJoinResolver.LocalMapJoinProcCtx(task, parseContext);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java
index 2f2f04f..6c370f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapDecider.java
@@ -139,7 +139,7 @@ public class LlapDecider implements PhysicalPlanResolver {
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
@SuppressWarnings("unchecked")
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
if (currTask instanceof TezTask) {
TezWork work = ((TezTask) currTask).getWork();
for (BaseWork w: work.getAllWork()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapPreVectorizationPass.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapPreVectorizationPass.java
index ec066ef..16fbe95 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapPreVectorizationPass.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/LlapPreVectorizationPass.java
@@ -82,7 +82,7 @@ public class LlapPreVectorizationPass implements PhysicalPlanResolver {
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
@SuppressWarnings("unchecked")
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
if (currTask instanceof TezTask) {
TezWork work = ((TezTask) currTask).getWork();
for (BaseWork w: work.getAllWork()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
index eac4768..484369b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java
@@ -96,7 +96,7 @@ public class MapJoinResolver implements PhysicalPlanResolver {
physicalContext = context;
}
- private void processCurrentTask(Task<? extends Serializable> currTask,
+ private void processCurrentTask(Task<?> currTask,
ConditionalTask conditionalTask) throws SemanticException {
// get current mapred work and its local work
MapredWork mapredWork = (MapredWork) currTask.getWork();
@@ -145,10 +145,10 @@ public class MapJoinResolver implements PhysicalPlanResolver {
}
newLocalWork.setHasStagedAlias(true);
// get all parent tasks
- List<Task<? extends Serializable>> parentTasks = currTask.getParentTasks();
+ List<Task<?>> parentTasks = currTask.getParentTasks();
currTask.setParentTasks(null);
if (parentTasks != null) {
- for (Task<? extends Serializable> tsk : parentTasks) {
+ for (Task<?> tsk : parentTasks) {
// make the newly generated task depend on all the parent tasks of the current task.
tsk.addDependentTask(localTask);
// remove the current task from its original parent task's dependent task list
@@ -162,7 +162,7 @@ public class MapJoinResolver implements PhysicalPlanResolver {
physicalContext.removeFromRootTask(currTask);
} else {
// set list task
- List<Task<? extends Serializable>> listTask = conditionalTask.getListTasks();
+ List<Task<?>> listTask = conditionalTask.getListTasks();
ConditionalWork conditionalWork = conditionalTask.getWork();
int index = listTask.indexOf(currTask);
listTask.set(index, localTask);
@@ -176,14 +176,14 @@ public class MapJoinResolver implements PhysicalPlanResolver {
// get bigKeysDirToTaskMap
ConditionalResolverSkewJoinCtx context = (ConditionalResolverSkewJoinCtx) conditionalTask
.getResolverCtx();
- HashMap<Path, Task<? extends Serializable>> bigKeysDirToTaskMap = context
+ HashMap<Path, Task<?>> bigKeysDirToTaskMap = context
.getDirToTaskMap();
// to avoid concurrently modifying the hashmap
- HashMap<Path, Task<? extends Serializable>> newbigKeysDirToTaskMap = new HashMap<Path, Task<? extends Serializable>>();
+ HashMap<Path, Task<?>> newbigKeysDirToTaskMap = new HashMap<Path, Task<?>>();
// reset the resolver
- for (Map.Entry<Path, Task<? extends Serializable>> entry : bigKeysDirToTaskMap
+ for (Map.Entry<Path, Task<?>> entry : bigKeysDirToTaskMap
.entrySet()) {
- Task<? extends Serializable> task = entry.getValue();
+ Task<?> task = entry.getValue();
Path key = entry.getKey();
if (task.equals(currTask)) {
newbigKeysDirToTaskMap.put(key, localTask);
@@ -197,14 +197,14 @@ public class MapJoinResolver implements PhysicalPlanResolver {
// get taskToAliases
ConditionalResolverCommonJoinCtx context = (ConditionalResolverCommonJoinCtx) conditionalTask
.getResolverCtx();
- HashMap<Task<? extends Serializable>, Set<String>> taskToAliases = context.getTaskToAliases();
+ HashMap<Task<?>, Set<String>> taskToAliases = context.getTaskToAliases();
// to avoid concurrently modifying the hashmap
// Must be deterministic order map for consistent q-test output across Java versions
- HashMap<Task<? extends Serializable>, Set<String>> newTaskToAliases =
- new LinkedHashMap<Task<? extends Serializable>, Set<String>>();
+ HashMap<Task<?>, Set<String>> newTaskToAliases =
+ new LinkedHashMap<Task<?>, Set<String>>();
// reset the resolver
- for (Map.Entry<Task<? extends Serializable>, Set<String>> entry : taskToAliases.entrySet()) {
- Task<? extends Serializable> task = entry.getKey();
+ for (Map.Entry<Task<?>, Set<String>> entry : taskToAliases.entrySet()) {
+ Task<?> task = entry.getKey();
Set<String> key = new HashSet<String>(entry.getValue());
if (task.equals(currTask)) {
@@ -227,13 +227,13 @@ public class MapJoinResolver implements PhysicalPlanResolver {
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
// if it is not a map reduce task or a conditional task, just skip
if (currTask.isMapRedTask()) {
if (currTask instanceof ConditionalTask) {
// get the list of tasks
- List<Task<? extends Serializable>> taskList = ((ConditionalTask) currTask).getListTasks();
- for (Task<? extends Serializable> tsk : taskList) {
+ List<Task<?>> taskList = ((ConditionalTask) currTask).getListTasks();
+ for (Task<?> tsk : taskList) {
if (tsk.isMapRedTask()) {
this.processCurrentTask(tsk, ((ConditionalTask) currTask));
}
@@ -278,14 +278,14 @@ public class MapJoinResolver implements PhysicalPlanResolver {
* A container of current task and parse context.
*/
public static class LocalMapJoinProcCtx implements NodeProcessorCtx {
- private Task<? extends Serializable> currentTask;
+ private Task<?> currentTask;
private ParseContext parseCtx;
private List<Operator<? extends OperatorDesc>> dummyParentOp = null;
private boolean isFollowedByGroupBy;
private Map<MapJoinOperator, List<Operator<? extends OperatorDesc>>> directWorks;
- public LocalMapJoinProcCtx(Task<? extends Serializable> task, ParseContext parseCtx) {
+ public LocalMapJoinProcCtx(Task<?> task, ParseContext parseCtx) {
currentTask = task;
this.parseCtx = parseCtx;
dummyParentOp = new ArrayList<Operator<? extends OperatorDesc>>();
@@ -293,11 +293,11 @@ public class MapJoinResolver implements PhysicalPlanResolver {
isFollowedByGroupBy = false;
}
- public Task<? extends Serializable> getCurrentTask() {
+ public Task<?> getCurrentTask() {
return currentTask;
}
- public void setCurrentTask(Task<? extends Serializable> currentTask) {
+ public void setCurrentTask(Task<?> currentTask) {
this.currentTask = currentTask;
}
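Both resolver updates in MapJoinResolver copy entries into a fresh map instead of mutating the map being iterated, which would risk a ConcurrentModificationException. The same idiom in miniature, with strings standing in for Path and Task:

  import java.util.LinkedHashMap;
  import java.util.Map;

  public class RebuildMapDemo {
    public static void main(String[] args) {
      Map<String, String> dirToTask = new LinkedHashMap<>();
      dirToTask.put("/tmp/bigkeys/1", "mr-task");
      dirToTask.put("/tmp/bigkeys/2", "other-task");

      // Rewrite entries into a fresh map rather than modifying
      // dirToTask while iterating over it.
      Map<String, String> rebuilt = new LinkedHashMap<>();
      for (Map.Entry<String, String> e : dirToTask.entrySet()) {
        // swap the entry being replaced, keep the rest as-is
        rebuilt.put(e.getKey(),
            e.getValue().equals("mr-task") ? "local-task" : e.getValue());
      }
      System.out.println(rebuilt); // {/tmp/bigkeys/1=local-task, /tmp/bigkeys/2=other-task}
    }
  }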
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java
index 64f1e7b..11dba77 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MemoryDecider.java
@@ -84,7 +84,7 @@ public class MemoryDecider implements PhysicalPlanResolver {
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
if (currTask instanceof StatsTask) {
currTask = ((StatsTask) currTask).getWork().getSourceTask();
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java
index b7dd90d..2fb666d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/NullScanTaskDispatcher.java
@@ -168,7 +168,7 @@ public class NullScanTaskDispatcher implements Dispatcher {
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
- Task<? extends Serializable> task = (Task<? extends Serializable>) nd;
+ Task<?> task = (Task<?>) nd;
// create the context for walking operators
ParseContext parseContext = physicalContext.getParseContext();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalContext.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalContext.java
index 321dcbe..18e1ceb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalContext.java
@@ -34,12 +34,12 @@ public class PhysicalContext {
protected HiveConf conf;
private ParseContext parseContext;
private Context context;
- protected List<Task<? extends Serializable>> rootTasks;
- protected Task<? extends Serializable> fetchTask;
+ protected List<Task<?>> rootTasks;
+ protected Task<?> fetchTask;
public PhysicalContext(HiveConf conf, ParseContext parseContext,
- Context context, List<Task<? extends Serializable>> rootTasks,
- Task<? extends Serializable> fetchTask) {
+ Context context, List<Task<?>> rootTasks,
+ Task<?> fetchTask) {
super();
this.conf = conf;
this.parseContext = parseContext;
@@ -72,27 +72,27 @@ public class PhysicalContext {
this.context = context;
}
- public List<Task<? extends Serializable>> getRootTasks() {
+ public List<Task<?>> getRootTasks() {
return rootTasks;
}
- public void setRootTasks(List<Task<? extends Serializable>> rootTasks) {
+ public void setRootTasks(List<Task<?>> rootTasks) {
this.rootTasks = rootTasks;
}
- public Task<? extends Serializable> getFetchTask() {
+ public Task<?> getFetchTask() {
return fetchTask;
}
- public void setFetchTask(Task<? extends Serializable> fetchTask) {
+ public void setFetchTask(Task<?> fetchTask) {
this.fetchTask = fetchTask;
}
- public void addToRootTask(Task<? extends Serializable> tsk){
+ public void addToRootTask(Task<?> tsk){
rootTasks.add(tsk);
}
- public void removeFromRootTask(Task<? extends Serializable> tsk){
+ public void removeFromRootTask(Task<?> tsk){
rootTasks.remove(tsk);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java
index 9224350..c15d050 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SerializeFilter.java
@@ -68,7 +68,7 @@ public class SerializeFilter implements PhysicalPlanResolver {
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
if (currTask instanceof StatsTask) {
currTask = ((StatsTask) currTask).getWork().getSourceTask();
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java
index 4f76b19..093226c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java
@@ -56,7 +56,7 @@ public final class SkewJoinProcFactory {
return null;
}
ParseContext parseContext = context.getParseCtx();
- Task<? extends Serializable> currentTsk = context.getCurrentTask();
+ Task<?> currentTsk = context.getCurrentTask();
GenMRSkewJoinProcessor.processSkewJoin(op, currentTsk, parseContext);
return null;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java
index bca6781..3cabd38 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java
@@ -76,7 +76,7 @@ public class SkewJoinResolver implements PhysicalPlanResolver {
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
- Task<? extends Serializable> task = (Task<? extends Serializable>) nd;
+ Task<?> task = (Task<?>) nd;
if (!task.isMapRedTask() || task instanceof ConditionalTask
|| ((MapredWork) task.getWork()).getReduceWork() == null) {
@@ -140,20 +140,20 @@ public class SkewJoinResolver implements PhysicalPlanResolver {
* A container of current task and parse context.
*/
public static class SkewJoinProcCtx implements NodeProcessorCtx {
- private Task<? extends Serializable> currentTask;
+ private Task<?> currentTask;
private ParseContext parseCtx;
- public SkewJoinProcCtx(Task<? extends Serializable> task,
+ public SkewJoinProcCtx(Task<?> task,
ParseContext parseCtx) {
currentTask = task;
this.parseCtx = parseCtx;
}
- public Task<? extends Serializable> getCurrentTask() {
+ public Task<?> getCurrentTask() {
return currentTask;
}
- public void setCurrentTask(Task<? extends Serializable> currentTask) {
+ public void setCurrentTask(Task<?> currentTask) {
this.currentTask = currentTask;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
index ebf1708..fbf6852 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
@@ -223,7 +223,7 @@ public class SortMergeJoinTaskDispatcher extends AbstractJoinTaskDispatcher impl
}
@Override
- public Task<? extends Serializable> processCurrentTask(MapRedTask currTask,
+ public Task<?> processCurrentTask(MapRedTask currTask,
ConditionalTask conditionalTask, Context context)
throws SemanticException {
// whether it contains a sort merge join operator
@@ -252,12 +252,12 @@ public class SortMergeJoinTaskDispatcher extends AbstractJoinTaskDispatcher impl
// create conditional work list and task list
List<Serializable> listWorks = new ArrayList<Serializable>();
- List<Task<? extends Serializable>> listTasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> listTasks = new ArrayList<Task<?>>();
// create task to aliases mapping and alias to input file mapping for resolver
// Must be deterministic order map for consistent q-test output across Java versions
- HashMap<Task<? extends Serializable>, Set<String>> taskToAliases =
- new LinkedHashMap<Task<? extends Serializable>, Set<String>>();
+ HashMap<Task<?>, Set<String>> taskToAliases =
+ new LinkedHashMap<Task<?>, Set<String>>();
// Note that pathToAlias will behave as if the original plan was a join plan
Map<Path, List<String>> pathToAliases = currJoinWork.getMapWork().getPathToAliases();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java
index 5024596..80248d6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java
@@ -59,14 +59,14 @@ public class SparkCrossProductCheck implements PhysicalPlanResolver, Dispatcher
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
@SuppressWarnings("unchecked")
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
if (currTask instanceof SparkTask) {
SparkWork sparkWork = ((SparkTask) currTask).getWork();
checkShuffleJoin(sparkWork);
checkMapJoin((SparkTask) currTask);
} else if (currTask instanceof ConditionalTask) {
- List<Task<? extends Serializable>> taskList = ((ConditionalTask) currTask).getListTasks();
- for (Task<? extends Serializable> task : taskList) {
+ List<Task<?>> taskList = ((ConditionalTask) currTask).getListTasks();
+ for (Task<?> task : taskList) {
dispatch(task, stack, nodeOutputs);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkDynamicPartitionPruningResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkDynamicPartitionPruningResolver.java
index 1241383..98d9c6a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkDynamicPartitionPruningResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkDynamicPartitionPruningResolver.java
@@ -81,7 +81,7 @@ public class SparkDynamicPartitionPruningResolver implements PhysicalPlanResolve
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
- Task<? extends Serializable> task = (Task<? extends Serializable>) nd;
+ Task<?> task = (Task<?>) nd;
// If the given Task is a SparkTask then search its Work DAG for SparkPartitionPruningSinkOperator
if (task instanceof SparkTask) {
@@ -124,12 +124,12 @@ public class SparkDynamicPartitionPruningResolver implements PhysicalPlanResolve
* Recursively go through the children of the given {@link Task} and check if any child {@link SparkTask} contains
* the specified {@link MapWork} object.
*/
- private boolean taskContainsDependentMapWork(Task<? extends Serializable> task,
+ private boolean taskContainsDependentMapWork(Task<?> task,
MapWork work) throws SemanticException {
if (task == null || task.getChildTasks() == null) {
return false;
}
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
if (childTask != null && childTask instanceof SparkTask && childTask.getMapWork().contains(work)) {
return true;
} else if (taskContainsDependentMapWork(childTask, work)) {
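taskContainsDependentMapWork is a plain recursive walk over child tasks. Stripped of the Hive types, its shape is roughly the following (all names below are illustrative):

  import java.util.ArrayList;
  import java.util.List;

  public class DagSearchDemo {
    static class T {
      String name;
      List<T> children = new ArrayList<>();
      T(String name) { this.name = name; }
    }

    // True if any transitive child matches the target.
    static boolean anyChild(T task, String target) {
      if (task == null || task.children == null) {
        return false;
      }
      for (T child : task.children) {
        if (target.equals(child.name) || anyChild(child, target)) {
          return true;
        }
      }
      return false;
    }

    public static void main(String[] args) {
      T root = new T("root"), mid = new T("spark-task"), leaf = new T("map-work-7");
      root.children.add(mid);
      mid.children.add(leaf);
      System.out.println(anyChild(root, "map-work-7")); // true
    }
  }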
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkMapJoinResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkMapJoinResolver.java
index 55b46e5..97cb4a4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkMapJoinResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkMapJoinResolver.java
@@ -61,7 +61,7 @@ import org.apache.hadoop.hive.ql.plan.SparkWork;
public class SparkMapJoinResolver implements PhysicalPlanResolver {
// prevents a task from being processed multiple times
- private final Set<Task<? extends Serializable>> visitedTasks = new HashSet<>();
+ private final Set<Task<?>> visitedTasks = new HashSet<>();
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
@@ -245,11 +245,11 @@ public class SparkMapJoinResolver implements PhysicalPlanResolver {
}
} else {
if (originalTask != resultTask) {
- List<Task<? extends Serializable>> parentTasks = originalTask.getParentTasks();
+ List<Task<?>> parentTasks = originalTask.getParentTasks();
if (parentTasks != null && parentTasks.size() > 0) {
// avoid concurrent modification
- originalTask.setParentTasks(new ArrayList<Task<? extends Serializable>>());
- for (Task<? extends Serializable> parentTask : parentTasks) {
+ originalTask.setParentTasks(new ArrayList<Task<?>>());
+ for (Task<?> parentTask : parentTasks) {
parentTask.addDependentTask(resultTask);
parentTask.removeDependentTask(originalTask);
}
@@ -271,12 +271,12 @@ public class SparkMapJoinResolver implements PhysicalPlanResolver {
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nos)
throws SemanticException {
- Task<? extends Serializable> currentTask = (Task<? extends Serializable>) nd;
+ Task<?> currentTask = (Task<?>) nd;
if(currentTask.isMapRedTask()) {
if (currentTask instanceof ConditionalTask) {
- List<Task<? extends Serializable>> taskList =
+ List<Task<?>> taskList =
((ConditionalTask) currentTask).getListTasks();
- for (Task<? extends Serializable> tsk : taskList) {
+ for (Task<?> tsk : taskList) {
if (tsk instanceof SparkTask) {
processCurrentTask((SparkTask) tsk, (ConditionalTask) currentTask);
visitedTasks.add(tsk);
@@ -350,7 +350,7 @@ public class SparkMapJoinResolver implements PhysicalPlanResolver {
ConditionalWork conditionalWork = conditionalTask.getWork();
SparkWork originWork = originalTask.getWork();
SparkWork newWork = newTask.getWork();
- List<Task<? extends Serializable>> listTask = conditionalTask.getListTasks();
+ List<Task<?>> listTask = conditionalTask.getListTasks();
List<Serializable> listWork = (List<Serializable>) conditionalWork.getListWorks();
int taskIndex = listTask.indexOf(originalTask);
int workIndex = listWork.indexOf(originWork);
@@ -365,15 +365,15 @@ public class SparkMapJoinResolver implements PhysicalPlanResolver {
ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx context =
(ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx) conditionalTask
.getResolverCtx();
- HashMap<Path, Task<? extends Serializable>> bigKeysDirToTaskMap = context
+ HashMap<Path, Task<?>> bigKeysDirToTaskMap = context
.getDirToTaskMap();
// to avoid concurrently modifying the hashmap
- HashMap<Path, Task<? extends Serializable>> newbigKeysDirToTaskMap =
- new HashMap<Path, Task<? extends Serializable>>();
+ HashMap<Path, Task<?>> newbigKeysDirToTaskMap =
+ new HashMap<Path, Task<?>>();
// reset the resolver
- for (Map.Entry<Path, Task<? extends Serializable>> entry :
+ for (Map.Entry<Path, Task<?>> entry :
bigKeysDirToTaskMap.entrySet()) {
- Task<? extends Serializable> task = entry.getValue();
+ Task<?> task = entry.getValue();
Path bigKeyDir = entry.getKey();
if (task.equals(originalTask)) {
newbigKeysDirToTaskMap.put(bigKeyDir, newTask);
@@ -384,7 +384,7 @@ public class SparkMapJoinResolver implements PhysicalPlanResolver {
context.setDirToTaskMap(newbigKeysDirToTaskMap);
// update no skew task
if (context.getNoSkewTask() != null && context.getNoSkewTask().equals(originalTask)) {
- List<Task<? extends Serializable>> noSkewTask = new ArrayList<>();
+ List<Task<?>> noSkewTask = new ArrayList<>();
noSkewTask.add(newTask);
context.setNoSkewTask(noSkewTask);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/StageIDsRearranger.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/StageIDsRearranger.java
index 00de47b..6c87475 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/StageIDsRearranger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/StageIDsRearranger.java
@@ -70,14 +70,14 @@ public class StageIDsRearranger implements PhysicalPlanResolver {
}
}
};
- for (Task<? extends Serializable> task : tasks) {
+ for (Task<?> task : tasks) {
traverse.traverse(task);
}
return sources;
}
public static List<Task> getExplainOrder(HiveConf conf, List<Task<?>> tasks) {
- for (Task<? extends Serializable> task : tasks) {
+ for (Task<?> task : tasks) {
task.setRootTask(true);
}
String var = conf.getVar(HiveConf.ConfVars.HIVESTAGEIDREARRANGE);
@@ -122,7 +122,7 @@ public class StageIDsRearranger implements PhysicalPlanResolver {
return type == ArrangeType.NONE || type == ArrangeType.IDONLY || super.isReady(task);
}
};
- for (Task<? extends Serializable> task : tasks) {
+ for (Task<?> task : tasks) {
traverse.traverse(task);
}
return new ArrayList<Task>(traverse.traversed);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index a846d9b..4cc02b4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -989,7 +989,7 @@ public class Vectorizer implements PhysicalPlanResolver {
@Override
public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
throws SemanticException {
- Task<? extends Serializable> currTask = (Task<? extends Serializable>) nd;
+ Task<?> currTask = (Task<?>) nd;
if (currTask instanceof MapRedTask) {
MapredWork mapredWork = ((MapRedTask) currTask).getWork();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinProcFactory.java
index a7c18b0..54b2550 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinProcFactory.java
@@ -83,7 +83,7 @@ public class SparkSkewJoinProcFactory {
Object... nodeOutputs) throws SemanticException {
SparkSkewJoinResolver.SparkSkewJoinProcCtx context =
(SparkSkewJoinResolver.SparkSkewJoinProcCtx) procCtx;
- Task<? extends Serializable> currentTsk = context.getCurrentTask();
+ Task<?> currentTsk = context.getCurrentTask();
JoinOperator op = (JoinOperator) nd;
ReduceWork reduceWork = context.getReducerToReduceWork().get(op);
ParseContext parseContext = context.getParseCtx();
@@ -170,11 +170,11 @@ public class SparkSkewJoinProcFactory {
tableScanOp, mapWork, false, tableDesc);
// insert the new task between current task and its child
@SuppressWarnings("unchecked")
- Task<? extends Serializable> newTask = TaskFactory.get(newWork);
- List<Task<? extends Serializable>> childTasks = currentTask.getChildTasks();
+ Task<?> newTask = TaskFactory.get(newWork);
+ List<Task<?>> childTasks = currentTask.getChildTasks();
// must have at most one child
if (childTasks != null && childTasks.size() > 0) {
- Task<? extends Serializable> childTask = childTasks.get(0);
+ Task<?> childTask = childTasks.get(0);
currentTask.removeDependentTask(childTask);
newTask.addDependentTask(childTask);
}
@@ -224,11 +224,11 @@ public class SparkSkewJoinProcFactory {
}
private static boolean supportRuntimeSkewJoin(JoinOperator joinOp, ReduceWork reduceWork,
- Task<? extends Serializable> currTask, HiveConf hiveConf) {
+ Task<?> currTask, HiveConf hiveConf) {
if (currTask instanceof SparkTask &&
GenMRSkewJoinProcessor.skewJoinEnabled(hiveConf, joinOp)) {
SparkWork sparkWork = ((SparkTask) currTask).getWork();
- List<Task<? extends Serializable>> children = currTask.getChildTasks();
+ List<Task<?>> children = currTask.getChildTasks();
return !joinOp.getConf().isFixedAsSorted() && sparkWork.contains(reduceWork) &&
(children == null || children.size() <= 1) &&
OperatorUtils.getOp(reduceWork, CommonJoinOperator.class).size() == 1;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinResolver.java
index 089438a..fbd53e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SparkSkewJoinResolver.java
@@ -77,7 +77,7 @@ public class SparkSkewJoinResolver implements PhysicalPlanResolver {
throws SemanticException {
@SuppressWarnings("unchecked")
- Task<? extends Serializable> task = (Task<? extends Serializable>) nd;
+ Task<?> task = (Task<?>) nd;
if (task instanceof SparkTask) {
SparkWork sparkWork = ((SparkTask) task).getWork();
SparkSkewJoinProcCtx skewJoinProcCtx =
@@ -114,7 +114,7 @@ public class SparkSkewJoinResolver implements PhysicalPlanResolver {
// need a map from the reducer to the corresponding ReduceWork
private Map<Operator<?>, ReduceWork> reducerToReduceWork;
- public SparkSkewJoinProcCtx(Task<? extends Serializable> task,
+ public SparkSkewJoinProcCtx(Task<?> task,
ParseContext parseCtx) {
super(task, parseCtx);
reducerToReduceWork = new HashMap<Operator<?>, ReduceWork>();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SplitSparkWorkResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SplitSparkWorkResolver.java
index 3e1f85d..4d0331d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SplitSparkWorkResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/spark/SplitSparkWorkResolver.java
@@ -54,7 +54,7 @@ import com.google.common.base.Preconditions;
public class SplitSparkWorkResolver implements PhysicalPlanResolver {
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
- for (Task<? extends Serializable> task : pctx.getRootTasks()) {
+ for (Task<?> task : pctx.getRootTasks()) {
if (task instanceof SparkTask) {
splitSparkWork(((SparkTask) task).getWork());
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java
index 32b89a7..4971b06 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java
@@ -32,6 +32,6 @@ HiveSemanticAnalyzerHook {
}
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
}
}
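The hook API change above is purely in the rootTasks parameter type. A toy interface/adapter pair with the same shape (shortened names, not Hive's actual classes):

  import java.util.ArrayList;
  import java.util.List;

  public class HookDemo {
    static class Task<T> {}

    interface AnalyzerHook {
      // same shape as the new signature: unbounded wildcard on Task
      void postAnalyze(String queryText, List<Task<?>> rootTasks);
    }

    // adapter with an empty default body, like AbstractSemanticAnalyzerHook
    static class AbstractHook implements AnalyzerHook {
      @Override
      public void postAnalyze(String queryText, List<Task<?>> rootTasks) {
      }
    }

    public static void main(String[] args) {
      AnalyzerHook hook = new AbstractHook() {
        @Override
        public void postAnalyze(String queryText, List<Task<?>> rootTasks) {
          System.out.println(queryText + " -> " + rootTasks.size() + " root task(s)");
        }
      };
      List<Task<?>> roots = new ArrayList<>();
      roots.add(new Task<>());
      hook.postAnalyze("SELECT 1", roots);
    }
  }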
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 95bed20..684dec3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -120,7 +120,7 @@ public abstract class BaseSemanticAnalyzer {
protected final Hive db;
protected final HiveConf conf;
protected final QueryState queryState;
- protected List<Task<? extends Serializable>> rootTasks;
+ protected List<Task<?>> rootTasks;
protected FetchTask fetchTask;
protected final Logger LOG;
protected final LogHelper console;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
index 6e05ced..8aae641 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -93,11 +93,11 @@ public class EximUtil {
private Hive db;
private Set<ReadEntity> inputs;
private Set<WriteEntity> outputs;
- private List<Task<? extends Serializable>> tasks;
+ private List<Task<?>> tasks;
private Logger LOG;
private Context ctx;
private DumpType eventType = DumpType.EVENT_UNKNOWN;
- private Task<? extends Serializable> openTxnTask = null;
+ private Task<?> openTxnTask = null;
public HiveConf getConf() {
return conf;
@@ -115,7 +115,7 @@ public class EximUtil {
return outputs;
}
- public List<Task<? extends Serializable>> getTasks() {
+ public List<Task<?>> getTasks() {
return tasks;
}
@@ -138,7 +138,7 @@ public class EximUtil {
public SemanticAnalyzerWrapperContext(HiveConf conf, Hive db,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
- List<Task<? extends Serializable>> tasks,
+ List<Task<?>> tasks,
Logger LOG, Context ctx){
this.conf = conf;
this.db = db;
@@ -149,10 +149,10 @@ public class EximUtil {
this.ctx = ctx;
}
- public Task<? extends Serializable> getOpenTxnTask() {
+ public Task<?> getOpenTxnTask() {
return openTxnTask;
}
- public void setOpenTxnTask(Task<? extends Serializable> openTxnTask) {
+ public void setOpenTxnTask(Task<?> openTxnTask) {
this.openTxnTask = openTxnTask;
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezProcContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezProcContext.java
index f977fc1..2c0d21a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezProcContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezProcContext.java
@@ -68,7 +68,7 @@ public class GenTezProcContext implements NodeProcessorCtx{
public final List<Task<MoveWork>> moveTask;
// rootTasks is the entry point for all generated tasks
- public final List<Task<? extends Serializable>> rootTasks;
+ public final List<Task<?>> rootTasks;
public final Set<ReadEntity> inputs;
public final Set<WriteEntity> outputs;
@@ -164,7 +164,7 @@ public class GenTezProcContext implements NodeProcessorCtx{
@SuppressWarnings("unchecked")
public GenTezProcContext(HiveConf conf, ParseContext parseContext,
- List<Task<MoveWork>> moveTask, List<Task<? extends Serializable>> rootTasks,
+ List<Task<MoveWork>> moveTask, List<Task<?>> rootTasks,
Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
this.conf = conf;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java
index 08aa7e0..65d08d7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java
@@ -72,5 +72,5 @@ public interface HiveSemanticAnalyzerHook extends Hook {
*/
public void postAnalyze(
HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException;
+ List<Task<?>> rootTasks) throws SemanticException;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index e955989..ec75fa4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -548,7 +548,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
return TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), dropTblDesc), x.getConf());
}
- private static Task<? extends Serializable> alterTableTask(ImportTableDesc tableDesc,
+ private static Task<?> alterTableTask(ImportTableDesc tableDesc,
EximUtil.SemanticAnalyzerWrapperContext x,
ReplicationSpec replicationSpec) {
tableDesc.setReplaceMode(true);
@@ -558,7 +558,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
return tableDesc.getCreateTableTask(x.getInputs(), x.getOutputs(), x.getConf());
}
- private static Task<? extends Serializable> alterSinglePartition(
+ private static Task<?> alterSinglePartition(
ImportTableDesc tblDesc, Table table, Warehouse wh, AlterTableAddPartitionDesc addPartitionDesc,
ReplicationSpec replicationSpec, org.apache.hadoop.hive.ql.metadata.Partition ptn,
EximUtil.SemanticAnalyzerWrapperContext x) throws MetaException, IOException, HiveException {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
index 42637df..bcba4d7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MapReduceCompiler.java
@@ -93,7 +93,7 @@ public class MapReduceCompiler extends TaskCompiler {
// loop over all the tasks recursively
@Override
- protected void setInputFormat(Task<? extends Serializable> task) {
+ protected void setInputFormat(Task<?> task) {
if (task instanceof ExecDriver) {
MapWork work = ((MapredWork) task.getWork()).getMapWork();
Map<String, Operator<? extends OperatorDesc>> opMap = work.getAliasToWork();
@@ -103,15 +103,15 @@ public class MapReduceCompiler extends TaskCompiler {
}
}
} else if (task instanceof ConditionalTask) {
- List<Task<? extends Serializable>> listTasks
+ List<Task<?>> listTasks
= ((ConditionalTask) task).getListTasks();
- for (Task<? extends Serializable> tsk : listTasks) {
+ for (Task<?> tsk : listTasks) {
setInputFormat(tsk);
}
}
if (task.getChildTasks() != null) {
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
setInputFormat(childTask);
}
}
@@ -137,7 +137,7 @@ public class MapReduceCompiler extends TaskCompiler {
}
// loop over all the tasks recursively
- private void breakTaskTree(Task<? extends Serializable> task) {
+ private void breakTaskTree(Task<?> task) {
if (task instanceof ExecDriver) {
Map<String, Operator<? extends OperatorDesc>> opMap =
@@ -148,9 +148,9 @@ public class MapReduceCompiler extends TaskCompiler {
}
}
} else if (task instanceof ConditionalTask) {
- List<Task<? extends Serializable>> listTasks = ((ConditionalTask) task)
+ List<Task<?>> listTasks = ((ConditionalTask) task)
.getListTasks();
- for (Task<? extends Serializable> tsk : listTasks) {
+ for (Task<?> tsk : listTasks) {
breakTaskTree(tsk);
}
}
@@ -159,7 +159,7 @@ public class MapReduceCompiler extends TaskCompiler {
return;
}
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
breakTaskTree(childTask);
}
}
@@ -191,7 +191,7 @@ public class MapReduceCompiler extends TaskCompiler {
}
@Override
- protected void decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx,
+ protected void decideExecMode(List<Task<?>> rootTasks, Context ctx,
GlobalLimitCtx globalLimitCtx)
throws SemanticException {
@@ -271,13 +271,13 @@ public class MapReduceCompiler extends TaskCompiler {
}
@Override
- protected void optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
+ protected void optimizeTaskPlan(List<Task<?>> rootTasks,
ParseContext pCtx, Context ctx) throws SemanticException {
// reduce sink does not have any kids - since the plan by now has been
// broken up into multiple
// tasks, iterate over all tasks.
// For each task, go over all operators recursively
- for (Task<? extends Serializable> rootTask : rootTasks) {
+ for (Task<?> rootTask : rootTasks) {
breakTaskTree(rootTask);
}
@@ -291,7 +291,7 @@ public class MapReduceCompiler extends TaskCompiler {
}
@Override
- protected void generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx,
+ protected void generateTaskTree(List<Task<?>> rootTasks, ParseContext pCtx,
List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException {
// generate map reduce plans
@@ -299,7 +299,7 @@ public class MapReduceCompiler extends TaskCompiler {
GenMRProcContext procCtx = new GenMRProcContext(
conf,
// Must be deterministic order map for consistent q-test output across Java versions
- new LinkedHashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>>(),
+ new LinkedHashMap<Operator<? extends OperatorDesc>, Task<?>>(),
tempParseContext, mvTask, rootTasks,
new LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx>(),
inputs, outputs);
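
The methods in the hunks above (setInputFormat, breakTaskTree) share one traversal shape: handle the task itself, recurse into the branches of a ConditionalTask, then into the child tasks. Since the walk never uses the work type, the unbounded Task<?> carries the same information as the old Serializable-bounded wildcard, with one less cross-module type dependency for incremental compilation to track. A minimal, self-contained sketch of that walk; Task, ConditionalTask, and TaskTreeWalker below are simplified stand-ins, not the real Hive classes:

import java.util.ArrayList;
import java.util.List;

// Stand-ins reduced to the two accessors the walk needs.
class Task<T> {
  List<Task<?>> children = new ArrayList<>();
  List<Task<?>> getChildTasks() { return children; }
}

class ConditionalTask extends Task<Void> {
  List<Task<?>> listTasks = new ArrayList<>();
  List<Task<?>> getListTasks() { return listTasks; }
}

class TaskTreeWalker {
  // Same shape as setInputFormat above: the work type is irrelevant
  // to the walk, so the unbounded Task<?> is enough.
  void visit(Task<?> task) {
    if (task instanceof ConditionalTask) {
      for (Task<?> branch : ((ConditionalTask) task).getListTasks()) {
        visit(branch);
      }
    }
    if (task.getChildTasks() != null) {
      for (Task<?> child : task.getChildTasks()) {
        visit(child);
      }
    }
  }
}
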
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
index 42dd594..24f593e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
@@ -110,7 +110,7 @@ public class ParseContext {
private GlobalLimitCtx globalLimitCtx;
private Set<ReadEntity> semanticInputs;
- private List<Task<? extends Serializable>> rootTasks;
+ private List<Task<?>> rootTasks;
private FetchTask fetchTask;
private QueryProperties queryProperties;
@@ -192,7 +192,7 @@ public class ParseContext {
Map<TableScanOperator, SampleDesc> opToSamplePruner,
GlobalLimitCtx globalLimitCtx,
Map<String, SplitSample> nameToSplitSample,
- Set<ReadEntity> semanticInputs, List<Task<? extends Serializable>> rootTasks,
+ Set<ReadEntity> semanticInputs, List<Task<?>> rootTasks,
Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner,
Map<String, ReadEntity> viewAliasToInput,
List<ReduceSinkOperator> reduceSinkOperatorsAddedByEnforceBucketingSorting,
@@ -508,8 +508,8 @@ public class ParseContext {
return semanticInputs;
}
- public void replaceRootTask(Task<? extends Serializable> rootTask,
- List<? extends Task<? extends Serializable>> tasks) {
+ public void replaceRootTask(Task<?> rootTask,
+ List<? extends Task<?>> tasks) {
this.rootTasks.remove(rootTask);
this.rootTasks.addAll(tasks);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index b0b650f..290d0ba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -121,7 +121,7 @@ public abstract class TaskCompiler {
@SuppressWarnings("nls")
public void compile(final ParseContext pCtx,
- final List<Task<? extends Serializable>> rootTasks,
+ final List<Task<?>> rootTasks,
final Set<ReadEntity> inputs, final Set<WriteEntity> outputs) throws SemanticException {
Context ctx = pCtx.getContext();
@@ -228,7 +228,7 @@ public abstract class TaskCompiler {
// The idea here is to keep an object reference both in FileSink and in FetchTask for list of files
// to be fetched. During Job close file sink will populate the list and fetch task later will use it
// to fetch the results.
- Collection<Operator<? extends OperatorDesc>> tableScanOps =
+ Collection<Operator<?>> tableScanOps =
Lists.<Operator<?>>newArrayList(pCtx.getTopOps().values());
Set<FileSinkOperator> fsOps = OperatorUtils.findOperators(tableScanOps, FileSinkOperator.class);
if(fsOps != null && fsOps.size() == 1) {
@@ -281,13 +281,13 @@ public abstract class TaskCompiler {
generateTaskTree(rootTasks, pCtx, mvTask, inputs, outputs);
// For each task, set the key descriptor for the reducer
- for (Task<? extends Serializable> rootTask : rootTasks) {
+ for (Task<?> rootTask : rootTasks) {
GenMapRedUtils.setKeyAndValueDescForTaskTree(rootTask);
}
// If a task contains an operator which requires BucketizedHiveInputFormat,
// configure the task to use it
- for (Task<? extends Serializable> rootTask : rootTasks) {
+ for (Task<?> rootTask : rootTasks) {
setInputFormat(rootTask);
}
@@ -311,7 +311,7 @@ public abstract class TaskCompiler {
throw new SemanticException("Can not find correct root task!");
}
try {
- Task<? extends Serializable> root = rootTasks.iterator().next();
+ Task<?> root = rootTasks.iterator().next();
StatsTask tsk = (StatsTask) genTableStats(pCtx, pCtx.getTopOps().values()
.iterator().next(), root, outputs);
root.addDependentTask(tsk);
@@ -321,10 +321,10 @@ public abstract class TaskCompiler {
}
genColumnStatsTask(pCtx.getAnalyzeRewrite(), loadFileWork, map, outerQueryLimit, 0);
} else {
- Set<Task<? extends Serializable>> leafTasks = new LinkedHashSet<Task<? extends Serializable>>();
+ Set<Task<?>> leafTasks = new LinkedHashSet<Task<?>>();
getLeafTasks(rootTasks, leafTasks);
- List<Task<? extends Serializable>> nonStatsLeafTasks = new ArrayList<>();
- for (Task<? extends Serializable> tsk : leafTasks) {
+ List<Task<?>> nonStatsLeafTasks = new ArrayList<>();
+ for (Task<?> tsk : leafTasks) {
// map table name to the correct ColumnStatsTask
if (tsk instanceof StatsTask) {
map.put(extractTableFullName((StatsTask) tsk), (StatsTask) tsk);
@@ -333,8 +333,8 @@ public abstract class TaskCompiler {
}
}
// add cStatsTask as a dependent of all the nonStatsLeafTasks
- for (Task<? extends Serializable> tsk : nonStatsLeafTasks) {
- for (Task<? extends Serializable> cStatsTask : map.values()) {
+ for (Task<?> tsk : nonStatsLeafTasks) {
+ for (Task<?> cStatsTask : map.values()) {
tsk.addDependentTask(cStatsTask);
}
}
@@ -363,13 +363,13 @@ public abstract class TaskCompiler {
// generate a DDL task and make it a dependent task of the leaf
CreateTableDesc crtTblDesc = pCtx.getCreateTable();
crtTblDesc.validate(conf);
- Task<? extends Serializable> crtTblTask = TaskFactory.get(new DDLWork(inputs, outputs, crtTblDesc));
+ Task<?> crtTblTask = TaskFactory.get(new DDLWork(inputs, outputs, crtTblDesc));
patchUpAfterCTASorMaterializedView(rootTasks, inputs, outputs, crtTblTask,
CollectionUtils.isEmpty(crtTblDesc.getPartColNames()));
} else if (pCtx.getQueryProperties().isMaterializedView()) {
// generate a DDL task and make it a dependent task of the leaf
CreateViewDesc viewDesc = pCtx.getCreateViewDesc();
- Task<? extends Serializable> crtViewTask = TaskFactory.get(new DDLWork(
+ Task<?> crtViewTask = TaskFactory.get(new DDLWork(
inputs, outputs, viewDesc));
patchUpAfterCTASorMaterializedView(rootTasks, inputs, outputs, crtViewTask,
CollectionUtils.isEmpty(viewDesc.getPartColNames()));
@@ -378,10 +378,10 @@ public abstract class TaskCompiler {
// of the tree.
MaterializedViewUpdateDesc materializedViewDesc = pCtx.getMaterializedViewUpdateDesc();
DDLWork ddlWork = new DDLWork(inputs, outputs, materializedViewDesc);
- Set<Task<? extends Serializable>> leafTasks = new LinkedHashSet<Task<? extends Serializable>>();
+ Set<Task<?>> leafTasks = new LinkedHashSet<Task<?>>();
getLeafTasks(rootTasks, leafTasks);
- Task<? extends Serializable> materializedViewTask = TaskFactory.get(ddlWork, conf);
- for (Task<? extends Serializable> task : leafTasks) {
+ Task<?> materializedViewTask = TaskFactory.get(ddlWork, conf);
+ for (Task<?> task : leafTasks) {
task.addDependentTask(materializedViewTask);
}
}
@@ -497,8 +497,8 @@ public abstract class TaskCompiler {
}
}
- private void patchUpAfterCTASorMaterializedView(List<Task<? extends Serializable>> rootTasks,
- Set<ReadEntity> inputs, Set<WriteEntity> outputs, Task<? extends Serializable> createTask,
+ private void patchUpAfterCTASorMaterializedView(List<Task<?>> rootTasks,
+ Set<ReadEntity> inputs, Set<WriteEntity> outputs, Task<?> createTask,
boolean createTaskAfterMoveTask) {
// clear the mapredWork output file from outputs for CTAS
// DDLWork at the tail of the chain will have the output
@@ -515,15 +515,15 @@ public abstract class TaskCompiler {
}
// find all leaf tasks and make the DDLTask as a dependent task on all of them
- Set<Task<? extends Serializable>> leaves = new LinkedHashSet<>();
+ Set<Task<?>> leaves = new LinkedHashSet<>();
getLeafTasks(rootTasks, leaves);
assert (leaves.size() > 0);
// Target task is supposed to be the last task
- Task<? extends Serializable> targetTask = createTask;
- for (Task<? extends Serializable> task : leaves) {
+ Task<?> targetTask = createTask;
+ for (Task<?> task : leaves) {
if (task instanceof StatsTask) {
// StatsTask requires the table to already exist
- for (Task<? extends Serializable> parentOfStatsTask : task.getParentTasks()) {
+ for (Task<?> parentOfStatsTask : task.getParentTasks()) {
if (parentOfStatsTask instanceof MoveTask && !createTaskAfterMoveTask) {
// For partitioned CTAS, we need to create the table before the move task
// as we need to create the partitions in metastore and for that we should
@@ -533,7 +533,7 @@ public abstract class TaskCompiler {
parentOfStatsTask.addDependentTask(createTask);
}
}
- for (Task<? extends Serializable> parentOfCrtTblTask : createTask.getParentTasks()) {
+ for (Task<?> parentOfCrtTblTask : createTask.getParentTasks()) {
parentOfCrtTblTask.removeDependentTask(task);
}
createTask.addDependentTask(task);
@@ -580,11 +580,11 @@ public abstract class TaskCompiler {
/**
* Makes dependentTask dependent on task.
*/
- private void interleaveTask(Task<? extends Serializable> dependentTask, Task<? extends Serializable> task) {
- for (Task<? extends Serializable> parentOfStatsTask : dependentTask.getParentTasks()) {
+ private void interleaveTask(Task<?> dependentTask, Task<?> task) {
+ for (Task<?> parentOfStatsTask : dependentTask.getParentTasks()) {
parentOfStatsTask.addDependentTask(task);
}
- for (Task<? extends Serializable> parentOfCrtTblTask : task.getParentTasks()) {
+ for (Task<?> parentOfCrtTblTask : task.getParentTasks()) {
parentOfCrtTblTask.removeDependentTask(dependentTask);
}
task.addDependentTask(dependentTask);
@@ -641,16 +641,16 @@ public abstract class TaskCompiler {
/**
* Find all leaf tasks of the list of root tasks.
*/
- private void getLeafTasks(List<Task<? extends Serializable>> rootTasks,
- Set<Task<? extends Serializable>> leaves) {
+ private void getLeafTasks(List<Task<?>> rootTasks,
+ Set<Task<?>> leaves) {
- for (Task<? extends Serializable> root : rootTasks) {
+ for (Task<?> root : rootTasks) {
getLeafTasks(root, leaves);
}
}
- private void getLeafTasks(Task<? extends Serializable> task,
- Set<Task<? extends Serializable>> leaves) {
+ private void getLeafTasks(Task<?> task,
+ Set<Task<?>> leaves) {
if (task.getDependentTasks() == null) {
if (!leaves.contains(task)) {
leaves.add(task);
@@ -663,7 +663,7 @@ public abstract class TaskCompiler {
/*
* Called to transform tasks into local tasks where possible/desirable
*/
- protected abstract void decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx,
+ protected abstract void decideExecMode(List<Task<?>> rootTasks, Context ctx,
GlobalLimitCtx globalLimitCtx) throws SemanticException;
/*
@@ -676,18 +676,18 @@ public abstract class TaskCompiler {
/*
* Called after the tasks have been generated to run another round of optimization
*/
- protected abstract void optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks,
+ protected abstract void optimizeTaskPlan(List<Task<?>> rootTasks,
ParseContext pCtx, Context ctx) throws SemanticException;
/*
* Called to set the appropriate input format for tasks
*/
- protected abstract void setInputFormat(Task<? extends Serializable> rootTask);
+ protected abstract void setInputFormat(Task<?> rootTask);
/*
* Called to generate the task tree from the parse context/operator tree
*/
- protected abstract void generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx,
+ protected abstract void generateTaskTree(List<Task<?>> rootTasks, ParseContext pCtx,
List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException;
/*
@@ -717,7 +717,7 @@ public abstract class TaskCompiler {
/**
* Create a clone of the parse context
*/
- public ParseContext getParseContext(ParseContext pCtx, List<Task<? extends Serializable>> rootTasks) {
+ public ParseContext getParseContext(ParseContext pCtx, List<Task<?>> rootTasks) {
ParseContext clone = new ParseContext(queryState,
pCtx.getOpToPartPruner(), pCtx.getOpToPartList(), pCtx.getTopOps(),
pCtx.getJoinOps(), pCtx.getSmbMapJoinOps(),
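
TaskCompiler.getLeafTasks, changed in the hunks above, is a plain depth-first collection: a task with no dependent tasks is a leaf. A condensed sketch under the same simplifying assumptions as the walker earlier (the Task stub models only getDependentTasks, with null marking a leaf):

import java.util.List;
import java.util.Set;

class Task<T> {
  List<Task<?>> dependents;  // stays null until a dependent is added
  List<Task<?>> getDependentTasks() { return dependents; }
}

class LeafCollector {
  void collectLeaves(List<Task<?>> roots, Set<Task<?>> leaves) {
    for (Task<?> root : roots) {
      collectLeaves(root, leaves);
    }
  }

  void collectLeaves(Task<?> task, Set<Task<?>> leaves) {
    if (task.getDependentTasks() == null) {
      leaves.add(task);  // Set semantics make an explicit contains() check optional
      return;
    }
    for (Task<?> child : task.getDependentTasks()) {
      collectLeaves(child, leaves);
    }
  }
}
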
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
index 916a172..7d58077 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
@@ -597,7 +597,7 @@ public class TezCompiler extends TaskCompiler {
}
@Override
- protected void generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx,
+ protected void generateTaskTree(List<Task<?>> rootTasks, ParseContext pCtx,
List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs)
throws SemanticException {
@@ -690,7 +690,7 @@ public class TezCompiler extends TaskCompiler {
}
@Override
- protected void setInputFormat(Task<? extends Serializable> task) {
+ protected void setInputFormat(Task<?> task) {
if (task instanceof TezTask) {
TezWork work = ((TezTask)task).getWork();
List<BaseWork> all = work.getAllWork();
@@ -706,15 +706,15 @@ public class TezCompiler extends TaskCompiler {
}
}
} else if (task instanceof ConditionalTask) {
- List<Task<? extends Serializable>> listTasks
+ List<Task<?>> listTasks
= ((ConditionalTask) task).getListTasks();
- for (Task<? extends Serializable> tsk : listTasks) {
+ for (Task<?> tsk : listTasks) {
setInputFormat(tsk);
}
}
if (task.getChildTasks() != null) {
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
setInputFormat(childTask);
}
}
@@ -737,7 +737,7 @@ public class TezCompiler extends TaskCompiler {
}
@Override
- protected void decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx,
+ protected void decideExecMode(List<Task<?>> rootTasks, Context ctx,
GlobalLimitCtx globalLimitCtx)
throws SemanticException {
// currently all Tez work is on the cluster
@@ -745,7 +745,7 @@ public class TezCompiler extends TaskCompiler {
}
@Override
- protected void optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx,
+ protected void optimizeTaskPlan(List<Task<?>> rootTasks, ParseContext pCtx,
Context ctx) throws SemanticException {
PerfLogger perfLogger = SessionState.getPerfLogger();
perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.TEZ_COMPILER);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java
index bc6b6b1..80680e9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java
@@ -37,40 +37,40 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
@LimitedPrivate(value = { "Apache Hive, Apache Sentry (incubating)" })
@Evolving
public interface HiveAuthorizationTaskFactory {
- Task<? extends Serializable> createCreateRoleTask(ASTNode node, Set<ReadEntity> inputs,
+ Task<?> createCreateRoleTask(ASTNode node, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createDropRoleTask(ASTNode node, Set<ReadEntity> inputs,
+ Task<?> createDropRoleTask(ASTNode node, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createShowRoleGrantTask(ASTNode node, Path resultFile,
+ Task<?> createShowRoleGrantTask(ASTNode node, Path resultFile,
Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createGrantRoleTask(ASTNode node, Set<ReadEntity> inputs,
+ Task<?> createGrantRoleTask(ASTNode node, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createRevokeRoleTask(ASTNode node, Set<ReadEntity> inputs,
+ Task<?> createRevokeRoleTask(ASTNode node, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createGrantTask(ASTNode node, Set<ReadEntity> inputs,
+ Task<?> createGrantTask(ASTNode node, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createShowGrantTask(ASTNode node, Path resultFile, Set<ReadEntity> inputs,
+ Task<?> createShowGrantTask(ASTNode node, Path resultFile, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createRevokeTask(ASTNode node, Set<ReadEntity> inputs,
+ Task<?> createRevokeTask(ASTNode node, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createSetRoleTask(String roleName,
+ Task<?> createSetRoleTask(String roleName,
Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createShowCurrentRoleTask(Set<ReadEntity> inputs,
+ Task<?> createShowCurrentRoleTask(Set<ReadEntity> inputs,
Set<WriteEntity> outputs, Path resFile) throws SemanticException;
- Task<? extends Serializable> createShowRolePrincipalsTask(ASTNode ast, Path resFile,
+ Task<?> createShowRolePrincipalsTask(ASTNode ast, Path resFile,
Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException;
- Task<? extends Serializable> createShowRolesTask(ASTNode ast, Path resFile,
+ Task<?> createShowRolesTask(ASTNode ast, Path resFile,
Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
index b122620..c2353c5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
@@ -73,21 +73,21 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
}
@Override
- public Task<? extends Serializable> createCreateRoleTask(ASTNode ast, Set<ReadEntity> inputs,
+ public Task<?> createCreateRoleTask(ASTNode ast, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) {
String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
CreateRoleDesc createRoleDesc = new CreateRoleDesc(roleName);
return TaskFactory.get(new DDLWork(inputs, outputs, createRoleDesc));
}
@Override
- public Task<? extends Serializable> createDropRoleTask(ASTNode ast, Set<ReadEntity> inputs,
+ public Task<?> createDropRoleTask(ASTNode ast, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) {
String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
DropRoleDesc dropRoleDesc = new DropRoleDesc(roleName);
return TaskFactory.get(new DDLWork(inputs, outputs, dropRoleDesc));
}
@Override
- public Task<? extends Serializable> createShowRoleGrantTask(ASTNode ast, Path resultFile,
+ public Task<?> createShowRoleGrantTask(ASTNode ast, Path resultFile,
Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
ASTNode child = (ASTNode) ast.getChild(0);
PrincipalType principalType = PrincipalType.USER;
@@ -107,7 +107,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
return TaskFactory.get(new DDLWork(inputs, outputs, showRoleGrantDesc));
}
@Override
- public Task<? extends Serializable> createGrantTask(ASTNode ast, Set<ReadEntity> inputs,
+ public Task<?> createGrantTask(ASTNode ast, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException {
List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef(
(ASTNode) ast.getChild(0));
@@ -135,7 +135,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
}
@Override
- public Task<? extends Serializable> createRevokeTask(ASTNode ast, Set<ReadEntity> inputs,
+ public Task<?> createRevokeTask(ASTNode ast, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException {
List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
@@ -153,7 +153,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc));
}
@Override
- public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, Set<ReadEntity> inputs,
+ public Task<?> createShowGrantTask(ASTNode ast, Path resultFile, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException {
PrincipalDesc principalDesc = null;
@@ -180,16 +180,16 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
return TaskFactory.get(new DDLWork(inputs, outputs, showGrant));
}
@Override
- public Task<? extends Serializable> createGrantRoleTask(ASTNode ast, Set<ReadEntity> inputs,
+ public Task<?> createGrantRoleTask(ASTNode ast, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) {
return analyzeGrantRevokeRole(true, ast, inputs, outputs);
}
@Override
- public Task<? extends Serializable> createRevokeRoleTask(ASTNode ast, Set<ReadEntity> inputs,
+ public Task<?> createRevokeRoleTask(ASTNode ast, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) {
return analyzeGrantRevokeRole(false, ast, inputs, outputs);
}
- private Task<? extends Serializable> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast,
+ private Task<?> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast,
Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
List<PrincipalDesc> principalDesc = AuthorizationParseUtils.analyzePrincipalListDef(
(ASTNode) ast.getChild(0));
@@ -335,21 +335,21 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
}
@Override
- public Task<? extends Serializable> createSetRoleTask(String roleName, Set<ReadEntity> inputs,
+ public Task<?> createSetRoleTask(String roleName, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException {
SetRoleDesc setRoleDesc = new SetRoleDesc(roleName);
return TaskFactory.get(new DDLWork(inputs, outputs, setRoleDesc));
}
@Override
- public Task<? extends Serializable> createShowCurrentRoleTask( Set<ReadEntity> inputs, Set<WriteEntity> outputs,
+ public Task<?> createShowCurrentRoleTask( Set<ReadEntity> inputs, Set<WriteEntity> outputs,
Path resFile) throws SemanticException {
ShowCurrentRoleDesc showCurrentRoleDesc = new ShowCurrentRoleDesc(resFile.toString());
return TaskFactory.get(new DDLWork(inputs, outputs, showCurrentRoleDesc));
}
@Override
- public Task<? extends Serializable> createShowRolePrincipalsTask(ASTNode ast, Path resFile, Set<ReadEntity> inputs,
+ public Task<?> createShowRolePrincipalsTask(ASTNode ast, Path resFile, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException {
String roleName;
@@ -365,7 +365,7 @@ public class HiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFa
}
@Override
- public Task<? extends Serializable> createShowRolesTask(ASTNode ast, Path resFile, Set<ReadEntity> inputs,
+ public Task<?> createShowRolesTask(ASTNode ast, Path resFile, Set<ReadEntity> inputs,
Set<WriteEntity> outputs) throws SemanticException {
ShowRolesDesc showRolesDesc = new ShowRolesDesc(resFile.toString());
return TaskFactory.get(new DDLWork(inputs, outputs, showRolesDesc));
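
Every method in this factory follows one shape: build a desc payload, wrap it in a DDLWork, and let TaskFactory.get return the wildcard Task<?>. A stubbed sketch of that shape; DDLWork, TaskFactory, and the desc object below are illustrative stand-ins for the real ql classes:

import java.util.HashSet;
import java.util.Set;

class Task<T> { }
class DDLWork {
  DDLWork(Set<?> inputs, Set<?> outputs, Object desc) { }
}
class TaskFactory {
  static Task<?> get(DDLWork work) { return new Task<Object>(); }
}

class RoleTaskSketch {
  // Mirrors createCreateRoleTask: desc payload in, wildcard task out.
  static Task<?> createRoleTask(String roleName, Set<?> inputs, Set<?> outputs) {
    Object createRoleDesc = roleName;  // stands in for CreateRoleDesc
    return TaskFactory.get(new DDLWork(inputs, outputs, createRoleDesc));
  }

  public static void main(String[] args) {
    Task<?> task = createRoleTask("analyst", new HashSet<>(), new HashSet<>());
    System.out.println(task != null ? "task created" : "no task");
  }
}
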
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AbortTxnHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AbortTxnHandler.java
index 599503a..b1c2709 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AbortTxnHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AbortTxnHandler.java
@@ -34,7 +34,7 @@ import java.util.List;
*/
public class AbortTxnHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
if (!AcidUtils.isAcidEnabled(context.hiveConf)) {
context.log.error("Cannot load transaction events as acid is not enabled");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
index 39bd021..d8ed9e2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddForeignKeyHandler.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
public class AddForeignKeyHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
AddForeignKeyMessage msg = deserializer.getAddForeignKeyMessage(context.dmd.getPayload());
@@ -48,7 +48,7 @@ public class AddForeignKeyHandler extends AbstractMessageHandler {
}
}
- List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> tasks = new ArrayList<Task<?>>();
if (fks.isEmpty()) {
return tasks;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
index e264417..39f896f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddNotNullConstraintHandler.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
public class AddNotNullConstraintHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
AddNotNullConstraintMessage msg = deserializer.getAddNotNullConstraintMessage(context.dmd.getPayload());
@@ -48,7 +48,7 @@ public class AddNotNullConstraintHandler extends AbstractMessageHandler {
}
}
- List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> tasks = new ArrayList<Task<?>>();
if (nns.isEmpty()) {
return tasks;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
index 54a0638..5bfced0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddPrimaryKeyHandler.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
public class AddPrimaryKeyHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
AddPrimaryKeyMessage msg = deserializer.getAddPrimaryKeyMessage(context.dmd.getPayload());
@@ -48,7 +48,7 @@ public class AddPrimaryKeyHandler extends AbstractMessageHandler {
}
}
- List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> tasks = new ArrayList<Task<?>>();
if (pks.isEmpty()) {
return tasks;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
index a48b411..9cf5ffa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AddUniqueConstraintHandler.java
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
public class AddUniqueConstraintHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
AddUniqueConstraintMessage msg = deserializer.getAddUniqueConstraintMessage(context.dmd.getPayload());
@@ -48,7 +48,7 @@ public class AddUniqueConstraintHandler extends AbstractMessageHandler {
}
}
- List<Task<? extends Serializable>> tasks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> tasks = new ArrayList<Task<?>>();
if (uks.isEmpty()) {
return tasks;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AllocWriteIdHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AllocWriteIdHandler.java
index bb4402f..f9a0750 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AllocWriteIdHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AllocWriteIdHandler.java
@@ -34,7 +34,7 @@ import java.util.List;
*/
public class AllocWriteIdHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
if (!AcidUtils.isAcidEnabled(context.hiveConf)) {
context.log.error("Cannot load alloc write id event as acid is not enabled");
@@ -53,7 +53,7 @@ public class AllocWriteIdHandler extends AbstractMessageHandler {
ReplTxnWork work = new ReplTxnWork(HiveUtils.getReplPolicy(context.dbName), dbName, tableName,
ReplTxnWork.OperationType.REPL_ALLOC_WRITE_ID, msg.getTxnToWriteIdList(), context.eventOnlyReplicationSpec());
- Task<? extends Serializable> allocWriteIdTask = TaskFactory.get(work, context.hiveConf);
+ Task<?> allocWriteIdTask = TaskFactory.get(work, context.hiveConf);
context.log.info("Added alloc write id task : {}", allocWriteIdTask.getId());
updatedMetadata.set(context.dmd.getEventTo().toString(), dbName, tableName, null);
return Collections.singletonList(allocWriteIdTask);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
index f8a9bac..76c7dd5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
@@ -44,7 +44,7 @@ import java.util.Map;
*/
public class AlterDatabaseHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
AlterDatabaseMessage msg = deserializer.getAlterDatabaseMessage(context.dmd.getPayload());
String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CommitTxnHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CommitTxnHandler.java
index 96ca221..86f1cb9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CommitTxnHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CommitTxnHandler.java
@@ -40,7 +40,7 @@ import java.util.List;
*/
public class CommitTxnHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
if (!AcidUtils.isAcidEnabled(context.hiveConf)) {
context.log.error("Cannot load transaction events as acid is not enabled");
@@ -49,7 +49,7 @@ public class CommitTxnHandler extends AbstractMessageHandler {
CommitTxnMessage msg = deserializer.getCommitTxnMessage(context.dmd.getPayload());
int numEntry = (msg.getTables() == null ? 0 : msg.getTables().size());
- List<Task<? extends Serializable>> tasks = new ArrayList<>();
+ List<Task<?>> tasks = new ArrayList<>();
String dbName = context.dbName;
String tableNamePrev = null;
String tblName = null;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
index 0a232a9..cc30131b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
@@ -43,7 +43,7 @@ import java.util.List;
public class CreateDatabaseHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
MetaData metaData;
try {
@@ -80,4 +80,4 @@ public class CreateDatabaseHandler extends AbstractMessageHandler {
.set(context.dmd.getEventTo().toString(), destinationDBName, null, null);
return Collections.singletonList(createDBTask);
}
-}
\ No newline at end of file
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
index ae66366..948d201 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
@@ -56,7 +56,7 @@ public class CreateFunctionHandler extends AbstractMessageHandler {
}
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
try {
FunctionDescBuilder builder = new FunctionDescBuilder(context);
@@ -91,7 +91,7 @@ public class CreateFunctionHandler extends AbstractMessageHandler {
* add the 'many' to parent/root tasks. The execution environment will make sure that the child barrier task will not get executed unless all parents of the barrier task are complete,
* which should only happen when the last task is finished, at which point the child of the barrier task is picked up.
*/
- Task<? extends Serializable> barrierTask =
+ Task<?> barrierTask =
TaskFactory.get(new DependencyCollectionWork(), context.hiveConf);
builder.replCopyTasks.forEach(t -> t.addDependentTask(barrierTask));
barrierTask.addDependentTask(createTask);
@@ -204,4 +204,4 @@ public class CreateFunctionHandler extends AbstractMessageHandler {
return destinationUri;
}
}
-}
\ No newline at end of file
+}
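
The barrier comment above describes a many-to-one-to-one dependency: every replicated copy task feeds a barrier task, and only the barrier releases the create-function task. A toy version with a simplified Task stub (addDependentTask mirrors the real method name; the barrier's DependencyCollectionWork payload is not modeled):

import java.util.ArrayList;
import java.util.List;

class Task<T> {
  List<Task<?>> dependents = new ArrayList<>();
  void addDependentTask(Task<?> t) { dependents.add(t); }
}

class BarrierSketch {
  public static void main(String[] args) {
    List<Task<?>> copyTasks = new ArrayList<>();
    copyTasks.add(new Task<Void>());
    copyTasks.add(new Task<Void>());

    Task<?> barrier = new Task<Void>();
    Task<?> createTask = new Task<Void>();

    // Many-to-one: every copy task must complete before the barrier fires.
    copyTasks.forEach(t -> t.addDependentTask(barrier));
    // One-to-one: the barrier alone releases the function-creation task.
    barrier.addDependentTask(createTask);
  }
}
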
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DefaultHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DefaultHandler.java
index 64e6518..9f6d8ea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DefaultHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DefaultHandler.java
@@ -26,7 +26,7 @@ import java.util.List;
public class DefaultHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context withinContext)
+ public List<Task<?>> handle(Context withinContext)
throws SemanticException {
return new ArrayList<>();
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DeletePartColStatHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DeletePartColStatHandler.java
index f6153a6..b300c9d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DeletePartColStatHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DeletePartColStatHandler.java
@@ -32,7 +32,7 @@ import java.util.List;
*/
public class DeletePartColStatHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
context.log.info("Replication of partition stat delete event is not supported yet");
if (!context.isDbNameEmpty()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DeleteTableColStatHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DeleteTableColStatHandler.java
index 404372a..cb42cb6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DeleteTableColStatHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DeleteTableColStatHandler.java
@@ -32,7 +32,7 @@ import java.util.List;
*/
public class DeleteTableColStatHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
context.log.info("Replication of table stat delete event is not supported yet");
if (!context.isDbNameEmpty()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
index 233ff9e..0db9f19 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropConstraintHandler.java
@@ -30,7 +30,7 @@ import java.util.List;
public class DropConstraintHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
DropConstraintMessage msg = deserializer.getDropConstraintMessage(context.dmd.getPayload());
String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java
index 4db6ab3..c10174a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropDatabaseHandler.java
@@ -31,13 +31,13 @@ import java.util.List;
public class DropDatabaseHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
DropDatabaseMessage msg =
deserializer.getDropDatabaseMessage(context.dmd.getPayload());
String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
DropDatabaseDesc desc = new DropDatabaseDesc(actualDbName, true, context.eventOnlyReplicationSpec());
- Task<? extends Serializable> dropDBTask =
+ Task<?> dropDBTask =
TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), desc), context.hiveConf);
context.log.info(
"Added drop database task : {}:{}", dropDBTask.getId(), desc.getDatabaseName());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
index 167679f..89cdaa5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
@@ -31,7 +31,7 @@ import java.util.List;
public class DropFunctionHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
DropFunctionMessage msg = deserializer.getDropFunctionMessage(context.dmd.getPayload());
String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
index 0df6815..f655597 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropPartitionHandler.java
@@ -34,7 +34,7 @@ import java.util.Map;
public class DropPartitionHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
try {
DropPartitionMessage msg = deserializer.getDropPartitionMessage(context.dmd.getPayload());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
index 6e29d61..0f1f05b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java
@@ -32,7 +32,7 @@ import java.util.List;
public class DropTableHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
String actualDbName;
String actualTblName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/InsertHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/InsertHandler.java
index 1eeacbf..4b8274d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/InsertHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/InsertHandler.java
@@ -36,7 +36,7 @@ public class InsertHandler extends AbstractMessageHandler {
private static final Logger LOG = LoggerFactory.getLogger(InsertHandler.class);
@Override
- public List<Task<? extends Serializable>> handle(Context withinContext)
+ public List<Task<?>> handle(Context withinContext)
throws SemanticException {
try {
FileSystem fs =
@@ -59,7 +59,7 @@ public class InsertHandler extends AbstractMessageHandler {
// Piggybacking in Import logic for now
TableHandler tableHandler = new TableHandler();
- List<Task<? extends Serializable>> tasks = tableHandler.handle(currentContext);
+ List<Task<?>> tasks = tableHandler.handle(currentContext);
readEntitySet.addAll(tableHandler.readEntities());
writeEntitySet.addAll(tableHandler.writeEntities());
getUpdatedMetadata().copyUpdatedMetadata(tableHandler.getUpdatedMetadata());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/MessageHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/MessageHandler.java
index ad3be67..2851880 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/MessageHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/MessageHandler.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
public interface MessageHandler {
- List<Task<? extends Serializable>> handle(Context withinContext) throws SemanticException;
+ List<Task<?>> handle(Context withinContext) throws SemanticException;
Set<ReadEntity> readEntities();
@@ -48,7 +48,7 @@ public interface MessageHandler {
class Context {
public String location;
public final String dbName;
- public final Task<? extends Serializable> precursor;
+ public final Task<?> precursor;
public DumpMetaData dmd;
final HiveConf hiveConf;
final Hive db;
@@ -56,7 +56,7 @@ public interface MessageHandler {
final Logger log;
public Context(String dbName, String location,
- Task<? extends Serializable> precursor, DumpMetaData dmd, HiveConf hiveConf,
+ Task<?> precursor, DumpMetaData dmd, HiveConf hiveConf,
Hive db, org.apache.hadoop.hive.ql.Context nestedContext, Logger log) {
this.dbName = dbName;
this.location = location;
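
Every handler in this patch implements the same contract: handle(Context) returns the List<Task<?>> that replays the event. A minimal no-op implementation against stubbed types, in the spirit of DefaultHandler above (the stubs omit SemanticException and the other Context fields):

import java.util.Collections;
import java.util.List;

class Task<T> { }
class Context { }

interface MessageHandler {
  List<Task<?>> handle(Context withinContext);
}

// An event that needs no replay yields an empty task list.
class NoOpHandler implements MessageHandler {
  @Override
  public List<Task<?>> handle(Context withinContext) {
    return Collections.emptyList();
  }
}
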
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/OpenTxnHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/OpenTxnHandler.java
index 6123371..cd7274d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/OpenTxnHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/OpenTxnHandler.java
@@ -34,7 +34,7 @@ import java.util.List;
*/
public class OpenTxnHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
if (!AcidUtils.isAcidEnabled(context.hiveConf)) {
context.log.error("Cannot load transaction events as acid is not enabled");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java
index 3216265..6dd6976 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenamePartitionHandler.java
@@ -36,7 +36,7 @@ import java.util.Map;
public class RenamePartitionHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
AlterPartitionMessage msg = deserializer.getAlterPartitionMessage(context.dmd.getPayload());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
index 50958c8..c810b8c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
@@ -33,7 +33,7 @@ import java.util.List;
public class RenameTableHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
try {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TableHandler.java
index 664015f..266d034 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TableHandler.java
@@ -45,9 +45,9 @@ public class TableHandler extends AbstractMessageHandler {
private static final Logger LOG = LoggerFactory.getLogger(TableHandler.class);
@Override
- public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
+ public List<Task<?>> handle(Context context) throws SemanticException {
try {
- List<Task<? extends Serializable>> importTasks = new ArrayList<>();
+ List<Task<?>> importTasks = new ArrayList<>();
boolean isExternal = false, isLocationSet = false;
String parsedLocation = null;
@@ -82,9 +82,9 @@ public class TableHandler extends AbstractMessageHandler {
(context.precursor != null), parsedLocation, null, context.dbName,
null, context.location, x, updatedMetadata, context.getTxnMgr(), tuple.writeId);
- Task<? extends Serializable> openTxnTask = x.getOpenTxnTask();
+ Task<?> openTxnTask = x.getOpenTxnTask();
if (openTxnTask != null && !importTasks.isEmpty()) {
- for (Task<? extends Serializable> t : importTasks) {
+ for (Task<?> t : importTasks) {
openTxnTask.addDependentTask(t);
}
importTasks.add(openTxnTask);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
index 91687a0..1b1efbc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncatePartitionHandler.java
@@ -34,7 +34,7 @@ import java.util.Map;
public class TruncatePartitionHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
+ public List<Task<?>> handle(Context context) throws SemanticException {
AlterPartitionMessage msg = deserializer.getAlterPartitionMessage(context.dmd.getPayload());
String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
String actualTblName = msg.getTable();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
index bcc15b4..c18529f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/TruncateTableHandler.java
@@ -30,7 +30,7 @@ import java.util.List;
public class TruncateTableHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context) throws SemanticException {
+ public List<Task<?>> handle(Context context) throws SemanticException {
AlterTableMessage msg = deserializer.getAlterTableMessage(context.dmd.getPayload());
String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
String actualTblName = msg.getTable();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/UpdatePartColStatHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/UpdatePartColStatHandler.java
index bea431c..d3d8d12 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/UpdatePartColStatHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/UpdatePartColStatHandler.java
@@ -33,7 +33,7 @@ import java.util.List;
*/
public class UpdatePartColStatHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
UpdatePartitionColumnStatMessage upcsm =
deserializer.getUpdatePartitionColumnStatMessage(context.dmd.getPayload());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/UpdateTableColStatHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/UpdateTableColStatHandler.java
index 6160d43..139c50f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/UpdateTableColStatHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/UpdateTableColStatHandler.java
@@ -33,7 +33,7 @@ import java.util.List;
*/
public class UpdateTableColStatHandler extends AbstractMessageHandler {
@Override
- public List<Task<? extends Serializable>> handle(Context context)
+ public List<Task<?>> handle(Context context)
throws SemanticException {
UpdateTableColumnStatMessage utcsm =
deserializer.getUpdateTableColumnStatMessage(context.dmd.getPayload());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkProcContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkProcContext.java
index e60f715..767e4cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkProcContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkProcContext.java
@@ -67,7 +67,7 @@ public class GenSparkProcContext implements NodeProcessorCtx {
public final List<Task<MoveWork>> moveTask;
// rootTasks is the entry point for all generated tasks
- public final List<Task<? extends Serializable>> rootTasks;
+ public final List<Task<?>> rootTasks;
public final Set<ReadEntity> inputs;
public final Set<WriteEntity> outputs;
@@ -151,7 +151,7 @@ public class GenSparkProcContext implements NodeProcessorCtx {
public GenSparkProcContext(HiveConf conf,
ParseContext parseContext,
List<Task<MoveWork>> moveTask,
- List<Task<? extends Serializable>> rootTasks,
+ List<Task<?>> rootTasks,
Set<ReadEntity> inputs,
Set<WriteEntity> outputs,
Map<String, TableScanOperator> topOps) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
index 757cb7a..c102a69 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/GenSparkUtils.java
@@ -404,7 +404,7 @@ public class GenSparkUtils {
* This is forked from {@link GenMapRedUtils}. The difference is that it doesn't check
* 'isLinkedFileSink' and does not set parent dir for the linked file sinks.
*/
- public static Path createMoveTask(Task<? extends Serializable> currTask, boolean chDir,
+ public static Path createMoveTask(Task<?> currTask, boolean chDir,
FileSinkOperator fsOp, ParseContext parseCtx, List<Task<MoveWork>> mvTasks,
HiveConf hconf, DependencyCollectionTask dependencyTask) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
index 6bc5925..24429b4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
@@ -361,7 +361,7 @@ public class SparkCompiler extends TaskCompiler {
* TODO: need to turn on rules that's commented out and add more if necessary.
*/
@Override
- protected void generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx,
+ protected void generateTaskTree(List<Task<?>> rootTasks, ParseContext pCtx,
List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs)
throws SemanticException {
PERF_LOGGER.PerfLogBegin(CLASS_NAME, PerfLogger.SPARK_GENERATE_TASK_TREE);
@@ -523,7 +523,7 @@ public class SparkCompiler extends TaskCompiler {
}
@Override
- protected void setInputFormat(Task<? extends Serializable> task) {
+ protected void setInputFormat(Task<?> task) {
if (task instanceof SparkTask) {
SparkWork work = ((SparkTask)task).getWork();
List<BaseWork> all = work.getAllWork();
@@ -539,15 +539,15 @@ public class SparkCompiler extends TaskCompiler {
}
}
} else if (task instanceof ConditionalTask) {
- List<Task<? extends Serializable>> listTasks
+ List<Task<?>> listTasks
= ((ConditionalTask) task).getListTasks();
- for (Task<? extends Serializable> tsk : listTasks) {
+ for (Task<?> tsk : listTasks) {
setInputFormat(tsk);
}
}
if (task.getChildTasks() != null) {
- for (Task<? extends Serializable> childTask : task.getChildTasks()) {
+ for (Task<?> childTask : task.getChildTasks()) {
setInputFormat(childTask);
}
}
@@ -567,14 +567,14 @@ public class SparkCompiler extends TaskCompiler {
}
@Override
- protected void decideExecMode(List<Task<? extends Serializable>> rootTasks, Context ctx,
+ protected void decideExecMode(List<Task<?>> rootTasks, Context ctx,
GlobalLimitCtx globalLimitCtx) throws SemanticException {
// currently all Spark work is on the cluster
return;
}
@Override
- protected void optimizeTaskPlan(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx,
+ protected void optimizeTaskPlan(List<Task<?>> rootTasks, ParseContext pCtx,
Context ctx) throws SemanticException {
PERF_LOGGER.PerfLogBegin(CLASS_NAME, PerfLogger.SPARK_OPTIMIZE_TASK_TREE);
PhysicalContext physicalCtx = new PhysicalContext(conf, pCtx, pCtx.getContext(), rootTasks,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java
index 1983ab7..e07794f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java
@@ -38,6 +38,6 @@ public interface ConditionalResolver {
* opaque context
* @return position of the task
*/
- List<Task<? extends Serializable>> getTasks(HiveConf conf, Object ctx);
+ List<Task<?>> getTasks(HiveConf conf, Object ctx);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java
index cc5baee..c1b9b27 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java
@@ -50,10 +50,10 @@ public class ConditionalResolverCommonJoin implements ConditionalResolver, Seria
public static class ConditionalResolverCommonJoinCtx implements Serializable {
private static final long serialVersionUID = 1L;
- private HashMap<Task<? extends Serializable>, Set<String>> taskToAliases;
+ private HashMap<Task<?>, Set<String>> taskToAliases;
Map<Path, List<String>> pathToAliases;
Map<String, Long> aliasToKnownSize;
- private Task<? extends Serializable> commonJoinTask;
+ private Task<?> commonJoinTask;
private Path localTmpDir;
private Path hdfsTmpDir;
@@ -61,19 +61,19 @@ public class ConditionalResolverCommonJoin implements ConditionalResolver, Seria
public ConditionalResolverCommonJoinCtx() {
}
- public HashMap<Task<? extends Serializable>, Set<String>> getTaskToAliases() {
+ public HashMap<Task<?>, Set<String>> getTaskToAliases() {
return taskToAliases;
}
- public void setTaskToAliases(HashMap<Task<? extends Serializable>, Set<String>> taskToAliases) {
+ public void setTaskToAliases(HashMap<Task<?>, Set<String>> taskToAliases) {
this.taskToAliases = taskToAliases;
}
- public Task<? extends Serializable> getCommonJoinTask() {
+ public Task<?> getCommonJoinTask() {
return commonJoinTask;
}
- public void setCommonJoinTask(Task<? extends Serializable> commonJoinTask) {
+ public void setCommonJoinTask(Task<?> commonJoinTask) {
this.commonJoinTask = commonJoinTask;
}
@@ -129,12 +129,12 @@ public class ConditionalResolverCommonJoin implements ConditionalResolver, Seria
}
@Override
- public List<Task<? extends Serializable>> getTasks(HiveConf conf, Object objCtx) {
+ public List<Task<?>> getTasks(HiveConf conf, Object objCtx) {
ConditionalResolverCommonJoinCtx ctx = ((ConditionalResolverCommonJoinCtx) objCtx).clone();
- List<Task<? extends Serializable>> resTsks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> resTsks = new ArrayList<Task<?>>();
// get aliasToPath and pass it to the heuristic
- Task<? extends Serializable> task = resolveDriverAlias(ctx, conf);
+ Task<?> task = resolveDriverAlias(ctx, conf);
if (task == null) {
// run common join task
@@ -151,7 +151,7 @@ public class ConditionalResolverCommonJoin implements ConditionalResolver, Seria
return resTsks;
}
- private Task<? extends Serializable> resolveDriverAlias(ConditionalResolverCommonJoinCtx ctx, HiveConf conf) {
+ private Task<?> resolveDriverAlias(ConditionalResolverCommonJoinCtx ctx, HiveConf conf) {
try {
resolveUnknownSizes(ctx, conf);
return resolveMapJoinTask(ctx, conf);
@@ -161,20 +161,20 @@ public class ConditionalResolverCommonJoin implements ConditionalResolver, Seria
return null;
}
- protected Task<? extends Serializable> resolveMapJoinTask(
+ protected Task<?> resolveMapJoinTask(
ConditionalResolverCommonJoinCtx ctx, HiveConf conf) throws Exception {
Set<String> participants = getParticipants(ctx);
Map<String, Long> aliasToKnownSize = ctx.getAliasToKnownSize();
- Map<Task<? extends Serializable>, Set<String>> taskToAliases = ctx.getTaskToAliases();
+ Map<Task<?>, Set<String>> taskToAliases = ctx.getTaskToAliases();
long threshold = HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVESMALLTABLESFILESIZE);
Long bigTableSize = null;
Long smallTablesSize = null;
- Map.Entry<Task<? extends Serializable>, Set<String>> nextTask = null;
- for (Map.Entry<Task<? extends Serializable>, Set<String>> entry : taskToAliases.entrySet()) {
+ Map.Entry<Task<?>, Set<String>> nextTask = null;
+ for (Map.Entry<Task<?>, Set<String>> entry : taskToAliases.entrySet()) {
Set<String> aliases = entry.getValue();
long sumOfOthers = Utilities.sumOfExcept(aliasToKnownSize, participants, aliases);
if (sumOfOthers < 0 || sumOfOthers > threshold) {
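For readers skimming the truncated hunk: the loop sums the known sizes of all participants except the candidate big-table aliases and rejects any map join whose small side is unknown or exceeds HIVESMALLTABLESFILESIZE. A simplified sketch of that selection, assuming a hypothetical pickMapJoinTask helper (the real resolveMapJoinTask also tracks big-table and small-table sizes before committing to a map join):

    import java.util.Map;
    import java.util.Set;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.exec.Utilities;

    // Illustrative selection only, not the full Hive heuristic.
    static Task<?> pickMapJoinTask(Map<Task<?>, Set<String>> taskToAliases,
        Map<String, Long> aliasToKnownSize, Set<String> participants, long threshold) {
      Task<?> best = null;
      long bestSum = Long.MAX_VALUE;
      for (Map.Entry<Task<?>, Set<String>> entry : taskToAliases.entrySet()) {
        // Total known size of every participant except this task's big-table aliases.
        long sumOfOthers = Utilities.sumOfExcept(aliasToKnownSize, participants, entry.getValue());
        if (sumOfOthers < 0 || sumOfOthers > threshold) {
          continue; // size unknown, or the small side will not fit in memory
        }
        if (sumOfOthers < bestSum) {
          bestSum = sumOfOthers;
          best = entry.getKey();
        }
      }
      return best; // null => fall back to the common join task
    }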
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
index a828809..da1376c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
@@ -56,7 +56,7 @@ public class ConditionalResolverMergeFiles implements ConditionalResolver,
*/
public static class ConditionalResolverMergeFilesCtx implements Serializable {
private static final long serialVersionUID = 1L;
- List<Task<? extends Serializable>> listTasks;
+ List<Task<?>> listTasks;
private String dir;
private DynamicPartitionCtx dpCtx; // merge task could be after dynamic partition insert
private ListBucketingCtx lbCtx;
@@ -68,7 +68,7 @@ public class ConditionalResolverMergeFiles implements ConditionalResolver,
* @param dir
*/
public ConditionalResolverMergeFilesCtx(
- List<Task<? extends Serializable>> listTasks, String dir) {
+ List<Task<?>> listTasks, String dir) {
this.listTasks = listTasks;
this.dir = dir;
}
@@ -83,7 +83,7 @@ public class ConditionalResolverMergeFiles implements ConditionalResolver,
/**
* @return the listTasks
*/
- public List<Task<? extends Serializable>> getListTasks() {
+ public List<Task<?>> getListTasks() {
return listTasks;
}
@@ -91,7 +91,7 @@ public class ConditionalResolverMergeFiles implements ConditionalResolver,
* @param listTasks
* the listTasks to set
*/
- public void setListTasks(List<Task<? extends Serializable>> listTasks) {
+ public void setListTasks(List<Task<?>> listTasks) {
this.listTasks = listTasks;
}
@@ -118,11 +118,11 @@ public class ConditionalResolverMergeFiles implements ConditionalResolver,
}
}
- public List<Task<? extends Serializable>> getTasks(HiveConf conf, Object objCtx) {
+ public List<Task<?>> getTasks(HiveConf conf, Object objCtx) {
ConditionalResolverMergeFilesCtx ctx = (ConditionalResolverMergeFilesCtx) objCtx;
String dirName = ctx.getDir();
- List<Task<? extends Serializable>> resTsks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> resTsks = new ArrayList<Task<?>>();
// check if a map-reduce job is needed to merge the files
// If the current size is smaller than the target, merge
long trgtSize = conf.getLongVar(HiveConf.ConfVars.HIVEMERGEMAPFILESSIZE);
@@ -130,9 +130,9 @@ public class ConditionalResolverMergeFiles implements ConditionalResolver,
.getLongVar(HiveConf.ConfVars.HIVEMERGEMAPFILESAVGSIZE);
trgtSize = Math.max(trgtSize, avgConditionSize);
- Task<? extends Serializable> mvTask = ctx.getListTasks().get(0);
- Task<? extends Serializable> mrTask = ctx.getListTasks().get(1);
- Task<? extends Serializable> mrAndMvTask = ctx.getListTasks().get(2);
+ Task<?> mvTask = ctx.getListTasks().get(0);
+ Task<?> mrTask = ctx.getListTasks().get(1);
+ Task<?> mrAndMvTask = ctx.getListTasks().get(2);
try {
Path dirPath = new Path(dirName);
@@ -228,9 +228,9 @@ public class ConditionalResolverMergeFiles implements ConditionalResolver,
* @param dpLbLevel
* @throws IOException
*/
- private void generateActualTasks(HiveConf conf, List<Task<? extends Serializable>> resTsks,
- long trgtSize, long avgConditionSize, Task<? extends Serializable> mvTask,
- Task<? extends Serializable> mrTask, Task<? extends Serializable> mrAndMvTask, Path dirPath,
+ private void generateActualTasks(HiveConf conf, List<Task<?>> resTsks,
+ long trgtSize, long avgConditionSize, Task<?> mvTask,
+ Task<?> mrTask, Task<?> mrAndMvTask, Path dirPath,
FileSystem inpFs, ConditionalResolverMergeFilesCtx ctx, MapWork work, int dpLbLevel)
throws IOException {
DynamicPartitionCtx dpCtx = ctx.getDPCtx();
@@ -301,7 +301,7 @@ public class ConditionalResolverMergeFiles implements ConditionalResolver,
// will contain different MoveWork objects, which causes problems.
// Not just in this case, but also in general the child move task of the mrAndMvTask should
// be used, because that is the correct move task for the "merge and move" use case.
- Task<? extends Serializable> mergeAndMoveMoveTask = mrAndMvTask.getChildTasks().get(0);
+ Task<?> mergeAndMoveMoveTask = mrAndMvTask.getChildTasks().get(0);
MoveWork mvWork = (MoveWork) mergeAndMoveMoveTask.getWork();
LoadFileDesc lfd = mvWork.getLoadFileWork();
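The three-entry listTasks convention (move-only at 0, merge-only at 1, merge-and-move at 2) is unchanged by the generics cleanup. A simplified sketch of the size check, assuming a hypothetical chooseMergeTask helper and an avgFileSize value standing in for the resolver's directory scan (dynamic partitions and the merge-and-move branch are omitted):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.exec.Task;

    // Illustrative helper; the real getTasks also handles dynamic partitions
    // and the merge-and-move entry at index 2.
    static List<Task<?>> chooseMergeTask(HiveConf conf, List<Task<?>> listTasks,
        long avgFileSize) {
      Task<?> mvTask = listTasks.get(0);  // files already large enough: just move
      Task<?> mrTask = listTasks.get(1);  // small files: run the merge job
      long trgtSize = Math.max(conf.getLongVar(HiveConf.ConfVars.HIVEMERGEMAPFILESSIZE),
          conf.getLongVar(HiveConf.ConfVars.HIVEMERGEMAPFILESAVGSIZE));
      List<Task<?>> resTsks = new ArrayList<>();
      resTsks.add(avgFileSize < trgtSize ? mrTask : mvTask);
      return resTsks;
    }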
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
index 5dd7a25..0ca5caf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
@@ -54,8 +54,8 @@ public class ConditionalResolverSkewJoin implements ConditionalResolver, Seriali
// tables into corresponding different dirs (one dir per table).
// this map stores mapping from "big key dir" to its corresponding mapjoin
// task.
- private HashMap<Path, Task<? extends Serializable>> dirToTaskMap;
- private List<Task<? extends Serializable>> noSkewTask;
+ private HashMap<Path, Task<?>> dirToTaskMap;
+ private List<Task<?>> noSkewTask;
/**
* For serialization use only.
@@ -64,27 +64,27 @@ public class ConditionalResolverSkewJoin implements ConditionalResolver, Seriali
}
public ConditionalResolverSkewJoinCtx(
- HashMap<Path, Task<? extends Serializable>> dirToTaskMap,
- List<Task<? extends Serializable>> noSkewTask) {
+ HashMap<Path, Task<?>> dirToTaskMap,
+ List<Task<?>> noSkewTask) {
super();
this.dirToTaskMap = dirToTaskMap;
this.noSkewTask = noSkewTask;
}
- public HashMap<Path, Task<? extends Serializable>> getDirToTaskMap() {
+ public HashMap<Path, Task<?>> getDirToTaskMap() {
return dirToTaskMap;
}
public void setDirToTaskMap(
- HashMap<Path, Task<? extends Serializable>> dirToTaskMap) {
+ HashMap<Path, Task<?>> dirToTaskMap) {
this.dirToTaskMap = dirToTaskMap;
}
- public List<Task<? extends Serializable>> getNoSkewTask() {
+ public List<Task<?>> getNoSkewTask() {
return noSkewTask;
}
- public void setNoSkewTask(List<Task<? extends Serializable>> noSkewTask) {
+ public void setNoSkewTask(List<Task<?>> noSkewTask) {
this.noSkewTask = noSkewTask;
}
}
@@ -93,26 +93,26 @@ public class ConditionalResolverSkewJoin implements ConditionalResolver, Seriali
}
@Override
- public List<Task<? extends Serializable>> getTasks(HiveConf conf,
+ public List<Task<?>> getTasks(HiveConf conf,
Object objCtx) {
ConditionalResolverSkewJoinCtx ctx = (ConditionalResolverSkewJoinCtx) objCtx;
- List<Task<? extends Serializable>> resTsks = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> resTsks = new ArrayList<Task<?>>();
- Map<Path, Task<? extends Serializable>> dirToTaskMap = ctx
+ Map<Path, Task<?>> dirToTaskMap = ctx
.getDirToTaskMap();
- Iterator<Entry<Path, Task<? extends Serializable>>> bigKeysPathsIter = dirToTaskMap
+ Iterator<Entry<Path, Task<?>>> bigKeysPathsIter = dirToTaskMap
.entrySet().iterator();
try {
while (bigKeysPathsIter.hasNext()) {
- Entry<Path, Task<? extends Serializable>> entry = bigKeysPathsIter.next();
+ Entry<Path, Task<?>> entry = bigKeysPathsIter.next();
Path dirPath = entry.getKey();
FileSystem inpFs = dirPath.getFileSystem(conf);
FileStatus[] fstatus = Utilities.listStatusIfExists(dirPath, inpFs);
if (fstatus != null && fstatus.length > 0) {
- Task <? extends Serializable> task = entry.getValue();
- List<Task <? extends Serializable>> parentOps = task.getParentTasks();
+ Task <?> task = entry.getValue();
+ List<Task <?>> parentOps = task.getParentTasks();
if(parentOps!=null){
- for(Task <? extends Serializable> parentOp: parentOps){
+ for(Task <?> parentOp: parentOps){
//right now only one parent
resTsks.add(parentOp);
}
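Constructing the skew-join context under the new generics is mechanical. A sketch, where makeCtx is an illustrative helper and bigKeyDir/mapJoinTask stand in for values the join optimizer supplies:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.plan.ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx;

    // Illustrative wiring; the compiler supplies the real dir and task values.
    static ConditionalResolverSkewJoinCtx makeCtx(Path bigKeyDir, Task<?> mapJoinTask) {
      HashMap<Path, Task<?>> dirToTaskMap = new HashMap<>();
      dirToTaskMap.put(bigKeyDir, mapJoinTask);     // one map-join task per big-key dir
      List<Task<?>> noSkewTask = new ArrayList<>(); // fallback when no key is skewed
      return new ConditionalResolverSkewJoinCtx(dirToTaskMap, noSkewTask);
    }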
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
index b821cb9..847d413 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
@@ -327,7 +327,7 @@ public class ImportTableDesc {
return dbName;
}
- public Task<? extends Serializable> getCreateTableTask(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
+ public Task<?> getCreateTableTask(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
HiveConf conf) {
switch (getDescType()) {
case TABLE:
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
index fbf948c..18b5f27 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -574,7 +574,7 @@ public class TestUtilities {
verify(pool).shutdownNow();
}
- private Task<? extends Serializable> getDependencyCollectionTask(){
+ private Task<?> getDependencyCollectionTask(){
return TaskFactory.get(new DependencyCollectionWork());
}
@@ -587,7 +587,7 @@ public class TestUtilities {
* \ /
* ---->DTc----
*/
- private List<Task<? extends Serializable>> getTestDiamondTaskGraph(Task<? extends Serializable> providedTask){
+ private List<Task<?>> getTestDiamondTaskGraph(Task<?> providedTask){
// Note: never instantiate a task without TaskFactory.get() if you're not
// okay with .equals() breaking. Doing it via TaskFactory.get makes sure
// that an id is generated, and two tasks of the same type don't show
@@ -595,12 +595,12 @@ public class TestUtilities {
// array. Without this, DTa, DTb, and DTc would show up as one item in
// the list of children. Thus, we're instantiating via a helper method
// that instantiates via TaskFactory.get()
- Task<? extends Serializable> root = getDependencyCollectionTask();
- Task<? extends Serializable> DTa = getDependencyCollectionTask();
- Task<? extends Serializable> DTb = getDependencyCollectionTask();
- Task<? extends Serializable> DTc = getDependencyCollectionTask();
- Task<? extends Serializable> DTd = getDependencyCollectionTask();
- Task<? extends Serializable> DTe = getDependencyCollectionTask();
+ Task<?> root = getDependencyCollectionTask();
+ Task<?> DTa = getDependencyCollectionTask();
+ Task<?> DTb = getDependencyCollectionTask();
+ Task<?> DTc = getDependencyCollectionTask();
+ Task<?> DTd = getDependencyCollectionTask();
+ Task<?> DTe = getDependencyCollectionTask();
root.addDependentTask(DTa);
root.addDependentTask(DTb);
@@ -614,7 +614,7 @@ public class TestUtilities {
providedTask.addDependentTask(DTe);
- List<Task<? extends Serializable>> retVals = new ArrayList<Task<? extends Serializable>>();
+ List<Task<?>> retVals = new ArrayList<Task<?>>();
retVals.add(root);
return retVals;
}
@@ -626,21 +626,21 @@ public class TestUtilities {
*/
public class CountingWrappingTask extends DependencyCollectionTask {
int count;
- Task<? extends Serializable> wrappedDep = null;
+ Task<?> wrappedDep = null;
- public CountingWrappingTask(Task<? extends Serializable> dep) {
+ public CountingWrappingTask(Task<?> dep) {
count = 0;
wrappedDep = dep;
super.addDependentTask(wrappedDep);
}
@Override
- public boolean addDependentTask(Task<? extends Serializable> dependent) {
+ public boolean addDependentTask(Task<?> dependent) {
return wrappedDep.addDependentTask(dependent);
}
@Override
- public List<Task<? extends Serializable>> getDependentTasks() {
+ public List<Task<?>> getDependentTasks() {
count++;
System.err.println("YAH:getDepTasks got called!");
(new Exception()).printStackTrace(System.err);
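The comment in the test above spells out why every node must come from TaskFactory.get(). A sketch of the diamond it describes, using only calls that appear in this patch (diamond is an illustrative helper, not a test method):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.exec.TaskFactory;
    import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;

    // Each TaskFactory.get() call yields a task with a fresh id, so the four
    // nodes below stay distinct under equals().
    static List<Task<?>> diamond() {
      Task<?> root  = TaskFactory.get(new DependencyCollectionWork());
      Task<?> left  = TaskFactory.get(new DependencyCollectionWork());
      Task<?> right = TaskFactory.get(new DependencyCollectionWork());
      Task<?> sink  = TaskFactory.get(new DependencyCollectionWork());
      root.addDependentTask(left);
      root.addDependentTask(right);
      left.addDependentTask(sink);   // both branches converge on one node
      right.addDependentTask(sink);
      List<Task<?>> roots = new ArrayList<>();
      roots.add(root);
      return roots;
    }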
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/AddDependencyToLeavesTest.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/AddDependencyToLeavesTest.java
index 166cf87..bf11362 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/AddDependencyToLeavesTest.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/AddDependencyToLeavesTest.java
@@ -53,21 +53,21 @@ public class AddDependencyToLeavesTest {
Task<DependencyCollectionWork> collectionWorkTaskThree =
TaskFactory.get(new DependencyCollectionWork());
- @SuppressWarnings("unchecked") Task<? extends Serializable> rootTask = mock(Task.class);
+ @SuppressWarnings("unchecked") Task<?> rootTask = mock(Task.class);
when(rootTask.getDependentTasks())
.thenReturn(
Arrays.asList(collectionWorkTaskOne, collectionWorkTaskTwo, collectionWorkTaskThree));
- @SuppressWarnings("unchecked") List<Task<? extends Serializable>> tasksPostCurrentGraph =
+ @SuppressWarnings("unchecked") List<Task<?>> tasksPostCurrentGraph =
Arrays.asList(mock(Task.class), mock(Task.class));
DAGTraversal.traverse(Collections.singletonList(rootTask),
new AddDependencyToLeaves(tasksPostCurrentGraph));
- List<Task<? extends Serializable>> dependentTasksForOne =
+ List<Task<?>> dependentTasksForOne =
collectionWorkTaskOne.getDependentTasks();
- List<Task<? extends Serializable>> dependentTasksForTwo =
+ List<Task<?>> dependentTasksForTwo =
collectionWorkTaskTwo.getDependentTasks();
- List<Task<? extends Serializable>> dependentTasksForThree =
+ List<Task<?>> dependentTasksForThree =
collectionWorkTaskThree.getDependentTasks();
assertEquals(dependentTasksForOne.size(), 2);
@@ -83,4 +83,4 @@ public class AddDependencyToLeavesTest {
}
-}
\ No newline at end of file
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java
index 41ab447..bb9999d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/TestTaskTracker.java
@@ -32,7 +32,7 @@ import static org.junit.Assert.assertTrue;
@RunWith(PowerMockRunner.class)
public class TestTaskTracker {
@Mock
- private Task<? extends Serializable> task;
+ private Task<?> task;
@Test
public void taskTrackerCompositionInitializesTheMaxTasksCorrectly() {
@@ -44,4 +44,4 @@ import static org.junit.Assert.assertTrue;
TaskTracker taskTracker2 = new TaskTracker(taskTracker);
assertFalse(taskTracker2.canAddMoreTasks());
}
-}
\ No newline at end of file
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/util/DAGTraversalTest.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/util/DAGTraversalTest.java
index 6dcecde..f482e3b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/util/DAGTraversalTest.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/util/DAGTraversalTest.java
@@ -37,24 +37,24 @@ public class DAGTraversalTest {
int count = 0;
@Override
- public void process(Task<? extends Serializable> task) {
+ public void process(Task<?> task) {
if (task.getDependentTasks() == null || task.getDependentTasks().isEmpty()) {
count++;
}
}
@Override
- public boolean skipProcessing(Task<? extends Serializable> task) {
+ public boolean skipProcessing(Task<?> task) {
return false;
}
}
@Test
public void shouldCountNumberOfLeafNodesCorrectly() {
- Task<? extends Serializable> taskWith5NodeTree = linearTree(5);
- Task<? extends Serializable> taskWith1NodeTree = linearTree(1);
- Task<? extends Serializable> taskWith3NodeTree = linearTree(3);
- @SuppressWarnings("unchecked") Task<? extends Serializable> rootTask = mock(Task.class);
+ Task<?> taskWith5NodeTree = linearTree(5);
+ Task<?> taskWith1NodeTree = linearTree(1);
+ Task<?> taskWith3NodeTree = linearTree(3);
+ @SuppressWarnings("unchecked") Task<?> rootTask = mock(Task.class);
when(rootTask.getDependentTasks())
.thenReturn(Arrays.asList(taskWith1NodeTree, taskWith3NodeTree, taskWith5NodeTree));
@@ -63,10 +63,10 @@ public class DAGTraversalTest {
assertEquals(3, function.count);
}
- private Task<? extends Serializable> linearTree(int numOfNodes) {
- Task<? extends Serializable> current = null, head = null;
+ private Task<?> linearTree(int numOfNodes) {
+ Task<?> current = null, head = null;
for (int i = 0; i < numOfNodes; i++) {
- @SuppressWarnings("unchecked") Task<? extends Serializable> task = mock(Task.class);
+ @SuppressWarnings("unchecked") Task<?> task = mock(Task.class);
if (current != null) {
when(current.getDependentTasks()).thenReturn(Collections.singletonList(task));
}
@@ -78,4 +78,4 @@ public class DAGTraversalTest {
return head;
}
-}
\ No newline at end of file
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/TestGenMapRedUtilsCreateConditionalTask.java b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/TestGenMapRedUtilsCreateConditionalTask.java
index a40ad24..a96d93e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/optimizer/TestGenMapRedUtilsCreateConditionalTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/optimizer/TestGenMapRedUtilsCreateConditionalTask.java
@@ -59,7 +59,7 @@ import static org.mockito.Mockito.when;
public class TestGenMapRedUtilsCreateConditionalTask {
private static HiveConf hiveConf;
- private Task<? extends Serializable> dummyMRTask;
+ private Task<?> dummyMRTask;
@BeforeClass
public static void initializeSessionState() {
@@ -187,9 +187,9 @@ public class TestGenMapRedUtilsCreateConditionalTask {
GenMapRedUtils.createMRWorkForMergingFiles(fileSinkOperator, finalDirName, null,
moveTaskList, hiveConf, dummyMRTask, new LineageState());
ConditionalTask conditionalTask = (ConditionalTask)dummyMRTask.getChildTasks().get(0);
- Task<? extends Serializable> moveOnlyTask = conditionalTask.getListTasks().get(0);
- Task<? extends Serializable> mergeOnlyTask = conditionalTask.getListTasks().get(1);
- Task<? extends Serializable> mergeAndMoveTask = conditionalTask.getListTasks().get(2);
+ Task<?> moveOnlyTask = conditionalTask.getListTasks().get(0);
+ Task<?> mergeOnlyTask = conditionalTask.getListTasks().get(1);
+ Task<?> mergeAndMoveTask = conditionalTask.getListTasks().get(2);
/*
* OPTIMIZATION
@@ -228,9 +228,9 @@ public class TestGenMapRedUtilsCreateConditionalTask {
GenMapRedUtils.createMRWorkForMergingFiles(fileSinkOperator, finalDirName, null,
moveTaskList, hiveConf, dummyMRTask, new LineageState());
ConditionalTask conditionalTask = (ConditionalTask)dummyMRTask.getChildTasks().get(0);
- Task<? extends Serializable> moveOnlyTask = conditionalTask.getListTasks().get(0);
- Task<? extends Serializable> mergeOnlyTask = conditionalTask.getListTasks().get(1);
- Task<? extends Serializable> mergeAndMoveTask = conditionalTask.getListTasks().get(2);
+ Task<?> moveOnlyTask = conditionalTask.getListTasks().get(0);
+ Task<?> mergeOnlyTask = conditionalTask.getListTasks().get(1);
+ Task<?> mergeAndMoveTask = conditionalTask.getListTasks().get(2);
// Verify moveOnlyTask is NOT optimized
assertEquals(1, moveOnlyTask.getChildTasks().size());
@@ -263,9 +263,9 @@ public class TestGenMapRedUtilsCreateConditionalTask {
GenMapRedUtils.createMRWorkForMergingFiles(fileSinkOperator, finalDirName, null,
moveTaskList, hiveConf, dummyMRTask, new LineageState());
ConditionalTask conditionalTask = (ConditionalTask)dummyMRTask.getChildTasks().get(0);
- Task<? extends Serializable> moveOnlyTask = conditionalTask.getListTasks().get(0);
- Task<? extends Serializable> mergeOnlyTask = conditionalTask.getListTasks().get(1);
- Task<? extends Serializable> mergeAndMoveTask = conditionalTask.getListTasks().get(2);
+ Task<?> moveOnlyTask = conditionalTask.getListTasks().get(0);
+ Task<?> mergeOnlyTask = conditionalTask.getListTasks().get(1);
+ Task<?> mergeAndMoveTask = conditionalTask.getListTasks().get(2);
// Verify moveOnlyTask is NOT optimized
assertEquals(1, moveOnlyTask.getChildTasks().size());
@@ -309,7 +309,7 @@ public class TestGenMapRedUtilsCreateConditionalTask {
return moveTask;
}
- private void verifyMoveTask(Task<? extends Serializable> task, Path source, Path target) {
+ private void verifyMoveTask(Task<?> task, Path source, Path target) {
MoveTask moveTask = (MoveTask)task;
assertEquals(source, moveTask.getWork().getLoadFileWork().getSourcePath());
assertEquals(target, moveTask.getWork().getLoadFileWork().getTargetDir());
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
index b5d3b8f..888e4ef 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestGenTezWork.java
@@ -80,7 +80,7 @@ public class TestGenTezWork {
conf,
pctx,
Collections.EMPTY_LIST,
- new ArrayList<Task<? extends Serializable>>(),
+ new ArrayList<Task<?>>(),
Collections.EMPTY_SET,
Collections.EMPTY_SET);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java
index 3fc82ad..780fb2a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestConditionalResolverCommonJoin.java
@@ -58,8 +58,8 @@ public class TestConditionalResolverCommonJoin {
// joins alias1, alias2, alias3 (alias1 was not eligible for big pos)
// Must be deterministic order map for consistent q-test output across Java versions
- HashMap<Task<? extends Serializable>, Set<String>> taskToAliases =
- new LinkedHashMap<Task<? extends Serializable>, Set<String>>();
+ HashMap<Task<?>, Set<String>> taskToAliases =
+ new LinkedHashMap<Task<?>, Set<String>>();
taskToAliases.put(task1, new HashSet<String>(Arrays.asList("alias2")));
taskToAliases.put(task2, new HashSet<String>(Arrays.asList("alias3")));
@@ -88,4 +88,4 @@ public class TestConditionalResolverCommonJoin {
resolved = resolver.resolveMapJoinTask(ctx, conf);
Assert.assertNull(resolved);
}
-}
\ No newline at end of file
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
index 51f610d..bfa0efe 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
@@ -196,7 +196,7 @@ public class TestReadEntityDirect {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
readEntities = context.getInputs();
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
index 97ef3c4..7836625 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
@@ -45,7 +45,7 @@ public class TestViewEntity {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
readEntities = context.getInputs().toArray(new ReadEntity[0]);
}
diff --git a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
index fa61d3c..c9a57c5 100644
--- a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
+++ b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
@@ -366,7 +366,7 @@ public abstract class CLIServiceTest {
@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context,
- List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ List<Task<?>> rootTasks) throws SemanticException {
}
}