You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jp...@apache.org on 2015/01/15 22:52:17 UTC
svn commit: r1652281 - in /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql:
optimizer/ parse/ plan/
Author: jpullokk
Date: Thu Jan 15 21:52:16 2015
New Revision: 1652281
URL: http://svn.apache.org/r1652281
Log:
Inline FileSinkOperator, Properties (Jesus Camacho Rodriguez via Laljo John Pullokkaran)
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java?rev=1652281&r1=1652280&r2=1652281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java Thu Jan 15 21:52:16 2015
@@ -390,7 +390,7 @@ public class BucketingSortingReduceSinkO
}
}
- Table destTable = pGraphContext.getFsopToTable().get(fsOp);
+ Table destTable = fsOp.getConf().getTable();
if (destTable == null) {
return null;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java?rev=1652281&r1=1652280&r2=1652281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedDynPartitionOptimizer.java Thu Jan 15 21:52:16 2015
@@ -143,7 +143,7 @@ public class SortedDynPartitionOptimizer
return null;
}
- Table destTable = parseCtx.getFsopToTable().get(fsOp);
+ Table destTable = fsOp.getConf().getTable();
if (destTable == null) {
LOG.debug("Bailing out of sort dynamic partition optimization as destination table is null");
return null;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=1652281&r1=1652280&r2=1652281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Thu Jan 15 21:52:16 2015
@@ -31,7 +31,6 @@ import org.apache.hadoop.hive.ql.Context
import org.apache.hadoop.hive.ql.QueryProperties;
import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.FetchTask;
-import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.GroupByOperator;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.ListSinkOperator;
@@ -43,7 +42,6 @@ import org.apache.hadoop.hive.ql.exec.Ta
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.LineageInfo;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
@@ -79,7 +77,6 @@ public class ParseContext {
private Map<MapJoinOperator, QBJoinTree> mapJoinContext;
private Map<SMBMapJoinOperator, QBJoinTree> smbMapJoinContext;
private HashMap<TableScanOperator, Table> topToTable;
- private Map<FileSinkOperator, Table> fsopToTable;
private List<ReduceSinkOperator> reduceSinkOperatorsAddedByEnforceBucketingSorting;
private HashMap<TableScanOperator, Map<String, String>> topToProps;
private HashMap<String, SplitSample> nameToSplitSample;
@@ -169,7 +166,6 @@ public class ParseContext {
Map<SMBMapJoinOperator, QBJoinTree> smbMapJoinContext,
HashMap<TableScanOperator, Table> topToTable,
HashMap<TableScanOperator, Map<String, String>> topToProps,
- Map<FileSinkOperator, Table> fsopToTable,
List<LoadTableDesc> loadTableWork, List<LoadFileDesc> loadFileWork,
Context ctx, HashMap<String, String> idToTableNameMap, int destTableId,
UnionProcContext uCtx, List<AbstractMapJoinOperator<? extends MapJoinDesc>> listMapJoinOpsNoReducer,
@@ -191,7 +187,6 @@ public class ParseContext {
this.joinContext = joinContext;
this.smbMapJoinContext = smbMapJoinContext;
this.topToTable = topToTable;
- this.fsopToTable = fsopToTable;
this.topToProps = topToProps;
this.loadFileWork = loadFileWork;
this.loadTableWork = loadTableWork;
@@ -312,14 +307,6 @@ public class ParseContext {
this.topToTable = topToTable;
}
- public Map<FileSinkOperator, Table> getFsopToTable() {
- return fsopToTable;
- }
-
- public void setFsopToTable(Map<FileSinkOperator, Table> fsopToTable) {
- this.fsopToTable = fsopToTable;
- }
-
public List<ReduceSinkOperator> getReduceSinkOperatorsAddedByEnforceBucketingSorting() {
return reduceSinkOperatorsAddedByEnforceBucketingSorting;
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1652281&r1=1652280&r2=1652281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Jan 15 21:52:16 2015
@@ -395,8 +395,7 @@ public class SemanticAnalyzer extends Ba
public ParseContext getParseContext() {
return new ParseContext(conf, qb, ast, opToPartPruner, opToPartList, topOps,
topSelOps, opParseCtx, joinContext, smbMapJoinContext, topToTable, topToTableProps,
- fsopToTable, loadTableWork,
- loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
+ loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
opToPartToSkewedPruner, viewAliasToInput,
@@ -6334,7 +6333,9 @@ public class SemanticAnalyzer extends Ba
+ dest_path + " row schema: " + inputRR.toString());
}
- fsopToTable.put((FileSinkOperator) output, dest_tab);
+ FileSinkOperator fso = (FileSinkOperator) output;
+ fso.getConf().setTable(dest_tab);
+ fsopToTable.put(fso, dest_tab);
return output;
}
@@ -9966,7 +9967,7 @@ public class SemanticAnalyzer extends Ba
// 4. Generate Parse Context for Optimizer & Physical compiler
ParseContext pCtx = new ParseContext(conf, qb, plannerCtx.child, opToPartPruner, opToPartList,
topOps, topSelOps, opParseCtx, joinContext, smbMapJoinContext, topToTable, topToTableProps,
- fsopToTable, loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
+ loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions, opToSamplePruner,
globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToPartToSkewedPruner,
viewAliasToInput, reduceSinkOperatorsAddedByEnforceBucketingSorting, queryProperties);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java?rev=1652281&r1=1652280&r2=1652281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java Thu Jan 15 21:52:16 2015
@@ -26,8 +26,6 @@ import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
-import com.google.common.collect.Interner;
-import com.google.common.collect.Interners;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
@@ -60,6 +58,9 @@ import org.apache.hadoop.hive.ql.plan.Pl
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import com.google.common.collect.Interner;
+import com.google.common.collect.Interners;
+
/**
* TaskCompiler is the base class for classes that compile
* operator pipelines into tasks.
@@ -388,7 +389,6 @@ public abstract class TaskCompiler {
pCtx.getOpToPartPruner(), pCtx.getOpToPartList(), pCtx.getTopOps(),
pCtx.getTopSelOps(), pCtx.getOpParseCtx(), pCtx.getJoinContext(),
pCtx.getSmbMapJoinContext(), pCtx.getTopToTable(), pCtx.getTopToProps(),
- pCtx.getFsopToTable(),
pCtx.getLoadTableWork(), pCtx.getLoadFileWork(), pCtx.getContext(),
pCtx.getIdToTableNameMap(), pCtx.getDestTableId(), pCtx.getUCtx(),
pCtx.getListMapJoinOpsNoReducer(), pCtx.getGroupOpToInputTables(),
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java?rev=1652281&r1=1652280&r2=1652281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java Thu Jan 15 21:52:16 2015
@@ -23,6 +23,7 @@ import java.util.List;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.metadata.Table;
/**
* FileSinkDesc.
@@ -89,6 +90,8 @@ public class FileSinkDesc extends Abstra
private AcidUtils.Operation writeType = AcidUtils.Operation.NOT_ACID;
private long txnId = 0; // transaction id for this operation
+ private transient Table table;
+
public FileSinkDesc() {
}
@@ -421,4 +424,12 @@ public class FileSinkDesc extends Abstra
public long getTransactionId() {
return txnId;
}
+
+ public Table getTable() {
+ return table;
+ }
+
+ public void setTable(Table table) {
+ this.table = table;
+ }
}