You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/08/28 04:57:38 UTC
svn commit: r1621023 - in /hive/branches/cbo/ql: ./
src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/
src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/
src/java/org/apache/hadoop/hive/ql/parse/
Author: gunther
Date: Thu Aug 28 02:57:38 2014
New Revision: 1621023
URL: http://svn.apache.org/r1621023
Log:
HIVE-7814: CBO: Predicate Push Down Enhancements (Laljo John Pullokkaran via Gunther Hagleitner)
Modified:
hive/branches/cbo/ql/pom.xml
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePushFilterPastJoinRule.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/DerivedTableInjector.java
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
Modified: hive/branches/cbo/ql/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/pom.xml?rev=1621023&r1=1621022&r2=1621023&view=diff
==============================================================================
--- hive/branches/cbo/ql/pom.xml (original)
+++ hive/branches/cbo/ql/pom.xml Thu Aug 28 02:57:38 2014
@@ -28,7 +28,7 @@
<name>Hive Query Language</name>
<properties>
- <optiq.version>0.9.0-incubating-SNAPSHOT</optiq.version>
+ <optiq.version>0.9.1-incubating-SNAPSHOT</optiq.version>
<hive.path.to.root>..</hive.path.to.root>
</properties>
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePushFilterPastJoinRule.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePushFilterPastJoinRule.java?rev=1621023&r1=1621022&r2=1621023&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePushFilterPastJoinRule.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/rules/HivePushFilterPastJoinRule.java Thu Aug 28 02:57:38 2014
@@ -115,7 +115,7 @@ public abstract class HivePushFilterPast
final Holder<JoinRelType> joinTypeHolder = Holder.of(join.getJoinType());
if (RelOptUtil.classifyFilters(join, aboveFilters,
join.getJoinType(), true, !join.getJoinType().generatesNullsOnLeft(), !join.getJoinType()
- .generatesNullsOnRight(), joinFilters, leftFilters, rightFilters, joinTypeHolder, false)) {
+ .generatesNullsOnRight(), joinFilters, leftFilters, rightFilters, joinTypeHolder, smart)) {
filterPushed = true;
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java?rev=1621023&r1=1621022&r2=1621023&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java Thu Aug 28 02:57:38 2014
@@ -8,7 +8,6 @@ import java.util.concurrent.atomic.Atomi
import net.hydromatic.optiq.util.BitSets;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveJoinRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveSortRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.SqlFunctionConverter.HiveToken;
import org.apache.hadoop.hive.ql.parse.ASTNode;
@@ -123,21 +122,15 @@ public class ASTConverter {
/*
* 6. Project
*/
- if (!select.getChildExps().isEmpty()) {
+ ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_SELECT, "TOK_SELECT");
+ int i = 0;
- ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_SELECT, "TOK_SELECT");
- int i = 0;
-
- for (RexNode r : select.getChildExps()) {
- ASTNode selectExpr = ASTBuilder.selectExpr(r.accept(new RexVisitor(schema)), select
- .getRowType().getFieldNames().get(i++));
- b.add(selectExpr);
- }
- hiveAST.select = b.node();
- } else {
- throw new IllegalStateException("Expected non-zero children for select.");
+ for (RexNode r : select.getChildExps()) {
+ ASTNode selectExpr = ASTBuilder.selectExpr(r.accept(new RexVisitor(schema)), select
+ .getRowType().getFieldNames().get(i++));
+ b.add(selectExpr);
}
-
+ hiveAST.select = b.node();
/*
* 7. Order Use in Order By from the block above. RelNode has no pointer to
@@ -226,7 +219,7 @@ public class ASTConverter {
public void handle(FilterRelBase filter) {
RelNode child = filter.getChild();
- if (child instanceof AggregateRelBase) {
+ if (child instanceof AggregateRelBase && !((AggregateRelBase) child).getGroupSet().isEmpty()) {
ASTConverter.this.having = filter;
} else {
ASTConverter.this.where = filter;
@@ -502,7 +495,7 @@ public class ASTConverter {
* 1. ProjectRel will always be child of SortRel.<br>
* 2. In Optiq every projection in ProjectRelBase is uniquely named
* (unambiguous) without using table qualifier (table name).<br>
- *
+ *
* @param order
* Hive Sort Rel Node
* @return Schema
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/DerivedTableInjector.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/DerivedTableInjector.java?rev=1621023&r1=1621022&r2=1621023&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/DerivedTableInjector.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/DerivedTableInjector.java Thu Aug 28 02:57:38 2014
@@ -207,7 +207,8 @@ public class DerivedTableInjector {
// TODO: Verify GB having is not a separate filter (if so we shouldn't
// introduce derived table)
if (parent instanceof JoinRelBase || parent instanceof SetOpRel
- || parent instanceof AggregateRelBase) {
+ || parent instanceof AggregateRelBase
+ || (parent instanceof FilterRelBase && ((AggregateRelBase) gbNode).getGroupSet().isEmpty())) {
validParent = false;
}
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1621023&r1=1621022&r2=1621023&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Aug 28 02:57:38 2014
@@ -223,8 +223,8 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.InputFormat;
import org.eigenbase.rel.AggregateCall;
import org.eigenbase.rel.Aggregation;
+import org.eigenbase.rel.FilterRelBase;
import org.eigenbase.rel.InvalidRelException;
-import org.eigenbase.rel.JoinInfo;
import org.eigenbase.rel.JoinRelBase;
import org.eigenbase.rel.JoinRelType;
import org.eigenbase.rel.RelCollation;
@@ -236,7 +236,12 @@ import org.eigenbase.rel.metadata.Chaine
import org.eigenbase.rel.metadata.RelMetadataProvider;
import org.eigenbase.rel.rules.ConvertMultiJoinRule;
import org.eigenbase.rel.rules.LoptOptimizeJoinRule;
+import org.eigenbase.rel.rules.MergeFilterRule;
+import org.eigenbase.rel.rules.PushFilterPastProjectRule;
+import org.eigenbase.rel.rules.PushFilterPastSetOpRule;
+import org.eigenbase.rel.rules.RemoveTrivialProjectRule;
import org.eigenbase.rel.rules.SemiJoinRel;
+import org.eigenbase.rel.rules.TransitivePredicatesOnJoinRule;
import org.eigenbase.relopt.RelOptCluster;
import org.eigenbase.relopt.RelOptPlanner;
import org.eigenbase.relopt.RelOptQuery;
@@ -11826,7 +11831,8 @@ public class SemanticAnalyzer extends Ba
LinkedHashMap<RelNode, RowResolver> m_relToHiveRR = new LinkedHashMap<RelNode, RowResolver>();
LinkedHashMap<RelNode, ImmutableMap<String, Integer>> m_relToHiveColNameOptiqPosMap = new LinkedHashMap<RelNode, ImmutableMap<String, Integer>>();
- private ASTNode getOptimizedAST(Map<String, PrunedPartitionList> partitionCache) throws SemanticException {
+ private ASTNode getOptimizedAST(Map<String, PrunedPartitionList> partitionCache)
+ throws SemanticException {
ASTNode optiqOptimizedAST = null;
RelNode optimizedOptiqPlan = null;
this.partitionCache = partitionCache;
@@ -11839,8 +11845,7 @@ public class SemanticAnalyzer extends Ba
else
throw new RuntimeException(e);
}
- optiqOptimizedAST = ASTConverter
- .convert(optimizedOptiqPlan, resultSchema);
+ optiqOptimizedAST = ASTConverter.convert(optimizedOptiqPlan, resultSchema);
return optiqOptimizedAST;
}
@@ -11895,33 +11900,35 @@ public class SemanticAnalyzer extends Ba
optiqOptimizedPlan = hepPlanner.findBestExp();
-
if (LOG.isDebugEnabled()) {
LOG.debug("CBO Planning details:\n");
LOG.debug("Original Plan:\n");
- LOG.debug(RelOptUtil.toString(optiqGenPlan,
- SqlExplainLevel.ALL_ATTRIBUTES));
+ LOG.debug(RelOptUtil.toString(optiqGenPlan, SqlExplainLevel.ALL_ATTRIBUTES));
LOG.debug("Plan After PPD, PartPruning, ColumnPruning:\n");
- LOG.debug(RelOptUtil.toString(optiqPreCboPlan,
- SqlExplainLevel.ALL_ATTRIBUTES));
+ LOG.debug(RelOptUtil.toString(optiqPreCboPlan, SqlExplainLevel.ALL_ATTRIBUTES));
LOG.debug("Plan After Join Reordering:\n");
- LOG.debug(RelOptUtil.toString(optiqOptimizedPlan,
- SqlExplainLevel.ALL_ATTRIBUTES));
+ LOG.debug(RelOptUtil.toString(optiqOptimizedPlan, SqlExplainLevel.ALL_ATTRIBUTES));
}
return optiqOptimizedPlan;
}
- public RelNode applyPreCBOTransforms(RelNode basePlan,
- RelMetadataProvider mdProvider) {
+ public RelNode applyPreCBOTransforms(RelNode basePlan, RelMetadataProvider mdProvider) {
// TODO: Decorrelation of subquery should be done before attempting
// Partition Pruning; otherwise Expression evaluation may try to execute
// correlated sub query.
- basePlan = hepPlan(basePlan, mdProvider,
- HivePushFilterPastJoinRule.JOIN,
- HivePushFilterPastJoinRule.FILTER_ON_JOIN,
- new HivePartitionPrunerRule(SemanticAnalyzer.this.conf));
+ basePlan = hepPlan(basePlan, true, mdProvider, new PushFilterPastProjectRule(
+ FilterRelBase.class, HiveFilterRel.DEFAULT_FILTER_FACTORY, HiveProjectRel.class,
+ HiveProjectRel.DEFAULT_PROJECT_FACTORY), new PushFilterPastSetOpRule(
+ HiveFilterRel.DEFAULT_FILTER_FACTORY), new MergeFilterRule(
+ HiveFilterRel.DEFAULT_FILTER_FACTORY), HivePushFilterPastJoinRule.JOIN,
+ HivePushFilterPastJoinRule.FILTER_ON_JOIN);
+
+ basePlan = hepPlan(basePlan, false, mdProvider, new TransitivePredicatesOnJoinRule(
+ JoinRelBase.class, HiveFilterRel.DEFAULT_FILTER_FACTORY),
+ RemoveTrivialProjectRule.INSTANCE,
+ new HivePartitionPrunerRule(SemanticAnalyzer.this.conf));
HiveRelFieldTrimmer fieldTrimmer = new HiveRelFieldTrimmer(null);
basePlan = fieldTrimmer.trim(basePlan);
@@ -11929,12 +11936,18 @@ public class SemanticAnalyzer extends Ba
return basePlan;
}
- private RelNode hepPlan(RelNode basePlan,
- RelMetadataProvider mdProvider, RelOptRule...rules) {
+ private RelNode hepPlan(RelNode basePlan, boolean followPlanChanges,
+ RelMetadataProvider mdProvider, RelOptRule... rules) {
+ RelNode optimizedRelNode = basePlan;
HepProgramBuilder programBuilder = new HepProgramBuilder();
- for(RelOptRule rule : rules) {
- programBuilder.addRuleInstance(rule);
+ if (followPlanChanges) {
+ programBuilder.addMatchOrder(HepMatchOrder.TOP_DOWN);
+ programBuilder = programBuilder.addRuleCollection(ImmutableList.copyOf(rules));
+ } else {
+ // TODO: Should this be also TOP_DOWN?
+ for (RelOptRule r : rules)
+ programBuilder.addRuleInstance(r);
}
HepPlanner planner = new HepPlanner(programBuilder.build());
@@ -11946,7 +11959,9 @@ public class SemanticAnalyzer extends Ba
new CachingRelMetadataProvider(chainedProvider, planner));
planner.setRoot(basePlan);
- return planner.findBestExp();
+ optimizedRelNode = planner.findBestExp();
+
+ return optimizedRelNode;
}
@SuppressWarnings("nls")
@@ -12082,8 +12097,8 @@ public class SemanticAnalyzer extends Ba
return unionRel;
}
- private RelNode genJoinRelNode(RelNode leftRel, RelNode rightRel,
- JoinType hiveJoinType, ASTNode joinCond) throws SemanticException {
+ private RelNode genJoinRelNode(RelNode leftRel, RelNode rightRel, JoinType hiveJoinType,
+ ASTNode joinCond) throws SemanticException {
RelNode joinRel = null;
// 1. construct the RowResolver for the new Join Node by combining row
@@ -12102,16 +12117,16 @@ public class SemanticAnalyzer extends Ba
// 2. Construct ExpressionNodeDesc representing Join Condition
RexNode optiqJoinCond = null;
if (joinCond != null) {
- Map<ASTNode, ExprNodeDesc> exprNodes = JoinCondnTypeCheckProcFactory
- .genExprNode(joinCond, new JoinTypeCheckCtx(leftRR, rightRR));
+ Map<ASTNode, ExprNodeDesc> exprNodes = JoinCondnTypeCheckProcFactory.genExprNode(joinCond,
+ new JoinTypeCheckCtx(leftRR, rightRR));
ExprNodeDesc joinCondnExprNode = exprNodes.get(joinCond);
List<RelNode> inputRels = new ArrayList<RelNode>();
inputRels.add(leftRel);
inputRels.add(rightRel);
- optiqJoinCond = RexNodeConverter.convert(m_cluster, joinCondnExprNode,
- inputRels, m_relToHiveRR, m_relToHiveColNameOptiqPosMap, false);
+ optiqJoinCond = RexNodeConverter.convert(m_cluster, joinCondnExprNode, inputRels,
+ m_relToHiveRR, m_relToHiveColNameOptiqPosMap, false);
} else {
optiqJoinCond = m_cluster.getRexBuilder().makeLiteral(true);
}
@@ -12143,40 +12158,36 @@ public class SemanticAnalyzer extends Ba
optiqJoinType = JoinRelType.INNER;
break;
}
-
+
if (leftSemiJoin) {
List<RelDataTypeField> sysFieldList = new ArrayList<RelDataTypeField>();
List<RexNode> leftJoinKeys = new ArrayList<RexNode>();
List<RexNode> rightJoinKeys = new ArrayList<RexNode>();
- RexNode nonEquiConds = RelOptUtil.splitJoinCondition(sysFieldList,
- leftRel, rightRel, optiqJoinCond, leftJoinKeys, rightJoinKeys,
- null, null);
+ RexNode nonEquiConds = RelOptUtil.splitJoinCondition(sysFieldList, leftRel, rightRel,
+ optiqJoinCond, leftJoinKeys, rightJoinKeys, null, null);
if (!nonEquiConds.isAlwaysTrue()) {
- throw new SemanticException(
- "Non equality condition not supported in Semi-Join"
- + nonEquiConds);
+ throw new SemanticException("Non equality condition not supported in Semi-Join"
+ + nonEquiConds);
}
RelNode[] inputRels = new RelNode[] { leftRel, rightRel };
final List<Integer> leftKeys = new ArrayList<Integer>();
final List<Integer> rightKeys = new ArrayList<Integer>();
optiqJoinCond = HiveOptiqUtil.projectNonColumnEquiConditions(
- HiveProjectRel.DEFAULT_PROJECT_FACTORY, inputRels, leftJoinKeys,
- rightJoinKeys, 0, leftKeys, rightKeys);
+ HiveProjectRel.DEFAULT_PROJECT_FACTORY, inputRels, leftJoinKeys, rightJoinKeys, 0,
+ leftKeys, rightKeys);
- joinRel = new SemiJoinRel(m_cluster,
- m_cluster.traitSetOf(HiveRel.CONVENTION), inputRels[0],
- inputRels[1], optiqJoinCond, ImmutableIntList.copyOf(leftKeys),
+ joinRel = new SemiJoinRel(m_cluster, m_cluster.traitSetOf(HiveRel.CONVENTION),
+ inputRels[0], inputRels[1], optiqJoinCond, ImmutableIntList.copyOf(leftKeys),
ImmutableIntList.copyOf(rightKeys));
} else {
- joinRel = HiveJoinRel.getJoin(m_cluster, leftRel, rightRel,
- optiqJoinCond, optiqJoinType, leftSemiJoin);
+ joinRel = HiveJoinRel.getJoin(m_cluster, leftRel, rightRel, optiqJoinCond, optiqJoinType,
+ leftSemiJoin);
}
// 5. Add new JoinRel & its RR to the maps
- m_relToHiveColNameOptiqPosMap.put(joinRel,
- this.buildHiveToOptiqColumnMap(joinRR, joinRel));
+ m_relToHiveColNameOptiqPosMap.put(joinRel, this.buildHiveToOptiqColumnMap(joinRR, joinRel));
m_relToHiveRR.put(joinRel, joinRR);
return joinRel;
@@ -12184,7 +12195,7 @@ public class SemanticAnalyzer extends Ba
/**
* Generate Join Logical Plan Relnode by walking through the join AST.
- *
+ *
* @param qb
* @param aliasToRel
* Alias(Table/Relation alias) to RelNode; only read and not
@@ -12192,8 +12203,8 @@ public class SemanticAnalyzer extends Ba
* @return
* @throws SemanticException
*/
- private RelNode genJoinLogicalPlan(ASTNode joinParseTree,
- Map<String, RelNode> aliasToRel) throws SemanticException {
+ private RelNode genJoinLogicalPlan(ASTNode joinParseTree, Map<String, RelNode> aliasToRel)
+ throws SemanticException {
RelNode leftRel = null;
RelNode rightRel = null;
JoinType hiveJoinType = null;
@@ -12227,11 +12238,10 @@ public class SemanticAnalyzer extends Ba
if ((left.getToken().getType() == HiveParser.TOK_TABREF)
|| (left.getToken().getType() == HiveParser.TOK_SUBQUERY)
|| (left.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) {
- String tableName = getUnescapedUnqualifiedTableName(
- (ASTNode) left.getChild(0)).toLowerCase();
- String leftTableAlias = left.getChildCount() == 1 ? tableName
- : unescapeIdentifier(left.getChild(left.getChildCount() - 1)
- .getText().toLowerCase());
+ String tableName = getUnescapedUnqualifiedTableName((ASTNode) left.getChild(0))
+ .toLowerCase();
+ String leftTableAlias = left.getChildCount() == 1 ? tableName : unescapeIdentifier(left
+ .getChild(left.getChildCount() - 1).getText().toLowerCase());
// ptf node form is: ^(TOK_PTBLFUNCTION $name $alias?
// partitionTableFunctionSource partitioningSpec? expression*)
// guaranteed to have an alias here: check done in processJoin
@@ -12250,11 +12260,10 @@ public class SemanticAnalyzer extends Ba
if ((right.getToken().getType() == HiveParser.TOK_TABREF)
|| (right.getToken().getType() == HiveParser.TOK_SUBQUERY)
|| (right.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) {
- String tableName = getUnescapedUnqualifiedTableName(
- (ASTNode) right.getChild(0)).toLowerCase();
- String rightTableAlias = right.getChildCount() == 1 ? tableName
- : unescapeIdentifier(right.getChild(right.getChildCount() - 1)
- .getText().toLowerCase());
+ String tableName = getUnescapedUnqualifiedTableName((ASTNode) right.getChild(0))
+ .toLowerCase();
+ String rightTableAlias = right.getChildCount() == 1 ? tableName : unescapeIdentifier(right
+ .getChild(right.getChildCount() - 1).getText().toLowerCase());
// ptf node form is: ^(TOK_PTBLFUNCTION $name $alias?
// partitionTableFunctionSource partitioningSpec? expression*)
// guaranteed to have an alias here: check done in processJoin
@@ -12289,46 +12298,42 @@ public class SemanticAnalyzer extends Ba
// Virtual Cols
// 3.1 Add Column info for non partion cols (Object Inspector fields)
- StructObjectInspector rowObjectInspector = (StructObjectInspector) tab
- .getDeserializer().getObjectInspector();
- List<? extends StructField> fields = rowObjectInspector
- .getAllStructFieldRefs();
+ StructObjectInspector rowObjectInspector = (StructObjectInspector) tab.getDeserializer()
+ .getObjectInspector();
+ List<? extends StructField> fields = rowObjectInspector.getAllStructFieldRefs();
ColumnInfo colInfo;
String colName;
ArrayList<ColumnInfo> cInfoLst = new ArrayList<ColumnInfo>();
for (int i = 0; i < fields.size(); i++) {
colName = fields.get(i).getFieldName();
- colInfo = new ColumnInfo(fields.get(i).getFieldName(),
- TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i)
- .getFieldObjectInspector()), tableAlias, false);
- colInfo.setSkewedCol((isSkewedCol(tableAlias, qb, colName)) ? true
- : false);
+ colInfo = new ColumnInfo(
+ fields.get(i).getFieldName(),
+ TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i).getFieldObjectInspector()),
+ tableAlias, false);
+ colInfo.setSkewedCol((isSkewedCol(tableAlias, qb, colName)) ? true : false);
rr.put(tableAlias, colName, colInfo);
cInfoLst.add(colInfo);
}
// TODO: Fix this
- ArrayList<ColumnInfo> nonPartitionColumns = new ArrayList<ColumnInfo>(
- cInfoLst);
+ ArrayList<ColumnInfo> nonPartitionColumns = new ArrayList<ColumnInfo>(cInfoLst);
ArrayList<ColumnInfo> partitionColumns = new ArrayList<ColumnInfo>();
// 3.2 Add column info corresponding to partition columns
for (FieldSchema part_col : tab.getPartCols()) {
colName = part_col.getName();
colInfo = new ColumnInfo(colName,
- TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()),
- tableAlias, true);
+ TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), tableAlias, true);
rr.put(tableAlias, colName, colInfo);
cInfoLst.add(colInfo);
partitionColumns.add(colInfo);
}
// 3.3 Add column info corresponding to virtual columns
- Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf)
- .iterator();
+ Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf).iterator();
while (vcs.hasNext()) {
VirtualColumn vc = vcs.next();
- colInfo = new ColumnInfo(vc.getName(), vc.getTypeInfo(), tableAlias,
- true, vc.getIsHidden());
+ colInfo = new ColumnInfo(vc.getName(), vc.getTypeInfo(), tableAlias, true,
+ vc.getIsHidden());
rr.put(tableAlias, vc.getName(), colInfo);
cInfoLst.add(colInfo);
}
@@ -12337,16 +12342,15 @@ public class SemanticAnalyzer extends Ba
RelDataType rowType = TypeConverter.getType(m_cluster, rr, null);
// 4. Build RelOptAbstractTable
- RelOptHiveTable optTable = new RelOptHiveTable(m_relOptSchema,
- tableAlias, rowType, tab, nonPartitionColumns, partitionColumns, conf, partitionCache);
+ RelOptHiveTable optTable = new RelOptHiveTable(m_relOptSchema, tableAlias, rowType, tab,
+ nonPartitionColumns, partitionColumns, conf, partitionCache);
// 5. Build Hive Table Scan Rel
- tableRel = new HiveTableScanRel(m_cluster,
- m_cluster.traitSetOf(HiveRel.CONVENTION), optTable, rowType);
+ tableRel = new HiveTableScanRel(m_cluster, m_cluster.traitSetOf(HiveRel.CONVENTION),
+ optTable, rowType);
// 6. Add Schema(RR) to RelNode-Schema map
- ImmutableMap<String, Integer> hiveToOptiqColMap = buildHiveToOptiqColumnMap(
- rr, tableRel);
+ ImmutableMap<String, Integer> hiveToOptiqColMap = buildHiveToOptiqColumnMap(rr, tableRel);
m_relToHiveRR.put(tableRel, rr);
m_relToHiveColNameOptiqPosMap.put(tableRel, hiveToOptiqColMap);
} catch (Exception e) {
@@ -12356,17 +12360,14 @@ public class SemanticAnalyzer extends Ba
return tableRel;
}
- private RelNode genFilterRelNode(ASTNode filterExpr, RelNode srcRel)
- throws SemanticException {
- ExprNodeDesc filterCondn = genExprNodeDesc(filterExpr,
- m_relToHiveRR.get(srcRel));
+ private RelNode genFilterRelNode(ASTNode filterExpr, RelNode srcRel) throws SemanticException {
+ ExprNodeDesc filterCondn = genExprNodeDesc(filterExpr, m_relToHiveRR.get(srcRel));
ImmutableMap<String, Integer> hiveColNameOptiqPosMap = this.m_relToHiveColNameOptiqPosMap
.get(srcRel);
- RexNode convertedFilterExpr = new RexNodeConverter(m_cluster,
- srcRel.getRowType(), hiveColNameOptiqPosMap, 0, true)
- .convert(filterCondn);
- RelNode filterRel = new HiveFilterRel(m_cluster,
- m_cluster.traitSetOf(HiveRel.CONVENTION), srcRel, convertedFilterExpr);
+ RexNode convertedFilterExpr = new RexNodeConverter(m_cluster, srcRel.getRowType(),
+ hiveColNameOptiqPosMap, 0, true).convert(filterCondn);
+ RelNode filterRel = new HiveFilterRel(m_cluster, m_cluster.traitSetOf(HiveRel.CONVENTION),
+ srcRel, convertedFilterExpr);
this.m_relToHiveColNameOptiqPosMap.put(filterRel, hiveColNameOptiqPosMap);
m_relToHiveRR.put(filterRel, m_relToHiveRR.get(srcRel));
m_relToHiveColNameOptiqPosMap.put(filterRel, hiveColNameOptiqPosMap);
@@ -12375,35 +12376,29 @@ public class SemanticAnalyzer extends Ba
}
private RelNode genFilterRelNode(QB qb, ASTNode searchCond, RelNode srcRel,
- Map<String, RelNode> aliasToRel, boolean forHavingClause)
- throws SemanticException {
+ Map<String, RelNode> aliasToRel, boolean forHavingClause) throws SemanticException {
/*
* Handle Subquery predicates.
- *
- * Notes (8/22/14 hb):
- * Why is this a copy of the code from {@link #genFilterPlan}
- * - for now we will support the same behavior as non CBO route.
- * - but plan to allow nested SubQueries(Restriction.9.m) and
- * multiple SubQuery expressions(Restriction.8.m). This
- * requires use to utilize Optiq's Decorrelation mechanics,
- * and for Optiq to fix/flush out Null semantics(OPTIQ-373)
- * - besides only the driving code has been copied. Most of
- * the code which is SubQueryUtils and QBSubQuery is reused.
- *
+ *
+ * Notes (8/22/14 hb): Why is this a copy of the code from {@link
+ * #genFilterPlan} - for now we will support the same behavior as non CBO
+ * route. - but plan to allow nested SubQueries(Restriction.9.m) and
+ multiple SubQuery expressions(Restriction.8.m). This requires us to
+ * utilize Optiq's Decorrelation mechanics, and for Optiq to fix/flush out
+ * Null semantics(OPTIQ-373) - besides only the driving code has been
+ * copied. Most of the code which is SubQueryUtils and QBSubQuery is
+ * reused.
*/
int numSrcColumns = srcRel.getRowType().getFieldCount();
- List<ASTNode> subQueriesInOriginalTree = SubQueryUtils
- .findSubQueries(searchCond);
+ List<ASTNode> subQueriesInOriginalTree = SubQueryUtils.findSubQueries(searchCond);
if (subQueriesInOriginalTree.size() > 0) {
/*
* Restriction.9.m :: disallow nested SubQuery expressions.
*/
if (qb.getSubQueryPredicateDef() != null) {
- throw new SemanticException(
- ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
- subQueriesInOriginalTree.get(0),
- "Nested SubQuery expressions are not supported."));
+ throw new SemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
+ subQueriesInOriginalTree.get(0), "Nested SubQuery expressions are not supported."));
}
/*
@@ -12411,19 +12406,15 @@ public class SemanticAnalyzer extends Ba
*/
if (subQueriesInOriginalTree.size() > 1) {
- throw new SemanticException(
- ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
- subQueriesInOriginalTree.get(1),
- "Only 1 SubQuery expression is supported."));
+ throw new SemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
+ subQueriesInOriginalTree.get(1), "Only 1 SubQuery expression is supported."));
}
/*
* Clone the Search AST; apply all rewrites on the clone.
*/
- ASTNode clonedSearchCond = (ASTNode) SubQueryUtils.adaptor
- .dupTree(searchCond);
- List<ASTNode> subQueries = SubQueryUtils
- .findSubQueries(clonedSearchCond);
+ ASTNode clonedSearchCond = (ASTNode) SubQueryUtils.adaptor.dupTree(searchCond);
+ List<ASTNode> subQueries = SubQueryUtils.findSubQueries(clonedSearchCond);
RowResolver inputRR = m_relToHiveRR.get(srcRel);
@@ -12432,11 +12423,10 @@ public class SemanticAnalyzer extends Ba
ASTNode originalSubQueryAST = subQueriesInOriginalTree.get(i);
int sqIdx = qb.incrNumSubQueryPredicates();
- clonedSearchCond = SubQueryUtils.rewriteParentQueryWhere(
- clonedSearchCond, subQueryAST);
+ clonedSearchCond = SubQueryUtils.rewriteParentQueryWhere(clonedSearchCond, subQueryAST);
- QBSubQuery subQuery = SubQueryUtils.buildSubQuery(qb.getId(), sqIdx,
- subQueryAST, originalSubQueryAST, ctx);
+ QBSubQuery subQuery = SubQueryUtils.buildSubQuery(qb.getId(), sqIdx, subQueryAST,
+ originalSubQueryAST, ctx);
if (!forHavingClause) {
qb.setWhereClauseSubQueryPredicate(subQuery);
@@ -12450,11 +12440,10 @@ public class SemanticAnalyzer extends Ba
aliasToRel.put(havingInputAlias, srcRel);
}
- subQuery.validateAndRewriteAST(inputRR, forHavingClause,
- havingInputAlias, aliasToRel.keySet());
+ subQuery.validateAndRewriteAST(inputRR, forHavingClause, havingInputAlias,
+ aliasToRel.keySet());
- QB qbSQ = new QB(subQuery.getOuterQueryId(), subQuery.getAlias(),
- true);
+ QB qbSQ = new QB(subQuery.getOuterQueryId(), subQuery.getAlias(), true);
qbSQ.setSubQueryDef(subQuery.getSubQuery());
Phase1Ctx ctx_1 = initPhase1Ctx();
doPhase1(subQuery.getSubQueryAST(), qbSQ, ctx_1);
@@ -12469,11 +12458,9 @@ public class SemanticAnalyzer extends Ba
*/
if (subQuery.getOperator().getType() != SubQueryType.EXISTS
&& subQuery.getOperator().getType() != SubQueryType.NOT_EXISTS
- && sqRR.getColumnInfos().size()
- - subQuery.getNumOfCorrelationExprsAddedToSQSelect() > 1) {
- throw new SemanticException(
- ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(subQueryAST,
- "SubQuery can contain only 1 item in Select List."));
+ && sqRR.getColumnInfos().size() - subQuery.getNumOfCorrelationExprsAddedToSQSelect() > 1) {
+ throw new SemanticException(ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(subQueryAST,
+ "SubQuery can contain only 1 item in Select List."));
}
/*
@@ -12484,8 +12471,7 @@ public class SemanticAnalyzer extends Ba
if (subQuery.getNotInCheck() != null) {
QBSubQuery.NotInCheck notInCheck = subQuery.getNotInCheck();
notInCheck.setSQRR(sqRR);
- QB qbSQ_nic = new QB(subQuery.getOuterQueryId(),
- notInCheck.getAlias(), true);
+ QB qbSQ_nic = new QB(subQuery.getOuterQueryId(), notInCheck.getAlias(), true);
qbSQ_nic.setSubQueryDef(notInCheck.getSubQuery());
ctx_1 = initPhase1Ctx();
doPhase1(notInCheck.getSubQueryAST(), qbSQ_nic, ctx_1);
@@ -12505,10 +12491,9 @@ public class SemanticAnalyzer extends Ba
/*
* Gen Join between outer Operator and SQ op
*/
- subQuery.buildJoinCondition(inputRR, sqRR, forHavingClause,
- havingInputAlias);
- srcRel = genJoinRelNode(srcRel, subQueryRelNode,
- subQuery.getJoinType(), subQuery.getJoinConditionAST());
+ subQuery.buildJoinCondition(inputRR, sqRR, forHavingClause, havingInputAlias);
+ srcRel = genJoinRelNode(srcRel, subQueryRelNode, subQuery.getJoinType(),
+ subQuery.getJoinConditionAST());
searchCond = subQuery.updateOuterQueryFilter(clonedSearchCond);
srcRel = genFilterRelNode(searchCond, srcRel);
@@ -12528,8 +12513,7 @@ public class SemanticAnalyzer extends Ba
return genFilterRelNode(searchCond, srcRel);
}
- private RelNode projectLeftOuterSide(RelNode srcRel, int numColumns)
- throws SemanticException {
+ private RelNode projectLeftOuterSide(RelNode srcRel, int numColumns) throws SemanticException {
RowResolver iRR = m_relToHiveRR.get(srcRel);
RowResolver oRR = new RowResolver();
RowResolver.add(oRR, iRR, 0, numColumns);
@@ -12541,30 +12525,26 @@ public class SemanticAnalyzer extends Ba
for (int i = 0; i < iType.getFieldCount(); i++) {
RelDataTypeField fType = iType.getFieldList().get(i);
String fName = iType.getFieldNames().get(i);
- optiqColLst.add(m_cluster.getRexBuilder().makeInputRef(fType.getType(),
- i));
+ optiqColLst.add(m_cluster.getRexBuilder().makeInputRef(fType.getType(), i));
oFieldNames.add(fName);
}
HiveRel selRel = HiveProjectRel.create(srcRel, optiqColLst, oFieldNames);
- this.m_relToHiveColNameOptiqPosMap.put(selRel,
- buildHiveToOptiqColumnMap(oRR, selRel));
+ this.m_relToHiveColNameOptiqPosMap.put(selRel, buildHiveToOptiqColumnMap(oRR, selRel));
this.m_relToHiveRR.put(selRel, oRR);
return selRel;
}
- private RelNode genFilterLogicalPlan(QB qb, RelNode srcRel,
- Map<String, RelNode> aliasToRel,
- boolean forHavingClause)
- throws SemanticException {
+ private RelNode genFilterLogicalPlan(QB qb, RelNode srcRel, Map<String, RelNode> aliasToRel,
+ boolean forHavingClause) throws SemanticException {
RelNode filterRel = null;
- Iterator<ASTNode> whereClauseIterator = getQBParseInfo(qb)
- .getDestToWhereExpr().values().iterator();
+ Iterator<ASTNode> whereClauseIterator = getQBParseInfo(qb).getDestToWhereExpr().values()
+ .iterator();
if (whereClauseIterator.hasNext()) {
- filterRel = genFilterRelNode(qb, (ASTNode) whereClauseIterator.next().getChild(0),
- srcRel, aliasToRel, forHavingClause);
+ filterRel = genFilterRelNode(qb, (ASTNode) whereClauseIterator.next().getChild(0), srcRel,
+ aliasToRel, forHavingClause);
}
return filterRel;
@@ -12579,8 +12559,8 @@ public class SemanticAnalyzer extends Ba
private final String m_udfName;
private final boolean m_distinct;
- private AggInfo(List<ExprNodeDesc> aggParams, TypeInfo returnType,
- String udfName, boolean isDistinct) {
+ private AggInfo(List<ExprNodeDesc> aggParams, TypeInfo returnType, String udfName,
+ boolean isDistinct) {
m_aggParams = aggParams;
m_returnType = returnType;
m_udfName = udfName;
@@ -12627,14 +12607,13 @@ public class SemanticAnalyzer extends Ba
return new AggregateCall(aggregation, agg.m_distinct, argList, aggFnRetType, null);
}
- private RelNode genGBRelNode(List<ExprNodeDesc> gbExprs,
- List<AggInfo> aggInfoLst, RelNode srcRel) throws SemanticException {
+ private RelNode genGBRelNode(List<ExprNodeDesc> gbExprs, List<AggInfo> aggInfoLst,
+ RelNode srcRel) throws SemanticException {
RowResolver gbInputRR = this.m_relToHiveRR.get(srcRel);
ArrayList<ColumnInfo> signature = gbInputRR.getRowSchema().getSignature();
- ImmutableMap<String, Integer> posMap = this.m_relToHiveColNameOptiqPosMap
- .get(srcRel);
- RexNodeConverter converter = new RexNodeConverter(this.m_cluster,
- srcRel.getRowType(), posMap, 0, false);
+ ImmutableMap<String, Integer> posMap = this.m_relToHiveColNameOptiqPosMap.get(srcRel);
+ RexNodeConverter converter = new RexNodeConverter(this.m_cluster, srcRel.getRowType(),
+ posMap, 0, false);
final List<RexNode> gbChildProjLst = Lists.newArrayList();
final HashMap<String, Integer> rexNodeToPosMap = new HashMap<String, Integer>();
@@ -12652,8 +12631,8 @@ public class SemanticAnalyzer extends Ba
List<AggregateCall> aggregateCalls = Lists.newArrayList();
int i = aggInfoLst.size();
for (AggInfo agg : aggInfoLst) {
- aggregateCalls.add(convertGBAgg(agg, srcRel, gbChildProjLst, converter,
- rexNodeToPosMap, gbChildProjLst.size()));
+ aggregateCalls.add(convertGBAgg(agg, srcRel, gbChildProjLst, converter, rexNodeToPosMap,
+ gbChildProjLst.size()));
}
if (gbChildProjLst.isEmpty()) {
@@ -12665,9 +12644,8 @@ public class SemanticAnalyzer extends Ba
HiveRel aggregateRel = null;
try {
- aggregateRel = new HiveAggregateRel(m_cluster,
- m_cluster.traitSetOf(HiveRel.CONVENTION), gbInputRel, groupSet,
- aggregateCalls);
+ aggregateRel = new HiveAggregateRel(m_cluster, m_cluster.traitSetOf(HiveRel.CONVENTION),
+ gbInputRel, groupSet, aggregateCalls);
} catch (InvalidRelException e) {
throw new SemanticException(e);
}
@@ -12675,18 +12653,16 @@ public class SemanticAnalyzer extends Ba
return aggregateRel;
}
- private void addAlternateGByKeyMappings(ASTNode gByExpr,
- ColumnInfo colInfo, RowResolver gByInputRR, RowResolver gByRR) {
+ private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
+ RowResolver gByInputRR, RowResolver gByRR) {
if (gByExpr.getType() == HiveParser.DOT
&& gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL) {
- String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
- .getChild(0).getChild(0).getText());
- String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
- .getChild(1).getText());
+ String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(0).getChild(0)
+ .getText());
+ String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(1).getText());
gByRR.put(tab_alias, col_alias, colInfo);
} else if (gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL) {
- String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
- .getChild(0).getText());
+ String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(0).getText());
String tab_alias = null;
/*
* If the input to the GBy has a tab alias for the column, then add an
@@ -12705,8 +12681,8 @@ public class SemanticAnalyzer extends Ba
}
private void addToGBExpr(RowResolver groupByOutputRowResolver,
- RowResolver groupByInputRowResolver, ASTNode grpbyExpr, ExprNodeDesc grpbyExprNDesc, List<ExprNodeDesc> gbExprNDescLst,
- List<String> outputColumnNames) {
+ RowResolver groupByInputRowResolver, ASTNode grpbyExpr, ExprNodeDesc grpbyExprNDesc,
+ List<ExprNodeDesc> gbExprNDescLst, List<String> outputColumnNames) {
// TODO: Should we use grpbyExprNDesc.getTypeInfo()? what if expr is
// UDF
int i = gbExprNDescLst.size();
@@ -12787,7 +12763,7 @@ public class SemanticAnalyzer extends Ba
/**
* Generate GB plan.
- *
+ *
* @param qb
* @param srcRel
* @return TODO: 1. Grouping Sets (roll up..)
@@ -12876,8 +12852,7 @@ public class SemanticAnalyzer extends Ba
return gbRel;
}
- private RelNode genOBLogicalPlan(QB qb, RelNode srcRel)
- throws SemanticException {
+ private RelNode genOBLogicalPlan(QB qb, RelNode srcRel) throws SemanticException {
RelNode relToRet = null;
QBParseInfo qbp = getQBParseInfo(qb);
@@ -12888,8 +12863,7 @@ public class SemanticAnalyzer extends Ba
// 1. OB Expr sanity test
// in strict mode, in the presence of order by, limit must be specified
Integer limit = qb.getParseInfo().getDestLimit(dest);
- if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase(
- "strict")
+ if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase("strict")
&& limit == null) {
throw new SemanticException(generateErrorMessage(obAST,
ErrorMsg.NO_LIMIT_WITH_ORDERBY.getMsg()));
@@ -12908,21 +12882,18 @@ public class SemanticAnalyzer extends Ba
RowResolver outputRR = new RowResolver();
RexNode rnd;
- RexNodeConverter converter = new RexNodeConverter(m_cluster,
- srcRel.getRowType(), m_relToHiveColNameOptiqPosMap.get(srcRel), 0,
- false);
+ RexNodeConverter converter = new RexNodeConverter(m_cluster, srcRel.getRowType(),
+ m_relToHiveColNameOptiqPosMap.get(srcRel), 0, false);
int srcRelRecordSz = srcRel.getRowType().getFieldCount();
for (int i = 0; i < obASTExprLst.size(); i++) {
// 2.1 Convert AST Expr to ExprNode
obASTExpr = (ASTNode) obASTExprLst.get(i);
- Map<ASTNode, ExprNodeDesc> astToExprNDescMap = TypeCheckProcFactory
- .genExprNode(obASTExpr, new TypeCheckCtx(inputRR));
- ExprNodeDesc obExprNDesc = astToExprNDescMap.get(obASTExpr
- .getChild(0));
+ Map<ASTNode, ExprNodeDesc> astToExprNDescMap = TypeCheckProcFactory.genExprNode(
+ obASTExpr, new TypeCheckCtx(inputRR));
+ ExprNodeDesc obExprNDesc = astToExprNDescMap.get(obASTExpr.getChild(0));
if (obExprNDesc == null)
- throw new SemanticException("Invalid order by expression: "
- + obASTExpr.toString());
+ throw new SemanticException("Invalid order by expression: " + obASTExpr.toString());
// 2.2 Convert ExprNode to RexNode
rnd = converter.convert(obExprNDesc);
@@ -12935,8 +12906,8 @@ public class SemanticAnalyzer extends Ba
} else {
fieldIndex = srcRelRecordSz + newVCLst.size();
newVCLst.add(rnd);
- vcASTTypePairs.add(new Pair<ASTNode, TypeInfo>((ASTNode) obASTExpr
- .getChild(0), obExprNDesc.getTypeInfo()));
+ vcASTTypePairs.add(new Pair<ASTNode, TypeInfo>((ASTNode) obASTExpr.getChild(0),
+ obExprNDesc.getTypeInfo()));
}
// 2.4 Determine the Direction of order by
@@ -12952,26 +12923,25 @@ public class SemanticAnalyzer extends Ba
// 3. Add Child Project Rel if needed
RelNode obInputRel = srcRel;
if (!newVCLst.isEmpty()) {
- List<RexNode> originalInputRefs = Lists.transform(srcRel.getRowType()
- .getFieldList(), new Function<RelDataTypeField, RexNode>() {
- @Override
- public RexNode apply(RelDataTypeField input) {
- return new RexInputRef(input.getIndex(), input.getType());
- }
- });
+ List<RexNode> originalInputRefs = Lists.transform(srcRel.getRowType().getFieldList(),
+ new Function<RelDataTypeField, RexNode>() {
+ @Override
+ public RexNode apply(RelDataTypeField input) {
+ return new RexInputRef(input.getIndex(), input.getType());
+ }
+ });
- obInputRel = HiveProjectRel.create(srcRel,
- CompositeList.of(originalInputRefs, newVCLst), null);
+ obInputRel = HiveProjectRel.create(srcRel, CompositeList.of(originalInputRefs, newVCLst),
+ null);
}
// 4. Construct SortRel
RelTraitSet traitSet = m_cluster.traitSetOf(HiveRel.CONVENTION);
- RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl
- .of(fieldCollations));
+ RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl.of(fieldCollations));
// TODO: Is it better to introduce a
// project on top to restrict VC from showing up in sortRel type
- RelNode sortRel = new HiveSortRel(m_cluster, traitSet, obInputRel,
- canonizedCollation, null, null);
+ RelNode sortRel = new HiveSortRel(m_cluster, traitSet, obInputRel, canonizedCollation,
+ null, null);
// 5. Construct OB Parent Rel If needed
// Construct a parent Project if OB has virtual columns(vc) otherwise
@@ -12980,8 +12950,7 @@ public class SemanticAnalyzer extends Ba
// type "select */.*..." then parent project is not needed
relToRet = sortRel;
if (!newVCLst.isEmpty()) {
- List<RexNode> obParentRelProjs = Lists.transform(srcRel
- .getRowType().getFieldList(),
+ List<RexNode> obParentRelProjs = Lists.transform(srcRel.getRowType().getFieldList(),
new Function<RelDataTypeField, RexNode>() {
@Override
public RexNode apply(RelDataTypeField input) {
@@ -13001,8 +12970,8 @@ public class SemanticAnalyzer extends Ba
// rowtype of sortrel is the type of it child; if child happens to be
// synthetic project that we introduced then that projectrel would
// contain the vc.
- ImmutableMap<String, Integer> hiveColNameOptiqPosMap = buildHiveToOptiqColumnMap(
- outputRR, relToRet);
+ ImmutableMap<String, Integer> hiveColNameOptiqPosMap = buildHiveToOptiqColumnMap(outputRR,
+ relToRet);
m_relToHiveRR.put(relToRet, outputRR);
m_relToHiveColNameOptiqPosMap.put(relToRet, hiveColNameOptiqPosMap);
}
@@ -13010,26 +12979,21 @@ public class SemanticAnalyzer extends Ba
return relToRet;
}
- private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel)
- throws SemanticException {
+ private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel) throws SemanticException {
HiveRel sortRel = null;
QBParseInfo qbp = getQBParseInfo(qb);
- Integer limit = qbp.getDestToLimit().get(
- qbp.getClauseNames().iterator().next());
+ Integer limit = qbp.getDestToLimit().get(qbp.getClauseNames().iterator().next());
if (limit != null) {
- RexNode fetch = m_cluster.getRexBuilder().makeExactLiteral(
- BigDecimal.valueOf(limit));
+ RexNode fetch = m_cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(limit));
RelTraitSet traitSet = m_cluster.traitSetOf(HiveRel.CONVENTION);
- RelCollation canonizedCollation = traitSet
- .canonize(RelCollationImpl.EMPTY);
- sortRel = new HiveSortRel(m_cluster, traitSet, srcRel,
- canonizedCollation, null, fetch);
+ RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl.EMPTY);
+ sortRel = new HiveSortRel(m_cluster, traitSet, srcRel, canonizedCollation, null, fetch);
RowResolver outputRR = new RowResolver();
RowResolver.add(outputRR, m_relToHiveRR.get(srcRel), 0);
- ImmutableMap<String, Integer> hiveColNameOptiqPosMap = buildHiveToOptiqColumnMap(
- outputRR, sortRel);
+ ImmutableMap<String, Integer> hiveColNameOptiqPosMap = buildHiveToOptiqColumnMap(outputRR,
+ sortRel);
m_relToHiveRR.put(sortRel, outputRR);
m_relToHiveColNameOptiqPosMap.put(sortRel, hiveColNameOptiqPosMap);
}
@@ -13053,7 +13017,8 @@ public class SemanticAnalyzer extends Ba
return pKeys;
}
- List<RexFieldCollation> getOrderKeys(OrderSpec os, RexNodeConverter converter, RowResolver inputRR) throws SemanticException {
+ List<RexFieldCollation> getOrderKeys(OrderSpec os, RexNodeConverter converter,
+ RowResolver inputRR) throws SemanticException {
List<RexFieldCollation> oKeys = new ArrayList<RexFieldCollation>();
if (os != null) {
List<OrderExpression> oExprs = os.getExpressions();
@@ -13094,7 +13059,8 @@ public class SemanticAnalyzer extends Ba
rwb = RexWindowBound.create(SqlWindow.createUnboundedPreceding(pos), null);
} else {
sc = (SqlCall) SqlWindow.createPreceding(amt, pos);
- rwb = RexWindowBound.create(sc, m_cluster.getRexBuilder().makeCall(sc.getOperator(), amtLiteral));
+ rwb = RexWindowBound.create(sc,
+ m_cluster.getRexBuilder().makeCall(sc.getOperator(), amtLiteral));
}
break;
@@ -13104,10 +13070,12 @@ public class SemanticAnalyzer extends Ba
case FOLLOWING:
if (amt == null) {
- rwb = RexWindowBound.create(SqlWindow.createUnboundedFollowing(new SqlParserPos(1, 1)), null);
+ rwb = RexWindowBound.create(SqlWindow.createUnboundedFollowing(new SqlParserPos(1, 1)),
+ null);
} else {
sc = (SqlCall) SqlWindow.createFollowing(amt, pos);
- rwb = RexWindowBound.create(sc, m_cluster.getRexBuilder().makeCall(sc.getOperator(), amtLiteral));
+ rwb = RexWindowBound.create(sc,
+ m_cluster.getRexBuilder().makeCall(sc.getOperator(), amtLiteral));
}
break;
}
@@ -13116,9 +13084,8 @@ public class SemanticAnalyzer extends Ba
return rwb;
}
-
- Pair<RexNode, TypeInfo> genWindowingProj(QB qb, ASTNode windowProjAst, int wndSpecASTIndx, int wndProjPos,
- RelNode srcRel) throws SemanticException {
+ Pair<RexNode, TypeInfo> genWindowingProj(QB qb, ASTNode windowProjAst, int wndSpecASTIndx,
+ int wndProjPos, RelNode srcRel) throws SemanticException {
RexNode w = null;
TypeInfo wHiveRetType = null;
QBParseInfo qbp = getQBParseInfo(qb);
@@ -13130,7 +13097,7 @@ public class SemanticAnalyzer extends Ba
// WindowFunctionSpec
wSpec.validateAndMakeEffective();
WindowExpressionSpec wExpSpec = wSpec.aliasToWdwExpr.get("_wcol" + wndProjPos);
- //TODO: Throw exception if wExpSpec is not of type WindowFunctionSpec
+ // TODO: Throw exception if wExpSpec is not of type WindowFunctionSpec
if (wExpSpec instanceof WindowFunctionSpec) {
// 2. Get Hive Aggregate Info
@@ -13158,8 +13125,8 @@ public class SemanticAnalyzer extends Ba
ImmutableList<RelDataType> optiqAggFnArgsType = optiqAggFnArgsTypeBldr.build();
// 5. Get Optiq Agg Fn
- final SqlAggFunction optiqAggFn = SqlFunctionConverter.getOptiqAggFn(hiveAggInfo.m_udfName,
- optiqAggFnArgsType, optiqAggFnRetType);
+ final SqlAggFunction optiqAggFn = SqlFunctionConverter.getOptiqAggFn(
+ hiveAggInfo.m_udfName, optiqAggFnArgsType, optiqAggFnRetType);
// 6. Translate Window spec
RowResolver inputRR = m_relToHiveRR.get(srcRel);
@@ -13175,7 +13142,7 @@ public class SemanticAnalyzer extends Ba
partitionKeys, ImmutableList.<RexFieldCollation> copyOf(orderKeys), lowerBound,
upperBound, isRows, true, false);
} else {
- //TODO: Convert to Semantic Exception
+ // TODO: Convert to Semantic Exception
throw new RuntimeException("Unsupported window Spec");
}
}
@@ -13196,11 +13163,10 @@ public class SemanticAnalyzer extends Ba
/**
* NOTE: there can only be one select caluse since we don't handle multi
* destination insert.
- *
+ *
* @throws SemanticException
*/
- private RelNode genSelectLogicalPlan(QB qb, RelNode srcRel)
- throws SemanticException {
+ private RelNode genSelectLogicalPlan(QB qb, RelNode srcRel) throws SemanticException {
boolean subQuery;
ArrayList<ExprNodeDesc> col_list = new ArrayList<ExprNodeDesc>();
ArrayList<Pair<Integer, RexNode>> windowingRexNodes = new ArrayList<Pair<Integer, RexNode>>();
@@ -13238,10 +13204,8 @@ public class SemanticAnalyzer extends Ba
ASTNode udtfExpr = (ASTNode) selExprList.getChild(posn).getChild(0);
GenericUDTF genericUDTF = null;
int udtfExprType = udtfExpr.getType();
- if (udtfExprType == HiveParser.TOK_FUNCTION
- || udtfExprType == HiveParser.TOK_FUNCTIONSTAR) {
- String funcName = TypeCheckProcFactory.DefaultExprProcessor
- .getFunctionText(udtfExpr, true);
+ if (udtfExprType == HiveParser.TOK_FUNCTION || udtfExprType == HiveParser.TOK_FUNCTIONSTAR) {
+ String funcName = TypeCheckProcFactory.DefaultExprProcessor.getFunctionText(udtfExpr, true);
FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);
if (fi != null) {
genericUDTF = fi.getGenericUDTF();
@@ -13262,16 +13226,16 @@ public class SemanticAnalyzer extends Ba
ASTNode child = (ASTNode) exprList.getChild(i);
boolean hasAsClause = (!isInTransform) && (child.getChildCount() == 2);
-
// 6.2 Handle windowing spec
int wndSpecASTIndx = -1;
- //TODO: is the check ((child.getChildCount() == 1) || hasAsClause) needed?
+ // TODO: is the check ((child.getChildCount() == 1) || hasAsClause)
+ // needed?
boolean isWindowSpec = (((child.getChildCount() == 1) || hasAsClause) && child.getChild(0)
.getType() == HiveParser.TOK_FUNCTION) ? ((wndSpecASTIndx = getWindowSpecIndx((ASTNode) child
.getChild(0))) > 0) : false;
if (isWindowSpec) {
- Pair<RexNode, TypeInfo> wtp = genWindowingProj(qb, (ASTNode) child.getChild(0), wndSpecASTIndx,
- wndProjPos, srcRel);
+ Pair<RexNode, TypeInfo> wtp = genWindowingProj(qb, (ASTNode) child.getChild(0),
+ wndSpecASTIndx, wndProjPos, srcRel);
windowingRexNodes.add(new Pair(pos, wtp.getFirst()));
// 6.2.1 Check if window expr has alias
@@ -13300,8 +13264,8 @@ public class SemanticAnalyzer extends Ba
// the
// AST's are slightly different.
if (child.getChildCount() > 2) {
- throw new SemanticException(generateErrorMessage(
- (ASTNode) child.getChild(2), ErrorMsg.INVALID_AS.getMsg()));
+ throw new SemanticException(generateErrorMessage((ASTNode) child.getChild(2),
+ ErrorMsg.INVALID_AS.getMsg()));
}
ASTNode expr;
@@ -13317,33 +13281,30 @@ public class SemanticAnalyzer extends Ba
// 6.5 Build ExprNode corresponding to colums
if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
- pos = genColListRegex(".*", expr.getChildCount() == 0 ? null
- : getUnescapedName((ASTNode) expr.getChild(0)).toLowerCase(),
- expr, col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs,
+ pos = genColListRegex(".*",
+ expr.getChildCount() == 0 ? null : getUnescapedName((ASTNode) expr.getChild(0))
+ .toLowerCase(), expr, col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs,
subQuery);
selectStar = true;
- } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL
- && !hasAsClause && !inputRR.getIsExprResolver()
+ } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL && !hasAsClause
+ && !inputRR.getIsExprResolver()
&& isRegex(unescapeIdentifier(expr.getChild(0).getText()), conf)) {
// In case the expression is a regex COL.
// This can only happen without AS clause
// We don't allow this for ExprResolver - the Group By case
- pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()),
- null, expr, col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs,
- subQuery);
+ pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()), null, expr,
+ col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs, subQuery);
} else if (expr.getType() == HiveParser.DOT
&& expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
- && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
- .getChild(0).getText().toLowerCase())) && !hasAsClause
- && !inputRR.getIsExprResolver()
+ && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0).getChild(0).getText()
+ .toLowerCase())) && !hasAsClause && !inputRR.getIsExprResolver()
&& isRegex(unescapeIdentifier(expr.getChild(1).getText()), conf)) {
// In case the expression is TABLE.COL (col can be regex).
// This can only happen without AS clause
// We don't allow this for ExprResolver - the Group By case
pos = genColListRegex(unescapeIdentifier(expr.getChild(1).getText()),
- unescapeIdentifier(expr.getChild(0).getChild(0).getText()
- .toLowerCase()), expr, col_list, inputRR, pos, out_rwsch,
- tabAliasesForAllProjs, subQuery);
+ unescapeIdentifier(expr.getChild(0).getChild(0).getText().toLowerCase()), expr,
+ col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs, subQuery);
} else {
// Case when this is an expression
TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR);
@@ -13361,15 +13322,13 @@ public class SemanticAnalyzer extends Ba
ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos),
exp.getWritableObjectInspector(), tabAlias, false);
- colInfo
- .setSkewedCol((exp instanceof ExprNodeColumnDesc) ? ((ExprNodeColumnDesc) exp)
- .isSkewedCol() : false);
+ colInfo.setSkewedCol((exp instanceof ExprNodeColumnDesc) ? ((ExprNodeColumnDesc) exp)
+ .isSkewedCol() : false);
out_rwsch.put(tabAlias, colAlias, colInfo);
if (exp instanceof ExprNodeColumnDesc) {
ExprNodeColumnDesc colExp = (ExprNodeColumnDesc) exp;
- String[] altMapping = inputRR.getAlternateMappings(colExp
- .getColumn());
+ String[] altMapping = inputRR.getAlternateMappings(colExp.getColumn());
if (altMapping != null) {
out_rwsch.put(altMapping[0], altMapping[1], colInfo);
}
@@ -13387,8 +13346,7 @@ public class SemanticAnalyzer extends Ba
// 8. Convert Hive projections to Optiq
List<RexNode> optiqColLst = new ArrayList<RexNode>();
- RexNodeConverter rexNodeConv = new RexNodeConverter(m_cluster,
- srcRel.getRowType(),
+ RexNodeConverter rexNodeConv = new RexNodeConverter(m_cluster, srcRel.getRowType(),
buildHiveColNameToInputPosMap(col_list, inputRR), 0, false);
for (ExprNodeDesc colExpr : col_list) {
optiqColLst.add(rexNodeConv.convert(colExpr));
@@ -13406,20 +13364,18 @@ public class SemanticAnalyzer extends Ba
* Hive treats names that start with '_c' as internalNames; so change the
* names so we don't run into this issue when converting back to Hive AST.
*/
- List<String> oFieldNames = Lists.transform(columnNames,
- new Function<String, String>() {
- @Override
- public String apply(String hName) {
- return "_o_" + hName;
- }
- });
+ List<String> oFieldNames = Lists.transform(columnNames, new Function<String, String>() {
+ @Override
+ public String apply(String hName) {
+ return "_o_" + hName;
+ }
+ });
// 10.2 Build Optiq Rel Node for project using converted projections & col
// names
HiveRel selRel = HiveProjectRel.create(srcRel, optiqColLst, oFieldNames);
// 11. Keep track of colname-to-posmap && RR for new select
- this.m_relToHiveColNameOptiqPosMap.put(selRel,
- buildHiveToOptiqColumnMap(out_rwsch, selRel));
+ this.m_relToHiveColNameOptiqPosMap.put(selRel, buildHiveToOptiqColumnMap(out_rwsch, selRel));
this.m_relToHiveRR.put(selRel, out_rwsch);
return selRel;
@@ -13433,8 +13389,8 @@ public class SemanticAnalyzer extends Ba
RelNode qbexpr1Ops = genLogicalPlan(qbexpr.getQBExpr1());
RelNode qbexpr2Ops = genLogicalPlan(qbexpr.getQBExpr2());
- return genUnionLogicalPlan(qbexpr.getAlias(), qbexpr.getQBExpr1()
- .getAlias(), qbexpr1Ops, qbexpr.getQBExpr2().getAlias(), qbexpr2Ops);
+ return genUnionLogicalPlan(qbexpr.getAlias(), qbexpr.getQBExpr1().getAlias(), qbexpr1Ops,
+ qbexpr.getQBExpr2().getAlias(), qbexpr2Ops);
}
return null;
}
@@ -13510,8 +13466,7 @@ public class SemanticAnalyzer extends Ba
newRR.put(alias, tmp[1], newCi);
}
m_relToHiveRR.put(srcRel, newRR);
- m_relToHiveColNameOptiqPosMap.put(srcRel,
- buildHiveToOptiqColumnMap(newRR, srcRel));
+ m_relToHiveColNameOptiqPosMap.put(srcRel, buildHiveToOptiqColumnMap(newRR, srcRel));
}
// 7. Build Rel for OB Clause
@@ -13529,22 +13484,20 @@ public class SemanticAnalyzer extends Ba
return srcRel;
}
- private RelNode genGBHavingLogicalPlan(QB qb, RelNode srcRel,
- Map<String, RelNode> aliasToRel)
+ private RelNode genGBHavingLogicalPlan(QB qb, RelNode srcRel, Map<String, RelNode> aliasToRel)
throws SemanticException {
RelNode gbFilter = null;
QBParseInfo qbp = getQBParseInfo(qb);
- ASTNode havingClause = qbp.getHavingForClause(qbp.getClauseNames()
- .iterator().next());
+ ASTNode havingClause = qbp.getHavingForClause(qbp.getClauseNames().iterator().next());
if (havingClause != null)
- gbFilter = genFilterRelNode(qb, (ASTNode) havingClause.getChild(0), srcRel, aliasToRel, true);
+ gbFilter = genFilterRelNode(qb, (ASTNode) havingClause.getChild(0), srcRel, aliasToRel,
+ true);
return gbFilter;
}
- private ImmutableMap<String, Integer> buildHiveToOptiqColumnMap(
- RowResolver rr, RelNode rNode) {
+ private ImmutableMap<String, Integer> buildHiveToOptiqColumnMap(RowResolver rr, RelNode rNode) {
ImmutableMap.Builder<String, Integer> b = new ImmutableMap.Builder<String, Integer>();
int i = 0;
for (ColumnInfo ci : rr.getRowSchema().getSignature()) {
@@ -13558,14 +13511,12 @@ public class SemanticAnalyzer extends Ba
// Build a map of Hive column Names (ExprNodeColumnDesc Name)
// to the positions of those projections in the input
Map<Integer, ExprNodeDesc> hashCodeTocolumnDescMap = new HashMap<Integer, ExprNodeDesc>();
- ExprNodeDescUtils
- .getExprNodeColumnDesc(col_list, hashCodeTocolumnDescMap);
+ ExprNodeDescUtils.getExprNodeColumnDesc(col_list, hashCodeTocolumnDescMap);
ImmutableMap.Builder<String, Integer> hiveColNameToInputPosMapBuilder = new ImmutableMap.Builder<String, Integer>();
String exprNodecolName;
for (ExprNodeDesc exprDesc : hashCodeTocolumnDescMap.values()) {
exprNodecolName = ((ExprNodeColumnDesc) exprDesc).getColumn();
- hiveColNameToInputPosMapBuilder.put(exprNodecolName,
- inputRR.getPosition(exprNodecolName));
+ hiveColNameToInputPosMapBuilder.put(exprNodecolName, inputRR.getPosition(exprNodecolName));
}
return hiveColNameToInputPosMapBuilder.build();