Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/19 06:38:29 UTC

svn commit: r1626120 - in /hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql: optimizer/optiq/HiveTypeSystemImpl.java optimizer/optiq/translator/ASTConverter.java parse/SemanticAnalyzer.java

Author: gunther
Date: Fri Sep 19 04:38:28 2014
New Revision: 1626120

URL: http://svn.apache.org/r1626120
Log:
HIVE-8187: CBO: Change Optiq Type System Precision/scale to use Hive Type System Precision/Scale (Laljo John Pullokkaran via Gunther Hagleitner)

Added:
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveTypeSystemImpl.java
Modified:
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java
    hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java

Added: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveTypeSystemImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveTypeSystemImpl.java?rev=1626120&view=auto
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveTypeSystemImpl.java (added)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/HiveTypeSystemImpl.java Fri Sep 19 04:38:28 2014
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.optimizer.optiq;
+
+import org.eigenbase.reltype.RelDataTypeSystemImpl;
+import org.eigenbase.sql.type.SqlTypeName;
+
+public class HiveTypeSystemImpl extends RelDataTypeSystemImpl {
+  // TODO: This should come from type system; Currently there is no definition
+  // in type system for this.
+  private static final int MAX_DECIMAL_PRECISION     = 38;
+  private static final int MAX_DECIMAL_SCALE         = 38;
+  private static final int DEFAULT_DECIMAL_PRECISION = 10;
+  private static final int MAX_VARCHAR_PRECISION     = 65535;
+  private static final int MAX_CHAR_PRECISION        = 255;
+  private static final int MAX_BINARY_PRECISION      = Integer.MAX_VALUE;
+  private static final int MAX_TIMESTAMP_PRECISION   = 9;
+
+  @Override
+  public int getMaxScale(SqlTypeName typeName) {
+    switch (typeName) {
+    case DECIMAL:
+      return getMaxNumericScale();
+    case INTERVAL_DAY_TIME:
+    case INTERVAL_YEAR_MONTH:
+      return SqlTypeName.MAX_INTERVAL_FRACTIONAL_SECOND_PRECISION;
+    default:
+      return -1;
+    }
+  }
+
+  @Override
+  public int getDefaultPrecision(SqlTypeName typeName) {
+    switch (typeName) {
+    // Hive will always require user to specify exact sizes for char, varchar;
+    // Binary doesn't need any sizes; Decimal has the default of 10.
+    case CHAR:
+    case VARCHAR:
+    case BINARY:
+    case VARBINARY:
+    case TIME:
+    case TIMESTAMP:
+      return getMaxPrecision(typeName);
+    case DECIMAL:
+      return DEFAULT_DECIMAL_PRECISION;
+    case INTERVAL_DAY_TIME:
+    case INTERVAL_YEAR_MONTH:
+      return SqlTypeName.DEFAULT_INTERVAL_START_PRECISION;
+    default:
+      return -1;
+    }
+  }
+
+  @Override
+  public int getMaxPrecision(SqlTypeName typeName) {
+    switch (typeName) {
+    case DECIMAL:
+      return getMaxNumericPrecision();
+    case VARCHAR:
+      return MAX_VARCHAR_PRECISION;
+    case CHAR:
+      return MAX_CHAR_PRECISION;
+    case VARBINARY:
+    case BINARY:
+      return MAX_BINARY_PRECISION;
+    case TIME:
+    case TIMESTAMP:
+      return MAX_TIMESTAMP_PRECISION;
+    case INTERVAL_DAY_TIME:
+    case INTERVAL_YEAR_MONTH:
+      return SqlTypeName.MAX_INTERVAL_START_PRECISION;
+    default:
+      return -1;
+    }
+  }
+
+  @Override
+  public int getMaxNumericScale() {
+    return MAX_DECIMAL_SCALE;
+  }
+
+  @Override
+  public int getMaxNumericPrecision() {
+    return MAX_DECIMAL_PRECISION;
+  }
+}

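For reference, a minimal sketch (not part of this commit; the wrapper class and main method are hypothetical) that exercises the limits the new HiveTypeSystemImpl reports. Only methods and constants defined in the file added above are used:

    package org.apache.hadoop.hive.ql.optimizer.optiq;

    import org.eigenbase.sql.type.SqlTypeName;

    public class HiveTypeSystemSketch {
      public static void main(String[] args) {
        HiveTypeSystemImpl typeSystem = new HiveTypeSystemImpl();
        // Hive decimals: precision and scale capped at 38, default precision 10.
        System.out.println(typeSystem.getMaxNumericPrecision());                 // 38
        System.out.println(typeSystem.getMaxScale(SqlTypeName.DECIMAL));         // 38
        System.out.println(typeSystem.getDefaultPrecision(SqlTypeName.DECIMAL)); // 10
        // char/varchar fall back to their Hive maximums when no size is given.
        System.out.println(typeSystem.getDefaultPrecision(SqlTypeName.CHAR));    // 255
        System.out.println(typeSystem.getDefaultPrecision(SqlTypeName.VARCHAR)); // 65535
        // Types the switch statements do not handle report -1.
        System.out.println(typeSystem.getMaxScale(SqlTypeName.BOOLEAN));         // -1
      }
    }
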
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java?rev=1626120&r1=1626119&r2=1626120&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/ASTConverter.java Fri Sep 19 04:38:28 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.optimizer.optiq.translator;
 
+import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
@@ -56,7 +57,6 @@ import org.eigenbase.rex.RexWindow;
 import org.eigenbase.rex.RexWindowBound;
 import org.eigenbase.sql.SqlKind;
 import org.eigenbase.sql.SqlOperator;
-import org.eigenbase.sql.type.BasicSqlType;
 import org.eigenbase.sql.type.SqlTypeName;
 
 import com.google.common.collect.Iterables;
@@ -79,7 +79,8 @@ public class ASTConverter {
     hiveAST = new HiveAST();
   }
 
-  public static ASTNode convert(final RelNode relNode, List<FieldSchema> resultSchema) throws OptiqSemanticException {
+  public static ASTNode convert(final RelNode relNode, List<FieldSchema> resultSchema)
+      throws OptiqSemanticException {
     SortRel sortrel = null;
     RelNode root = DerivedTableInjector.convertOpTree(relNode, resultSchema);
 
@@ -121,7 +122,8 @@ public class ASTConverter {
     if (groupBy != null) {
       ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_GROUPBY, "TOK_GROUPBY");
       for (int i : BitSets.toIter(groupBy.getGroupSet())) {
-        RexInputRef iRef = new RexInputRef(i, new BasicSqlType(SqlTypeName.ANY));
+        RexInputRef iRef = new RexInputRef(i, groupBy.getCluster().getTypeFactory()
+            .createSqlType(SqlTypeName.ANY));
         b.add(iRef.accept(new RexVisitor(schema)));
       }
 
@@ -142,12 +144,19 @@ public class ASTConverter {
      * 6. Project
      */
     ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_SELECT, "TOK_SELECT");
-    int i = 0;
 
-    for (RexNode r : select.getChildExps()) {
-      ASTNode selectExpr = ASTBuilder.selectExpr(r.accept(new RexVisitor(schema)), select
-          .getRowType().getFieldNames().get(i++));
+    if (select.getChildExps().isEmpty()) {
+      RexLiteral r = select.getCluster().getRexBuilder().makeExactLiteral(new BigDecimal(1));
+      ASTNode selectExpr = ASTBuilder.selectExpr(ASTBuilder.literal(r), "1");
       b.add(selectExpr);
+    } else {
+      int i = 0;
+
+      for (RexNode r : select.getChildExps()) {
+        ASTNode selectExpr = ASTBuilder.selectExpr(r.accept(new RexVisitor(schema)), select
+            .getRowType().getFieldNames().get(i++));
+        b.add(selectExpr);
+      }
     }
     hiveAST.select = b.node();
 
@@ -292,9 +301,8 @@ public class ASTConverter {
 
     @Override
     public ASTNode visitFieldAccess(RexFieldAccess fieldAccess) {
-      return ASTBuilder
-      .construct(HiveParser.DOT, ".")
-      .add(super.visitFieldAccess(fieldAccess)).add(HiveParser.Identifier, fieldAccess.getField().getName()).node();
+      return ASTBuilder.construct(HiveParser.DOT, ".").add(super.visitFieldAccess(fieldAccess))
+          .add(HiveParser.Identifier, fieldAccess.getField().getName()).node();
     }
 
     @Override
@@ -509,7 +517,8 @@ public class ASTConverter {
             "TOK_FUNCTIONSTAR") : ASTBuilder.construct(HiveParser.TOK_FUNCTION, "TOK_FUNCTION");
         b.add(HiveParser.Identifier, agg.getAggregation().getName());
         for (int i : agg.getArgList()) {
-          RexInputRef iRef = new RexInputRef(i, new BasicSqlType(SqlTypeName.ANY));
+          RexInputRef iRef = new RexInputRef(i, gBy.getCluster().getTypeFactory()
+              .createSqlType(SqlTypeName.ANY));
           b.add(iRef.accept(new RexVisitor(src)));
         }
         add(new ColumnInfo(null, b.node()));
@@ -521,7 +530,7 @@ public class ASTConverter {
      * 1. ProjectRel will always be child of SortRel.<br>
      * 2. In Optiq every projection in ProjectRelBase is uniquely named
      * (unambigous) without using table qualifier (table name).<br>
-     *
+     * 
      * @param order
      *          Hive Sort Rel Node
      * @return Schema

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1626120&r1=1626119&r2=1626120&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Sep 19 04:38:28 2014
@@ -115,6 +115,7 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
 import org.apache.hadoop.hive.ql.optimizer.optiq.HiveDefaultRelMetadataProvider;
 import org.apache.hadoop.hive.ql.optimizer.optiq.HiveOptiqUtil;
+import org.apache.hadoop.hive.ql.optimizer.optiq.HiveTypeSystemImpl;
 import org.apache.hadoop.hive.ql.optimizer.optiq.OptiqSemanticException;
 import org.apache.hadoop.hive.ql.optimizer.optiq.RelOptHiveTable;
 import org.apache.hadoop.hive.ql.optimizer.optiq.TraitsUtil;
@@ -12177,7 +12178,8 @@ public class SemanticAnalyzer extends Ba
       this.partitionCache = partitionCache;
 
       try {
-        optimizedOptiqPlan = Frameworks.withPlanner(this);
+        optimizedOptiqPlan = Frameworks.withPlanner(this,
+            Frameworks.newConfigBuilder().typeSystem(new HiveTypeSystemImpl()).build());
       } catch (Exception e) {
         if (semanticException != null)
           throw semanticException;
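
For illustration, a hedged sketch of the wiring in the hunk above taken in isolation. The wrapper class is hypothetical, and it assumes the Optiq 0.9 line the cbo branch builds against, where Frameworks lives in net.hydromatic.optiq.tools and newConfigBuilder().build() yields a FrameworkConfig; only the builder calls that appear in the diff are relied on:

    package org.apache.hadoop.hive.ql.parse;

    import net.hydromatic.optiq.tools.FrameworkConfig;
    import net.hydromatic.optiq.tools.Frameworks;

    import org.apache.hadoop.hive.ql.optimizer.optiq.HiveTypeSystemImpl;

    public class HiveFrameworkConfigSketch {
      // Registering HiveTypeSystemImpl here means every RelDataType the planner's
      // type factory creates (DECIMAL in particular) is bounded by Hive's
      // precision/scale limits instead of Optiq's built-in defaults.
      public static FrameworkConfig hiveConfig() {
        return Frameworks.newConfigBuilder()
            .typeSystem(new HiveTypeSystemImpl())
            .build();
      }
    }

A config built this way is what the change above passes as the second argument to Frameworks.withPlanner(this, ...).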