Posted to commits@impala.apache.org by kw...@apache.org on 2016/09/30 02:14:23 UTC

[06/61] [partial] incubator-impala git commit: IMPALA-3786: Replace "cloudera" with "apache" (part 1)

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/ArithmeticExpr.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ArithmeticExpr.java b/fe/src/main/java/org/apache/impala/analysis/ArithmeticExpr.java
new file mode 100644
index 0000000..bf8b0ea
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/ArithmeticExpr.java
@@ -0,0 +1,268 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import com.cloudera.impala.catalog.Db;
+import com.cloudera.impala.catalog.Function.CompareMode;
+import com.cloudera.impala.catalog.ScalarFunction;
+import com.cloudera.impala.catalog.ScalarType;
+import com.cloudera.impala.catalog.Type;
+import com.cloudera.impala.common.AnalysisException;
+import com.cloudera.impala.thrift.TExprNode;
+import com.cloudera.impala.thrift.TExprNodeType;
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+public class ArithmeticExpr extends Expr {
+  enum OperatorPosition {
+    BINARY_INFIX,
+    UNARY_PREFIX,
+    UNARY_POSTFIX,
+  }
+
+  enum Operator {
+    MULTIPLY("*", "multiply", OperatorPosition.BINARY_INFIX),
+    DIVIDE("/", "divide", OperatorPosition.BINARY_INFIX),
+    MOD("%", "mod", OperatorPosition.BINARY_INFIX),
+    INT_DIVIDE("DIV", "int_divide", OperatorPosition.BINARY_INFIX),
+    ADD("+", "add", OperatorPosition.BINARY_INFIX),
+    SUBTRACT("-", "subtract", OperatorPosition.BINARY_INFIX),
+    BITAND("&", "bitand", OperatorPosition.BINARY_INFIX),
+    BITOR("|", "bitor", OperatorPosition.BINARY_INFIX),
+    BITXOR("^", "bitxor", OperatorPosition.BINARY_INFIX),
+    BITNOT("~", "bitnot", OperatorPosition.UNARY_PREFIX),
+    FACTORIAL("!", "factorial", OperatorPosition.UNARY_POSTFIX);
+
+    private final String description_;
+    private final String name_;
+    private final OperatorPosition pos_;
+
+    private Operator(String description, String name, OperatorPosition pos) {
+      this.description_ = description;
+      this.name_ = name;
+      this.pos_ = pos;
+    }
+
+    @Override
+    public String toString() { return description_; }
+    public String getName() { return name_; }
+    public OperatorPosition getPos() { return pos_; }
+
+    public boolean isUnary() {
+      return pos_ == OperatorPosition.UNARY_PREFIX ||
+             pos_ == OperatorPosition.UNARY_POSTFIX;
+    }
+
+    public boolean isBinary() {
+      return pos_ == OperatorPosition.BINARY_INFIX;
+    }
+  }
+
+  private final Operator op_;
+
+  public Operator getOp() { return op_; }
+
+  public ArithmeticExpr(Operator op, Expr e1, Expr e2) {
+    super();
+    this.op_ = op;
+    Preconditions.checkNotNull(e1);
+    children_.add(e1);
+    Preconditions.checkArgument((op.isUnary() && e2 == null) ||
+        (op.isBinary() && e2 != null));
+    if (e2 != null) children_.add(e2);
+  }
+
+  /**
+   * Copy c'tor used in clone().
+   */
+  protected ArithmeticExpr(ArithmeticExpr other) {
+    super(other);
+    op_ = other.op_;
+  }
+
+  public static void initBuiltins(Db db) {
+    for (Type t: Type.getNumericTypes()) {
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.MULTIPLY.getName(), Lists.newArrayList(t, t), t));
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.ADD.getName(), Lists.newArrayList(t, t), t));
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.SUBTRACT.getName(), Lists.newArrayList(t, t), t));
+    }
+    db.addBuiltin(ScalarFunction.createBuiltinOperator(
+        Operator.DIVIDE.getName(),
+        Lists.<Type>newArrayList(Type.DOUBLE, Type.DOUBLE),
+        Type.DOUBLE));
+    db.addBuiltin(ScalarFunction.createBuiltinOperator(
+        Operator.DIVIDE.getName(),
+        Lists.<Type>newArrayList(Type.DECIMAL, Type.DECIMAL),
+        Type.DECIMAL));
+
+    /*
+     * MOD(), FACTORIAL(), BITAND(), BITOR(), BITXOR(), and BITNOT() are registered as
+     * builtins, see impala_functions.py
+     */
+    for (Type t: Type.getIntegerTypes()) {
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.INT_DIVIDE.getName(), Lists.newArrayList(t, t), t));
+    }
+  }
+
+  @Override
+  public String debugString() {
+    return Objects.toStringHelper(this)
+        .add("op", op_)
+        .addValue(super.debugString())
+        .toString();
+  }
+
+  @Override
+  public String toSqlImpl() {
+    if (children_.size() == 1) {
+      if (op_.getPos() == OperatorPosition.UNARY_PREFIX) {
+        return op_.toString() + getChild(0).toSql();
+      } else {
+        assert(op_.getPos() == OperatorPosition.UNARY_POSTFIX);
+        return getChild(0).toSql() + op_.toString();
+      }
+    } else {
+      Preconditions.checkState(children_.size() == 2);
+      return getChild(0).toSql() + " " + op_.toString() + " " + getChild(1).toSql();
+    }
+  }
+
+  @Override
+  protected void toThrift(TExprNode msg) {
+    msg.node_type = TExprNodeType.FUNCTION_CALL;
+  }
+
+  /**
+   * Inserts a cast from child[childIdx] to targetType if one is necessary.
+   * Note this is different from Expr.castChild(): for decimal arithmetic the cast
+   * is handled as part of the operator and, in general, the return type does not
+   * match the input types.
+   */
+  void castChild(int childIdx, Type targetType) throws AnalysisException {
+    Type t = getChild(childIdx).getType();
+    if (t.matchesType(targetType)) return;
+    if (targetType.isDecimal() && !t.isNull()) {
+      Preconditions.checkState(t.isScalarType());
+      targetType = ((ScalarType) t).getMinResolutionDecimal();
+    }
+    castChild(targetType, childIdx);
+  }
+
+  @Override
+  public void analyze(Analyzer analyzer) throws AnalysisException {
+    if (isAnalyzed_) return;
+    super.analyze(analyzer);
+    for (Expr child: children_) {
+      Expr operand = (Expr) child;
+      if (!operand.type_.isNumericType() && !operand.type_.isNull()) {
+        String errMsg = "Arithmetic operation requires numeric operands: " + toSql();
+        if (operand instanceof Subquery && !operand.type_.isScalarType()) {
+          errMsg = "Subquery must return a single row: " + operand.toSql();
+        }
+        throw new AnalysisException(errMsg);
+      }
+    }
+
+    convertNumericLiteralsFromDecimal(analyzer);
+    Type t0 = getChild(0).getType();
+    Type t1 = null;
+    if (op_.isUnary()) {
+      Preconditions.checkState(children_.size() == 1);
+    } else if (op_.isBinary()) {
+      Preconditions.checkState(children_.size() == 2);
+      t1 = getChild(1).getType();
+    }
+    if (hasChildCosts()) evalCost_ = getChildCosts() + ARITHMETIC_OP_COST;
+
+    String fnName = op_.getName();
+    switch (op_) {
+      case ADD:
+      case SUBTRACT:
+      case DIVIDE:
+      case MULTIPLY:
+      case MOD:
+        type_ = TypesUtil.getArithmeticResultType(t0, t1, op_);
+        // If both of the children are null, we'll default to the DOUBLE version of the
+        // operator. This prevents the BE from seeing NULL_TYPE.
+        if (type_.isNull()) type_ = Type.DOUBLE;
+        break;
+
+      case INT_DIVIDE:
+      case BITAND:
+      case BITOR:
+      case BITXOR:
+        if ((!t0.isNull() && !t0.isIntegerType()) ||
+            (!t1.isNull() && !t1.isIntegerType())) {
+          throw new AnalysisException("Invalid non-integer argument to operation '" +
+              op_.toString() + "': " + this.toSql());
+        }
+        type_ = Type.getAssignmentCompatibleType(t0, t1, false);
+        // If both of the children are null, we'll default to the INT version of the
+        // operator. This prevents the BE from seeing NULL_TYPE.
+        if (type_.isNull()) type_ = Type.INT;
+        Preconditions.checkState(type_.isIntegerType());
+        break;
+      case BITNOT:
+      case FACTORIAL:
+        if (!t0.isNull() && !t0.isIntegerType()) {
+          throw new AnalysisException("'" + op_.toString() + "'" +
+              " operation only allowed on integer types: " + toSql());
+        }
+        // Special-case NULL to resolve to the appropriate type.
+        if (op_ == Operator.BITNOT) {
+          if (t0.isNull()) castChild(0, Type.INT);
+        } else {
+          assert(op_ == Operator.FACTORIAL);
+          if (t0.isNull()) castChild(0, Type.BIGINT);
+        }
+        fn_ = getBuiltinFunction(analyzer, op_.getName(), collectChildReturnTypes(),
+            CompareMode.IS_SUPERTYPE_OF);
+        Preconditions.checkNotNull(fn_);
+        castForFunctionCall(false);
+        type_ = fn_.getReturnType();
+        return;
+      default:
+        // the programmer forgot to deal with a case
+        Preconditions.checkState(false,
+            "Unknown arithmetic operation " + op_.toString() + " in: " + this.toSql());
+        break;
+    }
+
+    // Don't cast from decimal to decimal. The BE function can just handle this.
+    if (!(type_.isDecimal() && t0.isDecimal())) castChild(0, type_);
+    if (!(type_.isDecimal() && t1.isDecimal())) castChild(1, type_);
+    t0 = getChild(0).getType();
+    t1 = getChild(1).getType();
+
+    fn_ = getBuiltinFunction(analyzer, fnName, collectChildReturnTypes(),
+        CompareMode.IS_IDENTICAL);
+    if (fn_ == null) {
+      Preconditions.checkState(false, String.format("No match " +
+          "for '%s' with operand types %s and %s", toSql(), t0, t1));
+    }
+    Preconditions.checkState(type_.matchesType(fn_.getReturnType()));
+  }
+
+  @Override
+  public Expr clone() { return new ArithmeticExpr(this); }
+}
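
ArithmeticExpr.analyze() above defaults NULL-typed operands to DOUBLE for +, -, *, /
and % and to INT for DIV and the binary bitwise operators, so the backend never sees
NULL_TYPE. Below is a minimal self-contained sketch of that defaulting rule; the names
(NullTypeDefaultSketch, Kind, resultWhenBothNull) are stand-ins, not Impala frontend
classes.

// Illustrative sketch only; mirrors the NULL-type defaulting described in the
// comments of ArithmeticExpr.analyze(). Stand-in types, not Impala classes.
public class NullTypeDefaultSketch {
  enum Kind { INT, DOUBLE }

  // ADD/SUBTRACT/MULTIPLY/DIVIDE/MOD fall back to DOUBLE when both operands are NULL;
  // INT_DIVIDE and the binary bitwise operators fall back to INT.
  static Kind resultWhenBothNull(String op) {
    switch (op) {
      case "+": case "-": case "*": case "/": case "%": return Kind.DOUBLE;
      case "DIV": case "&": case "|": case "^": return Kind.INT;
      default: throw new IllegalArgumentException("unknown operator " + op);
    }
  }

  public static void main(String[] args) {
    System.out.println("NULL + NULL resolves to " + resultWhenBothNull("+"));     // DOUBLE
    System.out.println("NULL DIV NULL resolves to " + resultWhenBothNull("DIV")); // INT
  }
}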

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/AuthorizationStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/AuthorizationStmt.java b/fe/src/main/java/org/apache/impala/analysis/AuthorizationStmt.java
new file mode 100644
index 0000000..4e88014
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/AuthorizationStmt.java
@@ -0,0 +1,49 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import com.cloudera.impala.authorization.User;
+import com.cloudera.impala.common.AnalysisException;
+import com.google.common.base.Strings;
+
+/**
+ * Base class for all authorization statements - CREATE/DROP/SHOW ROLE, GRANT/REVOKE
+ * ROLE/privilege, etc.
+ */
+public class AuthorizationStmt extends StatementBase {
+  // Set during analysis
+  protected User requestingUser_;
+
+  @Override
+  public void analyze(Analyzer analyzer) throws AnalysisException {
+    if (!analyzer.getAuthzConfig().isEnabled()) {
+      throw new AnalysisException("Authorization is not enabled. To enable " +
+          "authorization restart Impala with the --server_name=<name> flag.");
+    }
+    if (analyzer.getAuthzConfig().isFileBasedPolicy()) {
+      throw new AnalysisException("Cannot execute authorization statement using a file" +
+          " based policy. To disable file based policies, restart Impala without the " +
+          "-authorization_policy_file flag set.");
+    }
+    if (Strings.isNullOrEmpty(analyzer.getUser().getName())) {
+      throw new AnalysisException("Cannot execute authorization statement with an " +
+          "empty username.");
+    }
+    requestingUser_ = analyzer.getUser();
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/BaseTableRef.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/BaseTableRef.java b/fe/src/main/java/org/apache/impala/analysis/BaseTableRef.java
new file mode 100644
index 0000000..69780e0
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/BaseTableRef.java
@@ -0,0 +1,98 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import com.cloudera.impala.catalog.HdfsTable;
+import com.cloudera.impala.catalog.Table;
+import com.cloudera.impala.common.AnalysisException;
+import com.google.common.base.Preconditions;
+
+/**
+ * Represents a reference to an actual table, such as an Hdfs or HBase table.
+ * BaseTableRefs are instantiated as a result of table resolution during analysis
+ * of a SelectStmt.
+ */
+public class BaseTableRef extends TableRef {
+
+  /**
+   * Create a BaseTableRef from the original unresolved table ref as well as
+   * its resolved path. Sets table aliases and join-related attributes.
+   */
+  public BaseTableRef(TableRef tableRef, Path resolvedPath) {
+    super(tableRef);
+    Preconditions.checkState(resolvedPath.isResolved());
+    Preconditions.checkState(resolvedPath.isRootedAtTable());
+    resolvedPath_ = resolvedPath;
+    // Set implicit aliases if no explicit one was given.
+    if (hasExplicitAlias()) return;
+    aliases_ = new String[] {
+        getTable().getTableName().toString().toLowerCase(),
+        getTable().getName().toLowerCase() };
+  }
+
+  /**
+   * C'tor for cloning.
+   */
+  private BaseTableRef(BaseTableRef other) {
+    super(other);
+  }
+
+  /**
+   * Register this table ref and then analyze the Join clause.
+   */
+  @Override
+  public void analyze(Analyzer analyzer) throws AnalysisException {
+    if (isAnalyzed_) return;
+    analyzer.registerAuthAndAuditEvent(resolvedPath_.getRootTable(), analyzer);
+    desc_ = analyzer.registerTableRef(this);
+    isAnalyzed_ = true;
+    analyzeHints(analyzer);
+    analyzeJoin(analyzer);
+    analyzeSkipHeaderLineCount();
+  }
+
+  @Override
+  protected String tableRefToSql() {
+    // Enclose the alias in quotes if Hive cannot parse it without quotes.
+    // This is needed for view compatibility between Impala and Hive.
+    String aliasSql = null;
+    String alias = getExplicitAlias();
+    if (alias != null) aliasSql = ToSqlUtils.getIdentSql(alias);
+    String tableHintsSql = ToSqlUtils.getPlanHintsSql(tableHints_);
+    return getTable().getTableName().toSql() +
+        ((aliasSql != null) ? " " + aliasSql : "") +
+        (!tableHintsSql.isEmpty() ? " " + tableHintsSql : "");
+  }
+
+  public String debugString() { return tableRefToSql(); }
+  @Override
+  protected TableRef clone() { return new BaseTableRef(this); }
+
+  /**
+   * Analyze the 'skip.header.line.count' property.
+   */
+  private void analyzeSkipHeaderLineCount() throws AnalysisException {
+    Table table = getTable();
+    if (!(table instanceof HdfsTable)) return;
+    HdfsTable hdfsTable = (HdfsTable)table;
+
+    StringBuilder error = new StringBuilder();
+    hdfsTable.parseSkipHeaderLineCount(error);
+    if (error.length() > 0) throw new AnalysisException(error.toString());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/BetweenPredicate.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/BetweenPredicate.java b/fe/src/main/java/org/apache/impala/analysis/BetweenPredicate.java
new file mode 100644
index 0000000..d76a4c6
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/BetweenPredicate.java
@@ -0,0 +1,158 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import com.cloudera.impala.common.AnalysisException;
+import com.cloudera.impala.thrift.TExprNode;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+/**
+ * Class describing between predicates. After successful analysis, we rewrite
+ * the between predicate to a conjunctive/disjunctive compound predicate
+ * to be handed to the backend.
+ */
+public class BetweenPredicate extends Predicate {
+
+  private final boolean isNotBetween_;
+
+  // After successful analysis, we rewrite this between predicate
+  // into a conjunctive/disjunctive compound predicate.
+  private CompoundPredicate rewrittenPredicate_;
+
+  // Children of the BetweenPredicate, since this.children should hold the children
+  // of the rewritten predicate to make sure toThrift() picks up the right ones.
+  private ArrayList<Expr> originalChildren_ = Lists.newArrayList();
+
+  // First child is the comparison expr which should be in [lowerBound, upperBound].
+  public BetweenPredicate(Expr compareExpr, Expr lowerBound, Expr upperBound,
+      boolean isNotBetween) {
+    originalChildren_.add(compareExpr);
+    originalChildren_.add(lowerBound);
+    originalChildren_.add(upperBound);
+    this.isNotBetween_ = isNotBetween;
+  }
+
+  /**
+   * Copy c'tor used in clone().
+   */
+  protected BetweenPredicate(BetweenPredicate other) {
+    super(other);
+    isNotBetween_ = other.isNotBetween_;
+    originalChildren_ = Expr.cloneList(other.originalChildren_);
+    if (other.rewrittenPredicate_ != null) {
+      rewrittenPredicate_ = (CompoundPredicate) other.rewrittenPredicate_.clone();
+    }
+  }
+
+  public CompoundPredicate getRewrittenPredicate() {
+    Preconditions.checkState(isAnalyzed_);
+    return rewrittenPredicate_;
+  }
+  public ArrayList<Expr> getOriginalChildren() { return originalChildren_; }
+
+  @Override
+  public void analyze(Analyzer analyzer) throws AnalysisException {
+    if (isAnalyzed_) return;
+    super.analyze(analyzer);
+    if (originalChildren_.get(0) instanceof Subquery &&
+        (originalChildren_.get(1) instanceof Subquery ||
+         originalChildren_.get(2) instanceof Subquery)) {
+      throw new AnalysisException("Comparison between subqueries is not " +
+          "supported in a between predicate: " + toSqlImpl());
+    }
+    analyzer.castAllToCompatibleType(originalChildren_);
+
+    // Rewrite between predicate into a conjunctive/disjunctive compound predicate.
+    if (isNotBetween_) {
+      // Rewrite into disjunction.
+      Predicate lower = new BinaryPredicate(BinaryPredicate.Operator.LT,
+          originalChildren_.get(0), originalChildren_.get(1));
+      Predicate upper = new BinaryPredicate(BinaryPredicate.Operator.GT,
+          originalChildren_.get(0), originalChildren_.get(2));
+      rewrittenPredicate_ =
+          new CompoundPredicate(CompoundPredicate.Operator.OR, lower, upper);
+    } else {
+      // Rewrite into conjunction.
+      Predicate lower = new BinaryPredicate(BinaryPredicate.Operator.GE,
+          originalChildren_.get(0), originalChildren_.get(1));
+      Predicate upper = new BinaryPredicate(BinaryPredicate.Operator.LE,
+          originalChildren_.get(0), originalChildren_.get(2));
+      rewrittenPredicate_ =
+          new CompoundPredicate(CompoundPredicate.Operator.AND, lower, upper);
+    }
+
+    try {
+      rewrittenPredicate_.analyze(analyzer);
+      fn_ = rewrittenPredicate_.fn_;
+    } catch (AnalysisException e) {
+      // We should have already guaranteed that analysis will succeed.
+      Preconditions.checkState(false, "Analysis failed in rewritten between predicate");
+    }
+
+    // Make sure toThrift() picks up the children of the rewritten predicate.
+    children_ = rewrittenPredicate_.getChildren();
+    // Since the only child is a CompoundPredicate expressing the comparison,
+    // the cost of the comparison is fully captured by the children's cost.
+    evalCost_ = getChildCosts();
+    isAnalyzed_ = true;
+  }
+
+  @Override
+  public List<Expr> getConjuncts() {
+    return rewrittenPredicate_.getConjuncts();
+  }
+
+  @Override
+  protected void toThrift(TExprNode msg) {
+    rewrittenPredicate_.toThrift(msg);
+  }
+
+  @Override
+  public String toSqlImpl() {
+    String notStr = (isNotBetween_) ? "NOT " : "";
+    return originalChildren_.get(0).toSql() + " " + notStr + "BETWEEN " +
+        originalChildren_.get(1).toSql() + " AND " + originalChildren_.get(2).toSql();
+  }
+
+  /**
+   * Also substitute the exprs in originalChildren when cloning.
+   */
+  @Override
+  protected Expr substituteImpl(ExprSubstitutionMap smap, Analyzer analyzer)
+      throws AnalysisException {
+    BetweenPredicate clone = (BetweenPredicate) super.substituteImpl(smap, analyzer);
+    Preconditions.checkNotNull(clone);
+    clone.originalChildren_ =
+        Expr.substituteList(originalChildren_, smap, analyzer, false);
+    return clone;
+  }
+
+  @Override
+  public Expr clone() { return new BetweenPredicate(this); }
+
+  @Override
+  public Expr reset() {
+    super.reset();
+    originalChildren_ = Expr.resetList(originalChildren_);
+    return this;
+  }
+}
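
The analyze() method above rewrites BETWEEN into a compound predicate before handing it
to the backend: "c BETWEEN l AND u" becomes "c >= l AND c <= u", and "c NOT BETWEEN l
AND u" becomes "c < l OR c > u". A minimal sketch of that rewrite at the SQL-string
level; rewriteBetween() is a hypothetical helper, not part of the Impala frontend.

// Illustrative sketch of the BetweenPredicate rewrite, expressed over SQL strings.
public class BetweenRewriteSketch {
  static String rewriteBetween(String expr, String lower, String upper, boolean notBetween) {
    if (notBetween) {
      // NOT BETWEEN is rewritten into a disjunction.
      return expr + " < " + lower + " OR " + expr + " > " + upper;
    }
    // BETWEEN is rewritten into a conjunction.
    return expr + " >= " + lower + " AND " + expr + " <= " + upper;
  }

  public static void main(String[] args) {
    System.out.println(rewriteBetween("c1", "10", "20", false)); // c1 >= 10 AND c1 <= 20
    System.out.println(rewriteBetween("c1", "10", "20", true));  // c1 < 10 OR c1 > 20
  }
}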

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/BinaryPredicate.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/BinaryPredicate.java b/fe/src/main/java/org/apache/impala/analysis/BinaryPredicate.java
new file mode 100644
index 0000000..35d03e1
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/BinaryPredicate.java
@@ -0,0 +1,388 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.cloudera.impala.catalog.Db;
+import com.cloudera.impala.catalog.Function.CompareMode;
+import com.cloudera.impala.catalog.ScalarFunction;
+import com.cloudera.impala.catalog.Type;
+import com.cloudera.impala.common.AnalysisException;
+import com.cloudera.impala.common.Pair;
+import com.cloudera.impala.common.Reference;
+import com.cloudera.impala.extdatasource.thrift.TComparisonOp;
+import com.cloudera.impala.thrift.TExprNode;
+import com.cloudera.impala.thrift.TExprNodeType;
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Predicates;
+import com.google.common.collect.Lists;
+
+/**
+ * A predicate with two operands, e.g. =, !=, <=, >=, <, >,
+ * IS DISTINCT FROM, IS NOT DISTINCT FROM.
+ */
+public class BinaryPredicate extends Predicate {
+  private final static Logger LOG = LoggerFactory.getLogger(BinaryPredicate.class);
+
+  // true if this BinaryPredicate is inferred from slot equivalences, false otherwise.
+  private boolean isInferred_ = false;
+
+  public enum Operator {
+    EQ("=", "eq", TComparisonOp.EQ),
+    NE("!=", "ne", TComparisonOp.NE),
+    LE("<=", "le", TComparisonOp.LE),
+    GE(">=", "ge", TComparisonOp.GE),
+    LT("<", "lt", TComparisonOp.LT),
+    GT(">", "gt", TComparisonOp.GT),
+    DISTINCT_FROM("IS DISTINCT FROM", "distinctfrom", TComparisonOp.DISTINCT_FROM),
+    NOT_DISTINCT("IS NOT DISTINCT FROM", "notdistinct", TComparisonOp.NOT_DISTINCT),
+    // Same as EQ, except it returns True if the rhs is NULL. There is no backend
+    // function for this. The functionality is embedded in the hash-join
+    // implementation.
+    NULL_MATCHING_EQ("=", "null_matching_eq", TComparisonOp.EQ);
+
+    private final String description_;
+    private final String name_;
+    private final TComparisonOp thriftOp_;
+
+    private Operator(String description, String name, TComparisonOp thriftOp) {
+      this.description_ = description;
+      this.name_ = name;
+      this.thriftOp_ = thriftOp;
+    }
+
+    @Override
+    public String toString() { return description_; }
+    public String getName() { return name_; }
+    public TComparisonOp getThriftOp() { return thriftOp_; }
+    public boolean isEquivalence() { return this == EQ || this == NOT_DISTINCT; }
+
+    public Operator converse() {
+      switch (this) {
+        case EQ: return EQ;
+        case NE: return NE;
+        case LE: return GE;
+        case GE: return LE;
+        case LT: return GT;
+        case GT: return LT;
+        case DISTINCT_FROM: return DISTINCT_FROM;
+        case NOT_DISTINCT: return NOT_DISTINCT;
+        case NULL_MATCHING_EQ:
+          throw new IllegalStateException("Not implemented");
+        default: throw new IllegalStateException("Invalid operator");
+      }
+    }
+  }
+
+  public static void initBuiltins(Db db) {
+    for (Type t: Type.getSupportedTypes()) {
+      if (t.isNull()) continue; // NULL is handled through type promotion.
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.EQ.getName(), Lists.newArrayList(t, t), Type.BOOLEAN));
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.NE.getName(), Lists.newArrayList(t, t), Type.BOOLEAN));
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.LE.getName(), Lists.newArrayList(t, t), Type.BOOLEAN));
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.GE.getName(), Lists.newArrayList(t, t), Type.BOOLEAN));
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.LT.getName(), Lists.newArrayList(t, t), Type.BOOLEAN));
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          Operator.GT.getName(), Lists.newArrayList(t, t), Type.BOOLEAN));
+    }
+  }
+
+  /**
+   * Normalizes a 'predicate' consisting of an uncast SlotRef and a constant Expr into
+   * the following form: <SlotRef> <Op> <LiteralExpr>
+   * If 'predicate' cannot be expressed in this way, null is returned.
+   */
+  public static BinaryPredicate normalizeSlotRefComparison(BinaryPredicate predicate,
+      Analyzer analyzer) {
+    SlotRef ref = null;
+    if (predicate.getChild(0) instanceof SlotRef) {
+      ref = (SlotRef) predicate.getChild(0);
+    } else if (predicate.getChild(1) instanceof SlotRef) {
+      ref = (SlotRef) predicate.getChild(1);
+    }
+
+    if (ref == null) return null;
+    if (ref != predicate.getChild(0)) {
+      Preconditions.checkState(ref == predicate.getChild(1));
+      predicate = new BinaryPredicate(predicate.getOp().converse(), ref,
+          predicate.getChild(0));
+      predicate.analyzeNoThrow(analyzer);
+    }
+
+    try {
+      predicate.foldConstantChildren(analyzer);
+    } catch (AnalysisException ex) {
+      // Throws if the expression cannot be evaluated by the BE.
+      return null;
+    }
+    predicate.analyzeNoThrow(analyzer);
+    if (!(predicate.getChild(1) instanceof LiteralExpr)) return null;
+    return predicate;
+  }
+
+  private Operator op_;
+
+  public Operator getOp() { return op_; }
+  public void setOp(Operator op) { op_ = op; }
+
+  public BinaryPredicate(Operator op, Expr e1, Expr e2) {
+    super();
+    this.op_ = op;
+    Preconditions.checkNotNull(e1);
+    children_.add(e1);
+    Preconditions.checkNotNull(e2);
+    children_.add(e2);
+  }
+
+  protected BinaryPredicate(BinaryPredicate other) {
+    super(other);
+    op_ = other.op_;
+    isInferred_ = other.isInferred_;
+  }
+
+  public boolean isNullMatchingEq() { return op_ == Operator.NULL_MATCHING_EQ; }
+
+  public boolean isInferred() { return isInferred_; }
+  public void setIsInferred() { isInferred_ = true; }
+
+  @Override
+  public String toSqlImpl() {
+    return getChild(0).toSql() + " " + op_.toString() + " " + getChild(1).toSql();
+  }
+
+  @Override
+  protected void toThrift(TExprNode msg) {
+    Preconditions.checkState(children_.size() == 2);
+    // Cannot serialize a nested predicate.
+    Preconditions.checkState(!contains(Subquery.class));
+    // This check is important because we often clone and/or evaluate predicates,
+    // and it's easy to get the casting logic wrong, e.g., cloned predicates
+    // with expr substitutions need to be re-analyzed with reanalyze().
+    Preconditions.checkState(getChild(0).getType().getPrimitiveType() ==
+                             getChild(1).getType().getPrimitiveType(),
+        "child 0 type: " + getChild(0).getType() +
+        " child 1 type: " + getChild(1).getType());
+    msg.node_type = TExprNodeType.FUNCTION_CALL;
+  }
+
+  @Override
+  public String debugString() {
+    return Objects.toStringHelper(this)
+        .add("op", op_)
+        .addValue(super.debugString())
+        .toString();
+  }
+
+  @Override
+  public void analyze(Analyzer analyzer) throws AnalysisException {
+    if (isAnalyzed_) return;
+    super.analyze(analyzer);
+
+    convertNumericLiteralsFromDecimal(analyzer);
+    String opName = op_.getName().equals("null_matching_eq") ? "eq" : op_.getName();
+    fn_ = getBuiltinFunction(analyzer, opName, collectChildReturnTypes(),
+        CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
+    if (fn_ == null) {
+      // Construct an appropriate error message and throw an AnalysisException.
+      String errMsg = "operands of type " + getChild(0).getType().toSql() + " and " +
+            getChild(1).getType().toSql()  + " are not comparable: " + toSql();
+
+      // Check if any of the children is a Subquery that does not return a
+      // scalar.
+      for (Expr expr: children_) {
+        if (expr instanceof Subquery && !expr.getType().isScalarType()) {
+          errMsg = "Subquery must return a single row: " + expr.toSql();
+          break;
+        }
+      }
+
+      throw new AnalysisException(errMsg);
+    }
+    Preconditions.checkState(fn_.getReturnType().isBoolean());
+
+    ArrayList<Expr> subqueries = Lists.newArrayList();
+    collectAll(Predicates.instanceOf(Subquery.class), subqueries);
+    if (subqueries.size() > 1) {
+      // TODO Remove that restriction when we add support for independent subquery
+      // evaluation.
+      throw new AnalysisException("Multiple subqueries are not supported in binary " +
+          "predicates: " + toSql());
+    }
+    if (contains(ExistsPredicate.class)) {
+      throw new AnalysisException("EXISTS subquery predicates are not " +
+          "supported in binary predicates: " + toSql());
+    }
+
+    List<InPredicate> inPredicates = Lists.newArrayList();
+    collect(InPredicate.class, inPredicates);
+    for (InPredicate inPredicate: inPredicates) {
+      if (inPredicate.contains(Subquery.class)) {
+        throw new AnalysisException("IN subquery predicates are not supported in " +
+            "binary predicates: " + toSql());
+      }
+    }
+
+    // Don't perform any casting for predicates with subqueries here. Any casting
+    // required will be performed when the subquery is unnested.
+    if (!contains(Subquery.class)) castForFunctionCall(true);
+
+    // Determine selectivity
+    // TODO: Compute selectivity for nested predicates.
+    // TODO: Improve estimation using histograms.
+    Reference<SlotRef> slotRefRef = new Reference<SlotRef>();
+    if ((op_ == Operator.EQ || op_ == Operator.NOT_DISTINCT)
+        && isSingleColumnPredicate(slotRefRef, null)) {
+      long distinctValues = slotRefRef.getRef().getNumDistinctValues();
+      if (distinctValues > 0) {
+        selectivity_ = 1.0 / distinctValues;
+        selectivity_ = Math.max(0, Math.min(1, selectivity_));
+      }
+    }
+
+    // Compute cost.
+    if (hasChildCosts()) {
+      if (getChild(0).getType().isFixedLengthType()) {
+        evalCost_ = getChildCosts() + BINARY_PREDICATE_COST;
+      } else if (getChild(0).getType().isStringType()) {
+        evalCost_ = getChildCosts() +
+            (float) ((getAvgStringLength(getChild(0)) + getAvgStringLength(getChild(1))) *
+            BINARY_PREDICATE_COST);
+      } else {
+        //TODO(tmarshall): Handle other var length types here.
+        evalCost_ = getChildCosts() + VAR_LEN_BINARY_PREDICATE_COST;
+      }
+    }
+  }
+
+  /**
+   * If predicate is of the form "<slotref> <op> <expr>", returns expr,
+   * otherwise returns null. Slotref may be wrapped in a CastExpr.
+   * TODO: revisit CAST handling at the caller
+   */
+  public Expr getSlotBinding(SlotId id) {
+    // check left operand
+    SlotRef slotRef = getChild(0).unwrapSlotRef(false);
+    if (slotRef != null && slotRef.getSlotId() == id) return getChild(1);
+    // check right operand
+    slotRef = getChild(1).unwrapSlotRef(false);
+    if (slotRef != null && slotRef.getSlotId() == id) return getChild(0);
+    return null;
+  }
+
+  /**
+   * If e is an equality predicate between two slots that only require implicit
+   * casts, returns those two slots; otherwise returns null.
+   */
+  public static Pair<SlotId, SlotId> getEqSlots(Expr e) {
+    if (!(e instanceof BinaryPredicate)) return null;
+    return ((BinaryPredicate) e).getEqSlots();
+  }
+
+  /**
+   * If this is an equality predicate between two slots that only require implicit
+   * casts, returns those two slots; otherwise returns null.
+   */
+  @Override
+  public Pair<SlotId, SlotId> getEqSlots() {
+    if (op_ != Operator.EQ) return null;
+    SlotRef lhs = getChild(0).unwrapSlotRef(true);
+    if (lhs == null) return null;
+    SlotRef rhs = getChild(1).unwrapSlotRef(true);
+    if (rhs == null) return null;
+    return new Pair<SlotId, SlotId>(lhs.getSlotId(), rhs.getSlotId());
+  }
+
+  /**
+   * If predicate is of the form "<SlotRef> op <Expr>" or "<Expr> op <SlotRef>",
+   * returns the SlotRef, otherwise returns null.
+   */
+  @Override
+  public SlotRef getBoundSlot() {
+    SlotRef slotRef = getChild(0).unwrapSlotRef(true);
+    if (slotRef != null) return slotRef;
+    return getChild(1).unwrapSlotRef(true);
+  }
+
+  /**
+   * Negates a BinaryPredicate.
+   */
+  @Override
+  public Expr negate() {
+    Operator newOp = null;
+    switch (op_) {
+      case EQ:
+        newOp = Operator.NE;
+        break;
+      case NE:
+        newOp = Operator.EQ;
+        break;
+      case LT:
+        newOp = Operator.GE;
+        break;
+      case LE:
+        newOp = Operator.GT;
+        break;
+      case GE:
+        newOp = Operator.LT;
+        break;
+      case GT:
+        newOp = Operator.LE;
+        break;
+      case DISTINCT_FROM:
+        newOp = Operator.NOT_DISTINCT;
+        break;
+      case NOT_DISTINCT:
+        newOp = Operator.DISTINCT_FROM;
+        break;
+      case NULL_MATCHING_EQ:
+        throw new IllegalStateException("Not implemented");
+    }
+    return new BinaryPredicate(newOp, getChild(0), getChild(1));
+  }
+
+  /**
+   * Swaps the first with the second child in-place. Only valid to call for
+   * equivalence and not equal predicates.
+   */
+  public void reverse() {
+    Preconditions.checkState(op_.isEquivalence() || op_ == Operator.NE);
+    Collections.swap(children_, 0, 1);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) return false;
+    BinaryPredicate other = (BinaryPredicate) obj;
+    return op_.equals(other.op_);
+  }
+
+  @Override
+  public Expr clone() { return new BinaryPredicate(this); }
+}
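
normalizeSlotRefComparison() above flips a comparison so that the SlotRef ends up on
the left-hand side, which requires the converse operator: for example, "5 < col" is
normalized to "col > 5". A self-contained sketch of that operator mapping follows; the
Op enum below is a stand-in, not the Impala Operator class.

// Illustrative sketch of Operator.converse() as used when swapping operands.
public class ConverseSketch {
  enum Op {
    EQ("="), NE("!="), LE("<="), GE(">="), LT("<"), GT(">");
    final String sql;
    Op(String sql) { this.sql = sql; }
    Op converse() {
      switch (this) {
        case LE: return GE;
        case GE: return LE;
        case LT: return GT;
        case GT: return LT;
        default: return this; // EQ and NE are their own converse.
      }
    }
  }

  public static void main(String[] args) {
    // "5 < col" becomes "col > 5": swap the operands and take the converse operator.
    System.out.println("col " + Op.LT.converse().sql + " 5"); // col > 5
  }
}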

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/BoolLiteral.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/BoolLiteral.java b/fe/src/main/java/org/apache/impala/analysis/BoolLiteral.java
new file mode 100644
index 0000000..03b2b1f
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/BoolLiteral.java
@@ -0,0 +1,113 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import com.cloudera.impala.catalog.Type;
+import com.cloudera.impala.common.AnalysisException;
+import com.cloudera.impala.thrift.TBoolLiteral;
+import com.cloudera.impala.thrift.TExprNode;
+import com.cloudera.impala.thrift.TExprNodeType;
+import com.google.common.base.Objects;
+
+public class BoolLiteral extends LiteralExpr {
+  private final boolean value_;
+
+  public BoolLiteral(boolean value) {
+    this.value_ = value;
+    type_ = Type.BOOLEAN;
+    evalCost_ = LITERAL_COST;
+  }
+
+  public BoolLiteral(String value) throws AnalysisException {
+    type_ = Type.BOOLEAN;
+    evalCost_ = LITERAL_COST;
+    if (value.toLowerCase().equals("true")) {
+      this.value_ = true;
+    } else if (value.toLowerCase().equals("false")) {
+      this.value_ = false;
+    } else {
+      throw new AnalysisException("invalid BOOLEAN literal: " + value);
+    }
+  }
+
+  /**
+   * Copy c'tor used in clone.
+   */
+  protected BoolLiteral(BoolLiteral other) {
+    super(other);
+    value_ = other.value_;
+  }
+
+  @Override
+  public String debugString() {
+    return Objects.toStringHelper(this)
+        .add("value", value_)
+        .toString();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) {
+      return false;
+    }
+    return ((BoolLiteral) obj).value_ == value_;
+  }
+
+  @Override
+  public int hashCode() { return value_ ? 1 : 0; }
+
+  public boolean getValue() { return value_; }
+
+  @Override
+  public String toSqlImpl() {
+    return getStringValue();
+  }
+
+  @Override
+  public String getStringValue() {
+    return value_ ? "TRUE" : "FALSE";
+  }
+
+  @Override
+  protected void toThrift(TExprNode msg) {
+    msg.node_type = TExprNodeType.BOOL_LITERAL;
+    msg.bool_literal = new TBoolLiteral(value_);
+  }
+
+  @Override
+  protected Expr uncheckedCastTo(Type targetType) throws AnalysisException {
+    if (targetType.equals(this.type_)) {
+      return this;
+    } else {
+      return new CastExpr(targetType, this);
+    }
+  }
+
+  @Override
+  public int compareTo(LiteralExpr o) {
+    int ret = super.compareTo(o);
+    if (ret != 0) return ret;
+    BoolLiteral other = (BoolLiteral) o;
+    if (value_ && !other.getValue()) return 1;
+    if (!value_ && other.getValue()) return -1;
+    return 0;
+  }
+
+  @Override
+  public Expr clone() { return new BoolLiteral(this); }
+}

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/CaseExpr.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CaseExpr.java b/fe/src/main/java/org/apache/impala/analysis/CaseExpr.java
new file mode 100644
index 0000000..bd3ec83
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/CaseExpr.java
@@ -0,0 +1,379 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import java.util.List;
+
+import com.cloudera.impala.catalog.Db;
+import com.cloudera.impala.catalog.Function.CompareMode;
+import com.cloudera.impala.catalog.PrimitiveType;
+import com.cloudera.impala.catalog.ScalarFunction;
+import com.cloudera.impala.catalog.ScalarType;
+import com.cloudera.impala.catalog.Type;
+import com.cloudera.impala.common.AnalysisException;
+import com.cloudera.impala.thrift.TCaseExpr;
+import com.cloudera.impala.thrift.TExprNode;
+import com.cloudera.impala.thrift.TExprNodeType;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+/**
+ * CASE and DECODE are represented using this class. The backend implementation is
+ * always the "case" function.
+ *
+ * The internal representation of
+ *   CASE [expr] WHEN expr THEN expr [WHEN expr THEN expr ...] [ELSE expr] END
+ * Each When/Then is stored as two consecutive children (whenExpr, thenExpr). If a case
+ * expr is given then it is the first child. If an else expr is given then it is the
+ * last child.
+ *
+ * The internal representation of
+ *   DECODE(expr, key_expr, val_expr [, key_expr, val_expr ...] [, default_val_expr])
+ * has a pair of children for each pair of key/val_expr and an additional child if the
+ * default_val_expr was given. The first child represents the comparison of expr to
+ * key_expr. Decode has three forms:
+ *   1) DECODE(expr, null_literal, val_expr) -
+ *       child[0] = IsNull(expr)
+ *   2) DECODE(expr, non_null_literal, val_expr) -
+ *       child[0] = Eq(expr, literal)
+ *   3) DECODE(expr1, expr2, val_expr) -
+ *       child[0] = Or(And(IsNull(expr1), IsNull(expr2)),  Eq(expr1, expr2))
+ * The children representing val_expr (child[1]) and default_val_expr (child[2]) are
+ * simply the exprs themselves.
+ *
+ * Example of equivalent CASE for DECODE(foo, 'bar', 1, col, 2, NULL, 3, 4):
+ *   CASE
+ *     WHEN foo = 'bar' THEN 1   -- no need for IS NULL check
+ *     WHEN foo IS NULL AND col IS NULL OR foo = col THEN 2
+ *     WHEN foo IS NULL THEN 3  -- no need for equality check
+ *     ELSE 4
+ *   END
+ */
+public class CaseExpr extends Expr {
+
+  // Set if constructed from a DECODE, null otherwise.
+  private FunctionCallExpr decodeExpr_;
+
+  private boolean hasCaseExpr_;
+  private boolean hasElseExpr_;
+
+  public CaseExpr(Expr caseExpr, List<CaseWhenClause> whenClauses, Expr elseExpr) {
+    super();
+    if (caseExpr != null) {
+      children_.add(caseExpr);
+      hasCaseExpr_ = true;
+    }
+    for (CaseWhenClause whenClause: whenClauses) {
+      Preconditions.checkNotNull(whenClause.getWhenExpr());
+      children_.add(whenClause.getWhenExpr());
+      Preconditions.checkNotNull(whenClause.getThenExpr());
+      children_.add(whenClause.getThenExpr());
+    }
+    if (elseExpr != null) {
+      children_.add(elseExpr);
+      hasElseExpr_ = true;
+    }
+  }
+
+  /**
+   * Constructs an equivalent CaseExpr representation.
+   *
+   * The DECODE behavior is basically the same as the hasCaseExpr_ version of CASE.
+   * There is one difference, though: NULLs are considered equal when comparing the
+   * argument to be decoded with the candidates. This difference is for compatibility
+   * with Oracle. http://docs.oracle.com/cd/B19306_01/server.102/b14200/functions040.htm.
+   * To account for the difference, the CASE representation will use the non-hasCaseExpr_
+   * version.
+   *
+   * The return type of DECODE differs from that of Oracle when the third argument is
+   * the NULL literal. In Oracle the return type is STRING. In Impala the return type is
+   * determined by the implicit casting rules (i.e. it's not necessarily a STRING). This
+   * is done so seemingly normal usages such as DECODE(int_col, tinyint_col, NULL,
+   * bigint_col) will avoid type check errors (STRING incompatible with BIGINT).
+   */
+  public CaseExpr(FunctionCallExpr decodeExpr) {
+    super();
+    decodeExpr_ = decodeExpr;
+    hasCaseExpr_ = false;
+
+    int childIdx = 0;
+    Expr encoded = null;
+    Expr encodedIsNull = null;
+    if (!decodeExpr.getChildren().isEmpty()) {
+      encoded = decodeExpr.getChild(childIdx++);
+      encodedIsNull = new IsNullPredicate(encoded, false);
+    }
+
+    // Add the key_expr/val_expr pairs
+    while (childIdx + 2 <= decodeExpr.getChildren().size()) {
+      Expr candidate = decodeExpr.getChild(childIdx++);
+      if (candidate.isLiteral()) {
+        if (candidate.isNullLiteral()) {
+          // An example case is DECODE(foo, NULL, bar). Since NULLs are considered
+          // equal, this becomes CASE WHEN foo IS NULL THEN bar END.
+          children_.add(encodedIsNull);
+        } else {
+          children_.add(new BinaryPredicate(
+              BinaryPredicate.Operator.EQ, encoded, candidate));
+        }
+      } else {
+        children_.add(new CompoundPredicate(CompoundPredicate.Operator.OR,
+            new CompoundPredicate(CompoundPredicate.Operator.AND,
+                encodedIsNull, new IsNullPredicate(candidate, false)),
+            new BinaryPredicate(BinaryPredicate.Operator.EQ, encoded, candidate)));
+      }
+
+      // Add the value
+      children_.add(decodeExpr.getChild(childIdx++));
+    }
+
+    // Add the default value
+    if (childIdx < decodeExpr.getChildren().size()) {
+      hasElseExpr_ = true;
+      children_.add(decodeExpr.getChild(childIdx));
+    }
+  }
+
+  /**
+   * Copy c'tor used in clone().
+   */
+  protected CaseExpr(CaseExpr other) {
+    super(other);
+    decodeExpr_ = other.decodeExpr_;
+    hasCaseExpr_ = other.hasCaseExpr_;
+    hasElseExpr_ = other.hasElseExpr_;
+  }
+
+  public static void initBuiltins(Db db) {
+    for (Type t: Type.getSupportedTypes()) {
+      if (t.isNull()) continue;
+      if (t.isScalarType(PrimitiveType.CHAR)) continue;
+      // TODO: case is special and the signature cannot be represented.
+      // It is alternating varargs
+      // e.g. case(bool, type, bool, type, bool, type, etc).
+      // Instead we just add a version for each of the return types
+      // e.g. case(BOOLEAN), case(INT), etc
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          "case", "", Lists.newArrayList(t), t));
+      // Same for DECODE
+      db.addBuiltin(ScalarFunction.createBuiltinOperator(
+          "decode", "", Lists.newArrayList(t), t));
+    }
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) return false;
+    CaseExpr expr = (CaseExpr) obj;
+    return hasCaseExpr_ == expr.hasCaseExpr_
+        && hasElseExpr_ == expr.hasElseExpr_
+        && isDecode() == expr.isDecode();
+  }
+
+  @Override
+  public String toSqlImpl() {
+    return (decodeExpr_ == null) ? toCaseSql() : decodeExpr_.toSqlImpl();
+  }
+
+  @VisibleForTesting
+  String toCaseSql() {
+    StringBuilder output = new StringBuilder("CASE");
+    int childIdx = 0;
+    if (hasCaseExpr_) {
+      output.append(" " + children_.get(childIdx++).toSql());
+    }
+    while (childIdx + 2 <= children_.size()) {
+      output.append(" WHEN " + children_.get(childIdx++).toSql());
+      output.append(" THEN " + children_.get(childIdx++).toSql());
+    }
+    if (hasElseExpr_) {
+      output.append(" ELSE " + children_.get(children_.size() - 1).toSql());
+    }
+    output.append(" END");
+    return output.toString();
+  }
+
+  @Override
+  protected void toThrift(TExprNode msg) {
+    msg.node_type = TExprNodeType.CASE_EXPR;
+    msg.case_expr = new TCaseExpr(hasCaseExpr_, hasElseExpr_);
+  }
+
+  private void castCharToString(int childIndex) throws AnalysisException {
+    if (children_.get(childIndex).getType().isScalarType(PrimitiveType.CHAR)) {
+      children_.set(childIndex, children_.get(childIndex).castTo(ScalarType.STRING));
+    }
+  }
+
+  @Override
+  public void analyze(Analyzer analyzer) throws AnalysisException {
+    if (isAnalyzed_) return;
+    super.analyze(analyzer);
+
+    if (isDecode()) {
+      Preconditions.checkState(!hasCaseExpr_);
+      // decodeExpr_.analyze() would fail validating function existence. The complex
+      // vararg signature is currently unsupported.
+      FunctionCallExpr.validateScalarFnParams(decodeExpr_.getParams());
+      if (decodeExpr_.getChildren().size() < 3) {
+        throw new AnalysisException("DECODE in '" + toSql() + "' requires at least 3 "
+            + "arguments.");
+      }
+    }
+
+    // Since we have no BE implementation of a CaseExpr with CHAR types,
+    // we cast the CHAR-typed whenExprs and caseExprs to STRING,
+    // TODO: This casting is not always correct and needs to be fixed, see IMPALA-1652.
+
+    // Keep track of maximum compatible type of case expr and all when exprs.
+    Type whenType = null;
+    // Keep track of maximum compatible type of else expr and all then exprs.
+    Type returnType = null;
+    // Remember last of these exprs for error reporting.
+    Expr lastCompatibleThenExpr = null;
+    Expr lastCompatibleWhenExpr = null;
+    int loopEnd = children_.size();
+    if (hasElseExpr_) {
+      --loopEnd;
+    }
+    int loopStart;
+    Expr caseExpr = null;
+    // Set loop start, and initialize whenType as the type of the case expr.
+    if (hasCaseExpr_) {
+      loopStart = 1;
+      castCharToString(0);
+      caseExpr = children_.get(0);
+      caseExpr.analyze(analyzer);
+      whenType = caseExpr.getType();
+      lastCompatibleWhenExpr = children_.get(0);
+    } else {
+      whenType = Type.BOOLEAN;
+      loopStart = 0;
+    }
+
+    // Go through when/then exprs and determine compatible types.
+    for (int i = loopStart; i < loopEnd; i += 2) {
+      castCharToString(i);
+      Expr whenExpr = children_.get(i);
+      if (hasCaseExpr_) {
+        // Determine maximum compatible type of the case expr,
+        // and all when exprs seen so far. We will add casts to them at the very end.
+        whenType = analyzer.getCompatibleType(whenType,
+            lastCompatibleWhenExpr, whenExpr);
+        lastCompatibleWhenExpr = whenExpr;
+      } else {
+        // If no case expr was given, then the when exprs should always return
+        // boolean or be castable to boolean.
+        if (!Type.isImplicitlyCastable(whenExpr.getType(), Type.BOOLEAN, false)) {
+          Preconditions.checkState(isCase());
+          throw new AnalysisException("When expr '" + whenExpr.toSql() + "'" +
+              " is not of type boolean and not castable to type boolean.");
+        }
+        // Add a cast if necessary.
+        if (!whenExpr.getType().isBoolean()) castChild(Type.BOOLEAN, i);
+      }
+      // Determine maximum compatible type of the then exprs seen so far.
+      // We will add casts to them at the very end.
+      Expr thenExpr = children_.get(i + 1);
+      returnType = analyzer.getCompatibleType(returnType,
+          lastCompatibleThenExpr, thenExpr);
+      lastCompatibleThenExpr = thenExpr;
+    }
+    if (hasElseExpr_) {
+      Expr elseExpr = children_.get(children_.size() - 1);
+      returnType = analyzer.getCompatibleType(returnType,
+          lastCompatibleThenExpr, elseExpr);
+    }
+
+    // Make sure BE doesn't see TYPE_NULL by picking an arbitrary type
+    if (whenType.isNull()) whenType = ScalarType.BOOLEAN;
+    if (returnType.isNull()) returnType = ScalarType.BOOLEAN;
+
+    // Add casts to case expr to compatible type.
+    if (hasCaseExpr_) {
+      // Cast case expr.
+      if (!children_.get(0).type_.equals(whenType)) {
+        castChild(whenType, 0);
+      }
+      // Add casts to when exprs to compatible type.
+      for (int i = loopStart; i < loopEnd; i += 2) {
+        if (!children_.get(i).type_.equals(whenType)) {
+          castChild(whenType, i);
+        }
+      }
+    }
+    // Cast then exprs to compatible type.
+    for (int i = loopStart + 1; i < children_.size(); i += 2) {
+      if (!children_.get(i).type_.equals(returnType)) {
+        castChild(returnType, i);
+      }
+    }
+    // Cast else expr to compatible type.
+    if (hasElseExpr_) {
+      if (!children_.get(children_.size() - 1).type_.equals(returnType)) {
+        castChild(returnType, children_.size() - 1);
+      }
+    }
+
+    // Do the function lookup just based on the whenType.
+    Type[] args = new Type[1];
+    args[0] = whenType;
+    fn_ = getBuiltinFunction(analyzer, "case", args,
+        CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
+    Preconditions.checkNotNull(fn_);
+    type_ = returnType;
+
+    // Compute cost as the sum of evaluating all of the WHEN exprs, plus
+    // the max of the THEN/ELSE exprs.
+    float maxThenCost = 0;
+    float whenCosts = 0;
+    boolean hasChildCosts = true;
+    for (int i = 0; i < children_.size(); ++i) {
+      if (!getChild(i).hasCost()) {
+        hasChildCosts = false;
+        break;
+      }
+
+      if (hasCaseExpr_ && i % 2 == 1) {
+        // This child is a WHEN expr. BINARY_PREDICATE_COST accounts for the cost of
+        // comparing the CASE expr to the WHEN expr.
+        whenCosts += getChild(0).getCost() + getChild(i).getCost() +
+          BINARY_PREDICATE_COST;
+      } else if (!hasCaseExpr_ && i % 2 == 0) {
+        // This child is a WHEN expr.
+        whenCosts += getChild(i).getCost();
+      } else if (i != 0) {
+        // This child is a THEN or ELSE expr.
+        float thenCost = getChild(i).getCost();
+        if (thenCost > maxThenCost) maxThenCost = thenCost;
+      }
+    }
+    if (hasChildCosts) {
+      evalCost_ = whenCosts + maxThenCost;
+    }
+  }
+
+  private boolean isCase() { return !isDecode(); }
+  private boolean isDecode() { return decodeExpr_ != null; }
+  public boolean hasCaseExpr() { return hasCaseExpr_; }
+  public boolean hasElseExpr() { return hasElseExpr_; }
+
+  @Override
+  public Expr clone() { return new CaseExpr(this); }
+}

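In the CASE analysis above, children_ holds the flattened clause list (the optional CASE expr, then WHEN/THEN pairs, then the optional ELSE expr), and the THEN/ELSE result types are folded pairwise into one compatible return type before casts are added. Below is a minimal standalone sketch of that folding pattern, using an invented type lattice that is far simpler than Impala's implicit-cast rules; the class and enum are illustrative only and not part of this patch.

import java.util.Arrays;
import java.util.List;

public class CompatibleTypeSketch {
  // Toy type lattice; the real rules live behind analyzer.getCompatibleType() above.
  enum T { NULL, TINYINT, INT, BIGINT, DOUBLE }

  // Assumed rule: NULL is compatible with anything; otherwise pick the wider type.
  static T compatible(T a, T b) {
    if (a == T.NULL) return b;
    if (b == T.NULL) return a;
    return a.ordinal() >= b.ordinal() ? a : b;
  }

  public static void main(String[] args) {
    // Result types of the THEN/ELSE branches of, e.g.:
    //   CASE WHEN p1 THEN 1 WHEN p2 THEN 10000000000 ELSE NULL END
    List<T> thenTypes = Arrays.asList(T.TINYINT, T.BIGINT, T.NULL);
    T returnType = T.NULL;
    for (T t : thenTypes) returnType = compatible(returnType, t);
    // As in the code above, a result type that is still NULL would be forced to BOOLEAN.
    System.out.println(returnType);  // BIGINT
  }
}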
http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/CaseWhenClause.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CaseWhenClause.java b/fe/src/main/java/org/apache/impala/analysis/CaseWhenClause.java
new file mode 100644
index 0000000..8b1433e
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/CaseWhenClause.java
@@ -0,0 +1,42 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+
+/**
+ * Captures the info of a single "WHEN expr THEN expr" clause.
+ *
+ */
+class CaseWhenClause {
+  private final Expr whenExpr_;
+  private final Expr thenExpr_;
+
+  public CaseWhenClause(Expr whenExpr, Expr thenExpr) {
+    super();
+    this.whenExpr_ = whenExpr;
+    this.thenExpr_ = thenExpr;
+  }
+
+  public Expr getWhenExpr() {
+    return whenExpr_;
+  }
+
+  public Expr getThenExpr() {
+    return thenExpr_;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/CastExpr.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CastExpr.java b/fe/src/main/java/org/apache/impala/analysis/CastExpr.java
new file mode 100644
index 0000000..2b3b271
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/CastExpr.java
@@ -0,0 +1,312 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import com.cloudera.impala.catalog.Catalog;
+import com.cloudera.impala.catalog.Db;
+import com.cloudera.impala.catalog.Function;
+import com.cloudera.impala.catalog.Function.CompareMode;
+import com.cloudera.impala.catalog.PrimitiveType;
+import com.cloudera.impala.catalog.ScalarFunction;
+import com.cloudera.impala.catalog.ScalarType;
+import com.cloudera.impala.catalog.Type;
+import com.cloudera.impala.common.AnalysisException;
+import com.cloudera.impala.thrift.TExpr;
+import com.cloudera.impala.thrift.TExprNode;
+import com.cloudera.impala.thrift.TExprNodeType;
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+public class CastExpr extends Expr {
+  // Only set for explicit casts. Null for implicit casts.
+  private final TypeDef targetTypeDef_;
+
+  // True if this is a "pre-analyzed" implicit cast.
+  private final boolean isImplicit_;
+
+  // True if this cast does not change the type.
+  private boolean noOp_ = false;
+
+  /**
+   * C'tor for "pre-analyzed" implicit casts.
+   */
+  public CastExpr(Type targetType, Expr e) {
+    super();
+    Preconditions.checkState(targetType.isValid());
+    Preconditions.checkNotNull(e);
+    type_ = targetType;
+    targetTypeDef_ = null;
+    isImplicit_ = true;
+    // replace existing implicit casts
+    if (e instanceof CastExpr) {
+      CastExpr castExpr = (CastExpr) e;
+      if (castExpr.isImplicit()) e = castExpr.getChild(0);
+    }
+    children_.add(e);
+
+    // Implicit casts don't call analyze()
+    // TODO: this doesn't seem like the cleanest approach but there are places
+    // we generate these (e.g. table loading) where there is no analyzer object.
+    try {
+      analyze();
+      computeNumDistinctValues();
+    } catch (AnalysisException ex) {
+      Preconditions.checkState(false,
+          "Implicit casts should never throw analysis exception.");
+    }
+    isAnalyzed_ = true;
+  }
+
+  /**
+   * C'tor for explicit casts.
+   */
+  public CastExpr(TypeDef targetTypeDef, Expr e) {
+    Preconditions.checkNotNull(targetTypeDef);
+    Preconditions.checkNotNull(e);
+    isImplicit_ = false;
+    targetTypeDef_ = targetTypeDef;
+    children_.add(e);
+  }
+
+  /**
+   * Copy c'tor used in clone().
+   */
+  protected CastExpr(CastExpr other) {
+    super(other);
+    targetTypeDef_ = other.targetTypeDef_;
+    isImplicit_ = other.isImplicit_;
+    noOp_ = other.noOp_;
+  }
+
+  private static String getFnName(Type targetType) {
+    return "castTo" + targetType.getPrimitiveType().toString();
+  }
+
+  public static void initBuiltins(Db db) {
+    for (Type fromType : Type.getSupportedTypes()) {
+      if (fromType.isNull()) continue;
+      for (Type toType : Type.getSupportedTypes()) {
+        if (toType.isNull()) continue;
+        // Disable casting from string to boolean
+        if (fromType.isStringType() && toType.isBoolean()) continue;
+        // Disable casting from boolean/timestamp to decimal
+        if ((fromType.isBoolean() || fromType.isDateType()) && toType.isDecimal()) {
+          continue;
+        }
+        if (fromType.getPrimitiveType() == PrimitiveType.STRING
+            && toType.getPrimitiveType() == PrimitiveType.CHAR) {
+          // Allow casting from STRING to CHAR(N)
+          String beSymbol = "impala::CastFunctions::CastToChar";
+          db.addBuiltin(ScalarFunction.createBuiltin(getFnName(ScalarType.CHAR),
+              Lists.newArrayList((Type) ScalarType.STRING), false, ScalarType.CHAR,
+              beSymbol, null, null, true));
+          continue;
+        }
+        if (fromType.getPrimitiveType() == PrimitiveType.CHAR
+            && toType.getPrimitiveType() == PrimitiveType.CHAR) {
+          // Allow casting from CHAR(N) to CHAR(N)
+          String beSymbol = "impala::CastFunctions::CastToChar";
+          db.addBuiltin(ScalarFunction.createBuiltin(getFnName(ScalarType.CHAR),
+              Lists.newArrayList((Type) ScalarType.createCharType(-1)), false,
+              ScalarType.CHAR, beSymbol, null, null, true));
+          continue;
+        }
+        if (fromType.getPrimitiveType() == PrimitiveType.VARCHAR
+            && toType.getPrimitiveType() == PrimitiveType.VARCHAR) {
+          // Allow casting from VARCHAR(N) to VARCHAR(M)
+          String beSymbol = "impala::CastFunctions::CastToStringVal";
+          db.addBuiltin(ScalarFunction.createBuiltin(getFnName(ScalarType.VARCHAR),
+              Lists.newArrayList((Type) ScalarType.VARCHAR), false, ScalarType.VARCHAR,
+              beSymbol, null, null, true));
+          continue;
+        }
+        if (fromType.getPrimitiveType() == PrimitiveType.VARCHAR
+            && toType.getPrimitiveType() == PrimitiveType.CHAR) {
+          // Allow casting from VARCHAR(N) to CHAR(M)
+          String beSymbol = "impala::CastFunctions::CastToChar";
+          db.addBuiltin(ScalarFunction.createBuiltin(getFnName(ScalarType.CHAR),
+              Lists.newArrayList((Type) ScalarType.VARCHAR), false, ScalarType.CHAR,
+              beSymbol, null, null, true));
+          continue;
+        }
+        if (fromType.getPrimitiveType() == PrimitiveType.CHAR
+            && toType.getPrimitiveType() == PrimitiveType.VARCHAR) {
+          // Allow casting from CHAR(N) to VARCHAR(M)
+          String beSymbol = "impala::CastFunctions::CastToStringVal";
+          db.addBuiltin(ScalarFunction.createBuiltin(getFnName(ScalarType.VARCHAR),
+              Lists.newArrayList((Type) ScalarType.CHAR), false, ScalarType.VARCHAR,
+              beSymbol, null, null, true));
+          continue;
+        }
+        // Disable no-op casts
+        if (fromType.equals(toType) && !fromType.isDecimal()) continue;
+        String beClass = toType.isDecimal() || fromType.isDecimal() ?
+            "DecimalOperators" : "CastFunctions";
+        String beSymbol = "impala::" + beClass + "::CastTo" + Function.getUdfType(toType);
+        db.addBuiltin(ScalarFunction.createBuiltin(getFnName(toType),
+            Lists.newArrayList(fromType), false, toType, beSymbol,
+            null, null, true));
+      }
+    }
+  }
+
+  @Override
+  public String toSqlImpl() {
+    if (isImplicit_) return getChild(0).toSql();
+    return "CAST(" + getChild(0).toSql() + " AS " + targetTypeDef_.toString() + ")";
+  }
+
+  @Override
+  protected void treeToThriftHelper(TExpr container) {
+    if (noOp_) {
+      getChild(0).treeToThriftHelper(container);
+      return;
+    }
+    super.treeToThriftHelper(container);
+  }
+
+  @Override
+  protected void toThrift(TExprNode msg) {
+    msg.node_type = TExprNodeType.FUNCTION_CALL;
+  }
+
+  @Override
+  public String debugString() {
+    return Objects.toStringHelper(this)
+        .add("isImplicit", isImplicit_)
+        .add("target", type_)
+        .addValue(super.debugString())
+        .toString();
+  }
+
+  public boolean isImplicit() { return isImplicit_; }
+
+  @Override
+  public void analyze(Analyzer analyzer) throws AnalysisException {
+    if (isAnalyzed_) return;
+    Preconditions.checkState(!isImplicit_);
+    super.analyze(analyzer);
+    targetTypeDef_.analyze(analyzer);
+    type_ = targetTypeDef_.getType();
+    analyze();
+  }
+
+  private void analyze() throws AnalysisException {
+    if (getChild(0).hasCost()) evalCost_ = getChild(0).getCost() + CAST_COST;
+
+    Preconditions.checkNotNull(type_);
+    if (type_.isComplexType()) {
+      throw new AnalysisException(
+          "Unsupported cast to complex type: " + type_.toSql());
+    }
+
+    boolean readyForCharCast =
+        children_.get(0).getType().getPrimitiveType() == PrimitiveType.STRING ||
+        children_.get(0).getType().getPrimitiveType() == PrimitiveType.CHAR;
+    if (type_.getPrimitiveType() == PrimitiveType.CHAR && !readyForCharCast) {
+      // Backend functions only exist to cast string types to CHAR; a cast from every
+      // other type would be redundant with STRING. Such casts go through 2 steps:
+      // (1) cast to STRING, to stringify the value
+      // (2) cast to CHAR, to truncate or pad with spaces
+      CastExpr tostring = new CastExpr(ScalarType.STRING, children_.get(0));
+      tostring.analyze();
+      children_.set(0, tostring);
+    }
+
+    if (children_.get(0) instanceof NumericLiteral && type_.isFloatingPointType()) {
+      // Special case: casting a decimal literal to a floating point number. The
+      // literal can be interpreted as either type, and we want to avoid casts since
+      // they can result in loss of accuracy.
+      ((NumericLiteral)children_.get(0)).explicitlyCastToFloat(type_);
+    }
+
+    if (children_.get(0).getType().isNull()) {
+      // Make sure BE never sees TYPE_NULL
+      uncheckedCastChild(type_, 0);
+    }
+
+    // Ensure child has non-null type (even if it's a null literal). This is required
+    // for the UDF interface.
+    if (children_.get(0) instanceof NullLiteral) {
+      NullLiteral nullChild = (NullLiteral)(children_.get(0));
+      nullChild.uncheckedCastTo(type_);
+    }
+
+    Type childType = children_.get(0).type_;
+    Preconditions.checkState(!childType.isNull());
+    if (childType.equals(type_)) {
+      noOp_ = true;
+      return;
+    }
+
+    FunctionName fnName = new FunctionName(Catalog.BUILTINS_DB, getFnName(type_));
+    Type[] args = { childType };
+    Function searchDesc = new Function(fnName, args, Type.INVALID, false);
+    if (isImplicit_) {
+      fn_ = Catalog.getBuiltin(searchDesc, CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
+      Preconditions.checkState(fn_ != null);
+    } else {
+      fn_ = Catalog.getBuiltin(searchDesc, CompareMode.IS_IDENTICAL);
+      if (fn_ == null) {
+        // allow for promotion from CHAR to STRING, but only if no exact match is found
+        fn_ = Catalog.getBuiltin(searchDesc.promoteCharsToStrings(),
+            CompareMode.IS_IDENTICAL);
+      }
+    }
+    if (fn_ == null) {
+      throw new AnalysisException("Invalid type cast of " + getChild(0).toSql() +
+          " from " + childType + " to " + type_);
+    }
+
+    Preconditions.checkState(type_.matchesType(fn_.getReturnType()),
+        type_ + " != " + fn_.getReturnType());
+  }
+
+  /**
+   * Returns child expr if this expr is an implicit cast, otherwise returns 'this'.
+   */
+  @Override
+  public Expr ignoreImplicitCast() {
+    if (isImplicit_) {
+      // we don't expect to see two consecutive implicit casts
+      Preconditions.checkState(
+          !(getChild(0) instanceof CastExpr) || !((CastExpr) getChild(0)).isImplicit());
+      return getChild(0);
+    } else {
+      return this;
+    }
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) return true;
+    if (obj instanceof CastExpr) {
+      CastExpr other = (CastExpr) obj;
+      return isImplicit_ == other.isImplicit_
+          && type_.equals(other.type_)
+          && super.equals(obj);
+    }
+    // Ignore implicit casts when comparing expr trees.
+    if (isImplicit_) return getChild(0).equals(obj);
+    return false;
+  }
+
+  @Override
+  public Expr clone() { return new CastExpr(this); }
+}

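CastExpr.analyze() above turns a non-string cast to CHAR(N) into two casts: an implicit cast to STRING to stringify the value, followed by the cast to CHAR(N) that truncates or pads with spaces. The snippet below is a standalone sketch of that truncate-or-pad result only; it is not Impala's backend CastToChar, and the class name is invented.

public class CastToCharSketch {
  static String castToChar(Object value, int n) {
    String s = String.valueOf(value);              // step (1): cast to STRING
    if (s.length() > n) return s.substring(0, n);  // step (2): truncate ...
    StringBuilder sb = new StringBuilder(s);
    while (sb.length() < n) sb.append(' ');        // ... or pad with spaces
    return sb.toString();
  }

  public static void main(String[] args) {
    // Roughly what CAST(1234 AS CHAR(6)) yields once the analyzer has inserted
    // the intermediate CAST(... AS STRING):
    System.out.println("[" + castToChar(1234, 6) + "]");     // [1234  ]
    System.out.println("[" + castToChar(1234567, 6) + "]");  // [123456]
  }
}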
http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/CollectionStructType.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CollectionStructType.java b/fe/src/main/java/org/apache/impala/analysis/CollectionStructType.java
new file mode 100644
index 0000000..b45b856
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/CollectionStructType.java
@@ -0,0 +1,79 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import java.util.ArrayList;
+
+import jline.internal.Preconditions;
+
+import com.cloudera.impala.catalog.ArrayType;
+import com.cloudera.impala.catalog.MapType;
+import com.cloudera.impala.catalog.ScalarType;
+import com.cloudera.impala.catalog.StructField;
+import com.cloudera.impala.catalog.StructType;
+import com.cloudera.impala.catalog.Type;
+import com.google.common.collect.Lists;
+
+/**
+ * Generated struct type describing the fields of a collection type
+ * that can be referenced in paths.
+ *
+ * Parent Type      CollectionStructType
+ * array<i>    -->  struct<item:i,pos:bigint>
+ * map<k,v>    -->  struct<key:k,value:v>
+ */
+public class CollectionStructType extends StructType {
+  // True if this struct describes the fields of a map,
+  // false if it describes the fields of an array.
+  private final boolean isMapStruct_;
+
+  // Field that can be skipped by implicit paths if its type is a struct.
+  private final StructField optionalField_;
+
+  private CollectionStructType(ArrayList<StructField> fields, boolean isMapStruct) {
+    super(fields);
+    isMapStruct_ = isMapStruct;
+    if (isMapStruct_) {
+      optionalField_ = getField(Path.MAP_VALUE_FIELD_NAME);
+    } else {
+      optionalField_ = getField(Path.ARRAY_ITEM_FIELD_NAME);
+    }
+    Preconditions.checkNotNull(optionalField_);
+  }
+
+  public static CollectionStructType createArrayStructType(ArrayType arrayType) {
+    Type itemType = arrayType.getItemType();
+    ArrayList<StructField> fields = Lists.newArrayListWithCapacity(2);
+    // The item field name comes before the pos field name so that a path to the
+    // stored item corresponds to its physical path.
+    fields.add(new StructField(Path.ARRAY_ITEM_FIELD_NAME, itemType));
+    fields.add(new StructField(Path.ARRAY_POS_FIELD_NAME, ScalarType.BIGINT));
+    return new CollectionStructType(fields, false);
+  }
+
+  public static CollectionStructType createMapStructType(MapType mapType) {
+    ArrayList<StructField> mapFields = Lists.newArrayListWithCapacity(2);
+    mapFields.add(new StructField(Path.MAP_KEY_FIELD_NAME, mapType.getKeyType()));
+    mapFields.add(new StructField(Path.MAP_VALUE_FIELD_NAME, mapType.getValueType()));
+    return new CollectionStructType(mapFields, true);
+  }
+
+  public StructField getOptionalField() { return optionalField_; }
+  public boolean isMapStruct() { return isMapStruct_; }
+  public boolean isArrayStruct() { return !isMapStruct_; }
+}

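CollectionStructType is what lets column paths step inside a collection: an ARRAY is exposed as struct<item,pos> and a MAP as struct<key,value>. A hedged usage sketch follows, assuming the frontend classes above are on the classpath and that ArrayType and MapType have the usual item-type and key/value-type constructors (not shown in this patch).

// Illustrative fragment only; ArrayType(Type) and MapType(Type, Type) are assumed.
ArrayType bigintArray = new ArrayType(ScalarType.BIGINT);
CollectionStructType arrayStruct =
    CollectionStructType.createArrayStructType(bigintArray);
// arrayStruct has the fields item:BIGINT and pos:BIGINT; item is the optional field.

MapType stringToBigint = new MapType(ScalarType.STRING, ScalarType.BIGINT);
CollectionStructType mapStruct =
    CollectionStructType.createMapStructType(stringToBigint);
// mapStruct has the fields key:STRING and value:BIGINT; value is the optional field.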
http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/CollectionTableRef.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CollectionTableRef.java b/fe/src/main/java/org/apache/impala/analysis/CollectionTableRef.java
new file mode 100644
index 0000000..8abed3e
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/CollectionTableRef.java
@@ -0,0 +1,138 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import com.cloudera.impala.authorization.Privilege;
+import com.cloudera.impala.authorization.PrivilegeRequestBuilder;
+import com.cloudera.impala.common.AnalysisException;
+import com.google.common.base.Preconditions;
+
+/**
+ * Reference to a MAP or ARRAY collection type that implies its
+ * flattening during execution.
+ * TODO: We currently create a new slot in the root tuple descriptor for every
+ * relative collection ref, even if they have the same path. The BE currently relies on
+ * this behavior for setting collection slots to NULL after they have been unnested
+ * inside a SubplanNode. We could instead share the slot and the corresponding item tuple
+ * descriptor among all collection table refs with the same path. This change will
+ * require decoupling tuple descriptors from table aliases, i.e., a tuple descriptor
+ * should be able to back multiple aliases.
+ */
+public class CollectionTableRef extends TableRef {
+  /////////////////////////////////////////
+  // BEGIN: Members that need to be reset()
+
+  // Expr that returns the referenced collection. Typically a SlotRef into the
+  // parent scan's tuple. Result of analysis. Fully resolved against base tables.
+  private Expr collectionExpr_;
+
+  // END: Members that need to be reset()
+  /////////////////////////////////////////
+
+  /**
+   * Create a CollectionTableRef from the original unresolved table ref as well as
+   * its resolved path. Sets table aliases and join-related attributes.
+   */
+  public CollectionTableRef(TableRef tableRef, Path resolvedPath) {
+    super(tableRef);
+    Preconditions.checkState(resolvedPath.isResolved());
+    resolvedPath_ = resolvedPath;
+    // Use the last path element as an implicit alias if no explicit alias was given.
+    if (hasExplicitAlias()) return;
+    String implicitAlias = rawPath_.get(rawPath_.size() - 1).toLowerCase();
+    aliases_ = new String[] { implicitAlias };
+  }
+
+  /**
+   * C'tor for cloning.
+   */
+  public CollectionTableRef(CollectionTableRef other) {
+    super(other);
+    collectionExpr_ =
+        (other.collectionExpr_ != null) ? other.collectionExpr_.clone() : null;
+  }
+
+  /**
+   * Registers this collection table ref with the given analyzer and adds a slot
+   * descriptor for the materialized collection to be populated by the parent scan.
+   * Also determines whether this collection table ref is correlated or not.
+   *
+   * If this function is called in the context of analyzing a WITH clause, then
+   * no slot is added to the parent descriptor so as to not pollute the analysis
+   * state of the parent block (the WITH-clause analyzer is discarded, and the
+   * parent analyzer could have an entirely different global state).
+   */
+  @Override
+  public void analyze(Analyzer analyzer) throws AnalysisException {
+    if (isAnalyzed_) return;
+    desc_ = analyzer.registerTableRef(this);
+    if (isRelative() && !analyzer.isWithClause()) {
+      SlotDescriptor parentSlotDesc = analyzer.registerSlotRef(resolvedPath_);
+      parentSlotDesc.setItemTupleDesc(desc_);
+      collectionExpr_ = new SlotRef(parentSlotDesc);
+      // Must always be materialized to ensure the correct cardinality after unnesting.
+      analyzer.materializeSlots(collectionExpr_);
+      Analyzer parentAnalyzer =
+          analyzer.findAnalyzer(resolvedPath_.getRootDesc().getId());
+      Preconditions.checkNotNull(parentAnalyzer);
+      if (parentAnalyzer != analyzer) {
+        TableRef parentRef =
+            parentAnalyzer.getTableRef(resolvedPath_.getRootDesc().getId());
+        Preconditions.checkNotNull(parentRef);
+        // InlineViews are currently not supported as a parent ref.
+        Preconditions.checkState(!(parentRef instanceof InlineViewRef));
+        correlatedTupleIds_.add(parentRef.getId());
+      }
+    }
+    if (!isRelative()) {
+      // Register a table-level privilege request as well as a column-level privilege
+      // request for the collection-typed column.
+      Preconditions.checkNotNull(resolvedPath_.getRootTable());
+      analyzer.registerAuthAndAuditEvent(resolvedPath_.getRootTable(), analyzer);
+      analyzer.registerPrivReq(new PrivilegeRequestBuilder().
+          allOf(Privilege.SELECT).onColumn(desc_.getTableName().getDb(),
+          desc_.getTableName().getTbl(), desc_.getPath().getRawPath().get(0))
+          .toRequest());
+    }
+    isAnalyzed_ = true;
+    analyzeHints(analyzer);
+
+    // TODO: For joins on nested collections, some join ops can be simplified
+    // due to the containment relationship of the parent and child. For example,
+    // a FULL OUTER JOIN would become a LEFT OUTER JOIN, or a RIGHT SEMI JOIN
+    // would become an INNER or CROSS JOIN.
+    analyzeJoin(analyzer);
+  }
+
+  @Override
+  public boolean isRelative() {
+    Preconditions.checkNotNull(resolvedPath_);
+    return resolvedPath_.getRootDesc() != null;
+  }
+
+  public Expr getCollectionExpr() { return collectionExpr_; }
+
+  @Override
+  protected CollectionTableRef clone() { return new CollectionTableRef(this); }
+
+  @Override
+  public void reset() {
+    super.reset();
+    collectionExpr_ = null;
+  }
+}

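The relative/absolute distinction that CollectionTableRef.analyze() draws corresponds to the two ways a collection can appear in a FROM clause. With made-up table and column names: a relative ref such as "SELECT a.item FROM nested_tbl t, t.int_array a" is resolved against t's tuple, is unnested once per row of t, and reports isRelative() == true (becoming correlated only when t belongs to an enclosing query block); an absolute ref such as "SELECT item FROM nested_tbl.int_array" is resolved against the catalog instead, which is why the table- and column-level SELECT privilege requests above are registered only in that case.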
http://git-wip-us.apache.org/repos/asf/incubator-impala/blob/b544f019/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java b/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
new file mode 100644
index 0000000..e7a3170
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
@@ -0,0 +1,143 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package com.cloudera.impala.analysis;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+
+import com.cloudera.impala.catalog.Type;
+import com.cloudera.impala.common.AnalysisException;
+import com.cloudera.impala.thrift.TColumn;
+import com.cloudera.impala.util.MetaStoreUtil;
+import com.google.common.base.Function;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+/**
+ * Represents a column definition in a CREATE/ALTER TABLE/VIEW statement.
+ * Column definitions in CREATE/ALTER TABLE statements require a column type,
+ * whereas column definitions in CREATE/ALTER VIEW statements infer the column type from
+ * the corresponding view definition. All column definitions have an optional comment.
+ * Since a column definition refers to a column stored in the Metastore, the column name
+ * must be valid according to the Metastore's rules (see @MetaStoreUtils).
+ */
+public class ColumnDef {
+  private final String colName_;
+  private String comment_;
+
+  // Required in CREATE/ALTER TABLE stmts. Set to NULL in CREATE/ALTER VIEW stmts,
+  // for which we setType() after analyzing the view definition stmt.
+  private final TypeDef typeDef_;
+  private Type type_;
+
+  public ColumnDef(String colName, TypeDef typeDef, String comment) {
+    colName_ = colName.toLowerCase();
+    typeDef_ = typeDef;
+    comment_ = comment;
+  }
+
+  /**
+   * Creates an analyzed ColumnDef from a Hive FieldSchema. Throws if the FieldSchema's
+   * type is not supported.
+   */
+  private ColumnDef(FieldSchema fs) throws AnalysisException {
+    Type type = Type.parseColumnType(fs.getType());
+    if (type == null) {
+      throw new AnalysisException(String.format(
+          "Unsupported type '%s' in Hive field schema '%s'",
+          fs.getType(), fs.getName()));
+    }
+    colName_ = fs.getName();
+    typeDef_ = new TypeDef(type);
+    comment_ = fs.getComment();
+    analyze();
+  }
+
+  public void setType(Type type) { type_ = type; }
+  public Type getType() { return type_; }
+  public TypeDef getTypeDef() { return typeDef_; }
+  public String getColName() { return colName_; }
+  public void setComment(String comment) { comment_ = comment; }
+  public String getComment() { return comment_; }
+
+  public void analyze() throws AnalysisException {
+    // Check whether the column name meets the Metastore's requirements.
+    if (!MetaStoreUtils.validateName(colName_)) {
+      throw new AnalysisException("Invalid column/field name: " + colName_);
+    }
+    if (typeDef_ != null) {
+      typeDef_.analyze(null);
+      type_ = typeDef_.getType();
+    }
+    Preconditions.checkNotNull(type_);
+    Preconditions.checkState(type_.isValid());
+    // Check HMS constraints of type and comment.
+    String typeSql = type_.toSql();
+    if (typeSql.length() > MetaStoreUtil.MAX_TYPE_NAME_LENGTH) {
+      throw new AnalysisException(String.format(
+          "Type of column '%s' exceeds maximum type length of %d characters:\n" +
+          "%s has %d characters.", colName_, MetaStoreUtil.MAX_TYPE_NAME_LENGTH,
+          typeSql, typeSql.length()));
+    }
+    if (comment_ != null &&
+        comment_.length() > MetaStoreUtil.CREATE_MAX_COMMENT_LENGTH) {
+      throw new AnalysisException(String.format(
+          "Comment of column '%s' exceeds maximum length of %d characters:\n" +
+          "%s has %d characters.", colName_, MetaStoreUtil.CREATE_MAX_COMMENT_LENGTH,
+          comment_, comment_.length()));
+    }
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder(colName_);
+    if (type_ != null) {
+      sb.append(" " + type_.toString());
+    } else {
+      sb.append(" " + typeDef_.toString());
+    }
+    if (comment_ != null) sb.append(String.format(" COMMENT '%s'", comment_));
+    return sb.toString();
+  }
+
+  public TColumn toThrift() {
+    TColumn col = new TColumn(new TColumn(getColName(), type_.toThrift()));
+    col.setComment(getComment());
+    return col;
+  }
+
+  public static List<ColumnDef> createFromFieldSchemas(List<FieldSchema> fieldSchemas)
+      throws AnalysisException {
+    List<ColumnDef> result = Lists.newArrayListWithCapacity(fieldSchemas.size());
+    for (FieldSchema fs: fieldSchemas) result.add(new ColumnDef(fs));
+    return result;
+  }
+
+  public static List<FieldSchema> toFieldSchemas(List<ColumnDef> colDefs) {
+    return Lists.transform(colDefs, new Function<ColumnDef, FieldSchema>() {
+      public FieldSchema apply(ColumnDef colDef) {
+        Preconditions.checkNotNull(colDef.getType());
+        return new FieldSchema(colDef.getColName(), colDef.getType().toSql(),
+            colDef.getComment());
+      }
+    });
+  }
+
+}
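
ColumnDef.analyze() is what enforces the Metastore-side constraints (a valid column name plus the type-string and comment length limits from MetaStoreUtil) before a column definition is turned into Thrift. A hedged usage sketch, assuming the frontend classes above are available and that TypeDef's Type-based constructor is accessible from calling code; the column name and comment are invented.

// Illustrative fragment only.
ColumnDef col = new ColumnDef("event_ts",
    new TypeDef(Type.parseColumnType("timestamp")), "time the event occurred");
col.analyze();            // throws AnalysisException on an invalid name or on a
                          // type/comment string that exceeds the HMS limits
TColumn thriftCol = col.toThrift();
System.out.println(col);  // prints something like:
                          //   event_ts TIMESTAMP COMMENT 'time the event occurred'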