Posted to commits@hive.apache.org by se...@apache.org on 2015/08/18 00:00:41 UTC

[11/50] [abbrv] hive git commit: HIVE-11398: Parse wide OR and wide AND trees to flat OR/AND trees (Jesus Camacho Rodriguez via Gopal V)

HIVE-11398: Parse wide OR and wide AND trees to flat OR/AND trees (Jesus Camacho Rodriguez via Gopal V)
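
The change is easiest to see in the .q.out updates below: nested binary conjunctions and disjunctions such as (a and (b and c)) are now built as a single n-ary node, (a and b and c). A minimal, self-contained sketch of that flattening step, using a hypothetical Expr class rather than Hive's ExprNodeGenericFuncDesc (the real work happens in TypeCheckProcFactory, further down in this patch):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Hypothetical stand-in for Hive's expression nodes; illustration only.
    public class FlattenSketch {

      static final class Expr {
        final String op;            // "AND", "OR", or a leaf predicate string
        final List<Expr> children;

        Expr(String op, Expr... children) {
          this.op = op;
          this.children = Arrays.asList(children);
        }

        @Override
        public String toString() {
          if (children.isEmpty()) {
            return op;
          }
          StringBuilder sb = new StringBuilder("(");
          for (int i = 0; i < children.size(); i++) {
            if (i > 0) {
              sb.append(' ').append(op.toLowerCase()).append(' ');
            }
            sb.append(children.get(i));
          }
          return sb.append(')').toString();
        }
      }

      // Absorb children that use the same boolean operator as their parent,
      // turning a deep binary tree into one n-ary AND/OR node.
      static Expr flatten(Expr e) {
        if (!e.op.equals("AND") && !e.op.equals("OR")) {
          return e;
        }
        List<Expr> flat = new ArrayList<>();
        for (Expr child : e.children) {
          Expr c = flatten(child);
          if (c.op.equals(e.op)) {
            flat.addAll(c.children);
          } else {
            flat.add(c);
          }
        }
        return new Expr(e.op, flat.toArray(new Expr[0]));
      }

      public static void main(String[] args) {
        Expr nested = new Expr("AND",
            new Expr("UDFToDouble(key) > 100.0"),
            new Expr("AND",
                new Expr("UDFToDouble(key) < 400.0"),
                new Expr("time < 200000000000")));
        // Prints the flat form seen in the updated plans:
        // (UDFToDouble(key) > 100.0 and UDFToDouble(key) < 400.0 and time < 200000000000)
        System.out.println(flatten(nested));
      }
    }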


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7f3e4811
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7f3e4811
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7f3e4811

Branch: refs/heads/hbase-metastore
Commit: 7f3e4811ee0293e4b3889984dc7d790415532307
Parents: 538ae70
Author: Gunther Hagleitner <gu...@apache.org>
Authored: Mon Aug 10 19:57:19 2015 -0700
Committer: Gunther Hagleitner <gu...@apache.org>
Committed: Mon Aug 10 19:57:19 2015 -0700

----------------------------------------------------------------------
 .../test/results/positive/hbase_timestamp.q.out |   8 +-
 .../optimizer/ConstantPropagateProcFactory.java |  83 +++++++---
 .../ql/optimizer/pcr/PcrExprProcFactory.java    | 151 +++++++++++++------
 .../hive/ql/optimizer/ppr/PartitionPruner.java  |  69 ++++++---
 .../hive/ql/parse/TypeCheckProcFactory.java     |  40 ++++-
 .../hive/ql/udf/generic/GenericUDFOPAnd.java    |  59 +++++---
 .../hive/ql/udf/generic/GenericUDFOPOr.java     |  59 +++++---
 .../queries/clientpositive/flatten_and_or.q     |  17 +++
 .../annotate_stats_deep_filters.q.out           |   4 +-
 .../clientpositive/dynamic_rdd_cache.q.out      |   6 +-
 .../results/clientpositive/flatten_and_or.q.out |  66 ++++++++
 .../groupby_multi_single_reducer3.q.out         |   8 +-
 .../clientpositive/input_testxpath4.q.out       |   2 +-
 .../join_cond_pushdown_unqual4.q.out            |   2 +-
 .../test/results/clientpositive/lineage3.q.out  |   2 +-
 .../clientpositive/orc_predicate_pushdown.q.out |  36 ++---
 .../results/clientpositive/ppd_gby_join.q.out   |   4 +-
 .../test/results/clientpositive/ppd_join.q.out  |   4 +-
 .../test/results/clientpositive/ppd_join2.q.out |   4 +-
 .../test/results/clientpositive/ppd_join3.q.out |   6 +-
 .../clientpositive/ppd_outer_join4.q.out        |   2 +-
 .../spark/dynamic_rdd_cache.q.out               |   6 +-
 .../spark/groupby_multi_single_reducer3.q.out   |   8 +-
 .../spark/join_cond_pushdown_unqual4.q.out      |   2 +-
 .../clientpositive/spark/ppd_gby_join.q.out     |   4 +-
 .../results/clientpositive/spark/ppd_join.q.out |   4 +-
 .../clientpositive/spark/ppd_join2.q.out        |   4 +-
 .../clientpositive/spark/ppd_join3.q.out        |   6 +-
 .../clientpositive/spark/ppd_outer_join4.q.out  |   2 +-
 .../clientpositive/spark/vectorization_0.q.out  |   2 +-
 .../clientpositive/spark/vectorization_13.q.out |   4 +-
 .../clientpositive/spark/vectorization_15.q.out |   2 +-
 .../clientpositive/spark/vectorization_17.q.out |   2 +-
 .../spark/vectorization_short_regress.q.out     |  22 +--
 .../clientpositive/spark/vectorized_case.q.out  |   2 +-
 .../tez/vector_mr_diff_schema_alias.q.out       |   2 +-
 .../clientpositive/tez/vectorization_0.q.out    |   2 +-
 .../clientpositive/tez/vectorization_13.q.out   |   4 +-
 .../clientpositive/tez/vectorization_15.q.out   |   2 +-
 .../clientpositive/tez/vectorization_17.q.out   |   2 +-
 .../clientpositive/tez/vectorization_7.q.out    |   4 +-
 .../clientpositive/tez/vectorization_8.q.out    |   4 +-
 .../tez/vectorization_short_regress.q.out       |  22 +--
 .../clientpositive/tez/vectorized_case.q.out    |   2 +-
 ql/src/test/results/clientpositive/udf_or.q.out |   4 +-
 .../vector_mr_diff_schema_alias.q.out           |   2 +-
 .../clientpositive/vectorization_0.q.out        |   2 +-
 .../clientpositive/vectorization_13.q.out       |   4 +-
 .../clientpositive/vectorization_15.q.out       |   2 +-
 .../clientpositive/vectorization_17.q.out       |   2 +-
 .../clientpositive/vectorization_7.q.out        |   4 +-
 .../clientpositive/vectorization_8.q.out        |   4 +-
 .../vectorization_short_regress.q.out           |  22 +--
 .../clientpositive/vectorized_case.q.out        |   2 +-
 54 files changed, 530 insertions(+), 264 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
index 7aef504..538e551 100644
--- a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
@@ -174,7 +174,7 @@ STAGE PLANS:
             filterExpr: (((key > 100.0) and (key < 400.0)) and (time < 200000000000)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time < 200000000000))) (type: boolean)
+              predicate: ((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0) and (time < 200000000000)) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
@@ -223,7 +223,7 @@ STAGE PLANS:
             filterExpr: (((key > 100.0) and (key < 400.0)) and (time > 100000000000)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time > 100000000000))) (type: boolean)
+              predicate: ((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0) and (time > 100000000000)) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
@@ -274,7 +274,7 @@ STAGE PLANS:
             filterExpr: (((key > 100.0) and (key < 400.0)) and (time <= 100000000000)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time <= 100000000000))) (type: boolean)
+              predicate: ((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0) and (time <= 100000000000)) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)
@@ -323,7 +323,7 @@ STAGE PLANS:
             filterExpr: (((key > 100.0) and (key < 400.0)) and (time >= 200000000000)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and ((UDFToDouble(key) < 400.0) and (time >= 200000000000))) (type: boolean)
+              predicate: ((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0) and (time >= 200000000000)) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP) (type: timestamp)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
index 410735c..cf10c52 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
@@ -17,6 +17,7 @@ package org.apache.hadoop.hive.ql.optimizer;
 
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.BitSet;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -86,6 +87,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 import com.google.common.collect.ImmutableSet;
@@ -506,53 +508,92 @@ public final class ConstantPropagateProcFactory {
      }
     }
     if (udf instanceof GenericUDFOPAnd) {
-      for (int i = 0; i < 2; i++) {
+      final BitSet positionsToRemove = new BitSet();
+      final List<ExprNodeDesc> notNullExprs = new ArrayList<ExprNodeDesc>();
+      final List<Integer> notNullExprsPositions = new ArrayList<Integer>();
+      final List<ExprNodeDesc> compareExprs = new ArrayList<ExprNodeDesc>();
+      for (int i = 0; i < newExprs.size(); i++) {
         ExprNodeDesc childExpr = newExprs.get(i);
-        ExprNodeDesc other = newExprs.get(Math.abs(i - 1));
         if (childExpr instanceof ExprNodeConstantDesc) {
           ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr;
           if (Boolean.TRUE.equals(c.getValue())) {
-
             // if true, prune it
-            return other;
+            positionsToRemove.set(i);
           } else {
-
-            // if false return false
+            // if false, return false
             return childExpr;
           }
-        } else // Try to fold (key = 86) and (key is not null) to (key = 86)
-        if (childExpr instanceof ExprNodeGenericFuncDesc &&
-            ((ExprNodeGenericFuncDesc)childExpr).getGenericUDF() instanceof GenericUDFOPNotNull &&
-            childExpr.getChildren().get(0) instanceof ExprNodeColumnDesc && other instanceof ExprNodeGenericFuncDesc
-            && ((ExprNodeGenericFuncDesc)other).getGenericUDF() instanceof GenericUDFBaseCompare
-            && other.getChildren().size() == 2) {
-          ExprNodeColumnDesc colDesc = getColumnExpr(other.getChildren().get(0));
+        } else if (childExpr instanceof ExprNodeGenericFuncDesc &&
+                ((ExprNodeGenericFuncDesc)childExpr).getGenericUDF() instanceof GenericUDFOPNotNull &&
+                childExpr.getChildren().get(0) instanceof ExprNodeColumnDesc) {
+          notNullExprs.add(childExpr.getChildren().get(0));
+          notNullExprsPositions.add(i);
+        } else if (childExpr instanceof ExprNodeGenericFuncDesc
+                && ((ExprNodeGenericFuncDesc)childExpr).getGenericUDF() instanceof GenericUDFBaseCompare
+                && childExpr.getChildren().size() == 2) {
+          ExprNodeColumnDesc colDesc = getColumnExpr(childExpr.getChildren().get(0));
           if (null == colDesc) {
-            colDesc = getColumnExpr(other.getChildren().get(1));
+            colDesc = getColumnExpr(childExpr.getChildren().get(1));
           }
-          if (null != colDesc && colDesc.isSame(childExpr.getChildren().get(0))) {
-            return other;
+          if (colDesc != null) {
+            compareExprs.add(colDesc);
+          }
+        }
+      }
+      // Try to fold (key = 86) and (key is not null) to (key = 86)
+      for (int i = 0; i < notNullExprs.size(); i++) {
+        for (ExprNodeDesc other : compareExprs) {
+          if (notNullExprs.get(i).isSame(other)) {
+            positionsToRemove.set(notNullExprsPositions.get(i));
+            break;
           }
         }
       }
+      // Remove unnecessary expressions
+      int pos = 0;
+      int removed = 0;
+      while ((pos = positionsToRemove.nextSetBit(pos)) != -1) {
+        newExprs.remove(pos - removed);
+        pos++;
+        removed++;
+      }
+      if (newExprs.size() == 0) {
+        return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.TRUE);
+      }
+      if (newExprs.size() == 1) {
+        return newExprs.get(0);
+      }
     }
 
     if (udf instanceof GenericUDFOPOr) {
-      for (int i = 0; i < 2; i++) {
+      final BitSet positionsToRemove = new BitSet();
+      for (int i = 0; i < newExprs.size(); i++) {
         ExprNodeDesc childExpr = newExprs.get(i);
         if (childExpr instanceof ExprNodeConstantDesc) {
           ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr;
           if (Boolean.FALSE.equals(c.getValue())) {
-
             // if false, prune it
-            return newExprs.get(Math.abs(i - 1));
-          } else {
-
+            positionsToRemove.set(i);
+          } else
+          if (Boolean.TRUE.equals(c.getValue())) {
             // if true return true
             return childExpr;
           }
         }
       }
+      int pos = 0;
+      int removed = 0;
+      while ((pos = positionsToRemove.nextSetBit(pos)) != -1) {
+        newExprs.remove(pos - removed);
+        pos++;
+        removed++;
+      }
+      if (newExprs.size() == 0) {
+        return new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.FALSE);
+      }
+      if (newExprs.size() == 1) {
+        return newExprs.get(0);
+      }
     }
 
     if (udf instanceof GenericUDFWhen) {
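
With n-ary AND/OR children, the constant folding above proceeds child by child: a constant FALSE short-circuits the whole conjunction, constant TRUE children are pruned (as are "col is not null" checks implied by a comparison on the same column), and if every child was pruned the node folds back to a TRUE constant; OR mirrors this with the roles of TRUE and FALSE swapped. A minimal sketch of the pruning rule over plain Booleans, with null standing in for a non-constant child (not Hive's actual API):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // null stands in for a non-constant child expression; illustration only.
    public class ConstantFoldSketch {

      // Returns the surviving children of an n-ary AND after constant folding;
      // a single-element [true] or [false] result means the node folded to a constant.
      static List<Boolean> foldAnd(List<Boolean> children) {
        List<Boolean> kept = new ArrayList<>();
        for (Boolean c : children) {
          if (Boolean.FALSE.equals(c)) {
            return Arrays.asList(Boolean.FALSE);   // one FALSE decides the conjunction
          }
          if (!Boolean.TRUE.equals(c)) {
            kept.add(c);                           // constant TRUE children are pruned
          }
        }
        return kept.isEmpty() ? Arrays.asList(Boolean.TRUE) : kept;
      }

      public static void main(String[] args) {
        System.out.println(foldAnd(Arrays.asList(true, null, null)));  // [null, null]
        System.out.println(foldAnd(Arrays.asList(true, false, null))); // [false]
        System.out.println(foldAnd(Arrays.asList(true, true)));        // [true]
      }
    }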

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
index d5102bc..71a6c73 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/pcr/PcrExprProcFactory.java
@@ -25,6 +25,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Stack;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
@@ -57,6 +59,9 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
  * It also generates node by Modifying expr trees with partition conditions removed
  */
 public final class PcrExprProcFactory {
+
+  public static final Log LOG = LogFactory.getLog(PcrExprProcFactory.class.getName());
+
   static Object evalExprWithPart(ExprNodeDesc expr, Partition p, List<VirtualColumn> vcs)
       throws SemanticException {
     StructObjectInspector rowObjectInspector;
@@ -124,25 +129,39 @@ public final class PcrExprProcFactory {
     // prevent instantiation
   }
 
-  static Boolean opAnd(Boolean op1, Boolean op2) {
+  static Boolean opAnd(Boolean... ops) {
     // When people forget to quote a string, op1/op2 is null.
     // For example, select * from some_table where ds > 2012-12-1 and ds < 2012-12-2 .
-    if (op1 != null && op1.equals(Boolean.FALSE) || op2 != null && op2.equals(Boolean.FALSE)) {
-      return Boolean.FALSE;
+    boolean anyNull = false;
+    for (Boolean op : ops) {
+      if (op == null) {
+        anyNull = true;
+        continue;
+      }
+      if (op.equals(Boolean.FALSE)) {
+        return Boolean.FALSE;
+      }
     }
-    if (op1 == null || op2 == null) {
+    if (anyNull) {
       return null;
     }
     return Boolean.TRUE;
   }
 
-  static Boolean opOr(Boolean op1, Boolean op2) {
+  static Boolean opOr(Boolean... ops) {
     // When people forget to quote a string, op1/op2 is null.
     // For example, select * from some_table where ds > 2012-12-1 or ds < 2012-12-2 .
-    if (op1 != null && op1.equals(Boolean.TRUE) || op2 != null && op2.equals(Boolean.TRUE)) {
-      return Boolean.TRUE;
+    boolean anyNull = false;
+    for (Boolean op : ops) {
+      if (op == null) {
+        anyNull = true;
+        continue;
+      }
+      if (op.equals(Boolean.TRUE)) {
+        return Boolean.TRUE;
+      }
     }
-    if (op1 == null || op2 == null) {
+    if (anyNull) {
       return null;
     }
     return Boolean.FALSE;
@@ -255,51 +274,95 @@ public final class PcrExprProcFactory {
               getOutExpr(fd, nodeOutputs));
         }
       } else if (FunctionRegistry.isOpAnd(fd)) {
-        assert (nodeOutputs.length == 2);
-        NodeInfoWrapper c1 = (NodeInfoWrapper)nodeOutputs[0];
-        NodeInfoWrapper c2 = (NodeInfoWrapper)nodeOutputs[1];
-
-        if (c1.state == WalkState.FALSE) {
-          return c1;
-        } else if (c2.state == WalkState.FALSE) {
-          return c2;
-        } else if (c1.state == WalkState.TRUE) {
-          return c2;
-        } else if (c2.state == WalkState.TRUE) {
-          return c1;
-        } else if (c1.state == WalkState.UNKNOWN || c2.state == WalkState.UNKNOWN) {
-          return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
-        } else if (c1.state == WalkState.DIVIDED && c2.state == WalkState.DIVIDED) {
+        boolean anyUnknown = false; // Whether any of the node outputs is unknown
+        boolean allDivided = true; // Whether all of the node outputs are divided
+        List<NodeInfoWrapper> newNodeOutputsList =
+                new ArrayList<NodeInfoWrapper>(nodeOutputs.length);
+        for (int i = 0; i < nodeOutputs.length; i++) {
+          NodeInfoWrapper c = (NodeInfoWrapper)nodeOutputs[i];
+          if (c.state == WalkState.FALSE) {
+            return c;
+          }
+          if (c.state == WalkState.UNKNOWN) {
+            anyUnknown = true;
+          }
+          if (c.state != WalkState.DIVIDED) {
+            allDivided = false;
+          }
+          if (c.state != WalkState.TRUE) {
+            newNodeOutputsList.add(c);
+          }
+        }
+        // If all of them were true, return true
+        if (newNodeOutputsList.size() == 0) {
+          return new NodeInfoWrapper(WalkState.TRUE, null,
+                  new ExprNodeConstantDesc(fd.getTypeInfo(), Boolean.TRUE));
+        }
+        // If we are left with a single child, return the child
+        if (newNodeOutputsList.size() == 1) {
+          return newNodeOutputsList.get(0);
+        }
+        Object[] newNodeOutputs = newNodeOutputsList.toArray();
+        if (anyUnknown) {
+          return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, newNodeOutputs));
+        }
+        if (allDivided) {
           Boolean[] results = new Boolean[ctx.getPartList().size()];
           for (int i = 0; i < ctx.getPartList().size(); i++) {
-            results[i] = opAnd(c1.ResultVector[i], c2.ResultVector[i]);
+            Boolean[] andArray = new Boolean[newNodeOutputs.length];
+            for (int j = 0; j < newNodeOutputs.length; j++) {
+              andArray[j] = ((NodeInfoWrapper) newNodeOutputs[j]).ResultVector[i];
+            }
+            results[i] = opAnd(andArray);
           }
-          return getResultWrapFromResults(results, fd, nodeOutputs);
+          return getResultWrapFromResults(results, fd, newNodeOutputs);
         }
-        return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
+        return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, newNodeOutputs));
       } else if (FunctionRegistry.isOpOr(fd)) {
-        assert (nodeOutputs.length == 2);
-        NodeInfoWrapper c1 = (NodeInfoWrapper)nodeOutputs[0];
-        NodeInfoWrapper c2 = (NodeInfoWrapper)nodeOutputs[1];
-
-        if (c1.state == WalkState.TRUE) {
-          return c1;
-        } else if (c2.state == WalkState.TRUE) {
-          return c2;
-        } else if (c1.state == WalkState.FALSE) {
-          return c2;
-        } else if (c2.state == WalkState.FALSE) {
-          return c1;
-        } else if (c1.state == WalkState.UNKNOWN || c2.state == WalkState.UNKNOWN) {
-          return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
-        } else if (c1.state == WalkState.DIVIDED && c2.state == WalkState.DIVIDED) {
+        boolean anyUnknown = false; // Whether any of the node outputs is unknown
+        boolean allDivided = true; // Whether all of the node outputs are divided
+        List<NodeInfoWrapper> newNodeOutputsList =
+                new ArrayList<NodeInfoWrapper>(nodeOutputs.length);
+        for (int i = 0; i< nodeOutputs.length; i++) {
+          NodeInfoWrapper c = (NodeInfoWrapper)nodeOutputs[i];
+          if (c.state == WalkState.TRUE) {
+            return c;
+          }
+          if (c.state == WalkState.UNKNOWN) {
+            anyUnknown = true;
+          }
+          if (c.state != WalkState.DIVIDED) {
+            allDivided = false;
+          }
+          if (c.state != WalkState.FALSE) {
+            newNodeOutputsList.add(c);
+          }
+        }
+        // If all of them were false, return false
+        if (newNodeOutputsList.size() == 0) {
+          return new NodeInfoWrapper(WalkState.FALSE, null,
+                  new ExprNodeConstantDesc(fd.getTypeInfo(), Boolean.FALSE));
+        }
+        // If we are left with a single child, return the child
+        if (newNodeOutputsList.size() == 1) {
+          return newNodeOutputsList.get(0);
+        }
+        Object[] newNodeOutputs = newNodeOutputsList.toArray();
+        if (anyUnknown) {
+          return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, newNodeOutputs));
+        }
+        if (allDivided) {
           Boolean[] results = new Boolean[ctx.getPartList().size()];
           for (int i = 0; i < ctx.getPartList().size(); i++) {
-            results[i] = opOr(c1.ResultVector[i], c2.ResultVector[i]);
+            Boolean[] orArray = new Boolean[newNodeOutputs.length];
+            for (int j = 0; j < newNodeOutputs.length; j++) {
+              orArray[j] = ((NodeInfoWrapper) newNodeOutputs[j]).ResultVector[i];
+            }
+            results[i] = opOr(orArray);
           }
-          return getResultWrapFromResults(results, fd, nodeOutputs);
+          return getResultWrapFromResults(results, fd, newNodeOutputs);
         }
-        return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
+        return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, newNodeOutputs));
       } else if (!FunctionRegistry.isDeterministic(fd.getGenericUDF())) {
         // If it's a non-deterministic UDF, set unknown to true
         return new NodeInfoWrapper(WalkState.UNKNOWN, null,
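
The varargs opAnd/opOr above keep SQL's three-valued logic: for AND, any definite FALSE decides the result, otherwise a single unknown (null) operand, e.g. from an unquoted date literal, makes the result unknown; opOr is symmetric with TRUE. The same truth rules as a standalone sketch, assuming nothing beyond java.lang.Boolean:

    // null means "unknown", e.g. a predicate on an unquoted date literal;
    // standalone sketch of the three-valued helpers, not the Hive classes.
    public class ThreeValuedLogicSketch {

      static Boolean opAnd(Boolean... ops) {
        boolean anyNull = false;
        for (Boolean op : ops) {
          if (op == null) {
            anyNull = true;
          } else if (!op) {
            return Boolean.FALSE;      // a definite FALSE decides the conjunction
          }
        }
        return anyNull ? null : Boolean.TRUE;
      }

      static Boolean opOr(Boolean... ops) {
        boolean anyNull = false;
        for (Boolean op : ops) {
          if (op == null) {
            anyNull = true;
          } else if (op) {
            return Boolean.TRUE;       // a definite TRUE decides the disjunction
          }
        }
        return anyNull ? null : Boolean.FALSE;
      }

      public static void main(String[] args) {
        System.out.println(opAnd(true, null, true));   // null (unknown)
        System.out.println(opAnd(true, false, null));  // false
        System.out.println(opOr(false, null, true));   // true
        System.out.println(opOr(false, false, false)); // false
      }
    }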

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
index dccb598..d264559 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
@@ -277,35 +277,64 @@ public class PartitionPruner implements Transform {
       GenericUDF udf = ((ExprNodeGenericFuncDesc)expr).getGenericUDF();
       boolean isAnd = udf instanceof GenericUDFOPAnd;
       boolean isOr = udf instanceof GenericUDFOPOr;
+      List<ExprNodeDesc> children = expr.getChildren();
 
-      if (isAnd || isOr) {
-        List<ExprNodeDesc> children = expr.getChildren();
-        ExprNodeDesc left = compactExpr(children.get(0));
-        ExprNodeDesc right = compactExpr(children.get(1));
+      if (isAnd) {
         // Non-partition expressions are converted to nulls.
-        if (left == null && right == null) {
+        List<ExprNodeDesc> newChildren = new ArrayList<ExprNodeDesc>();
+        boolean allTrue = true;
+        for (ExprNodeDesc child : children) {
+          ExprNodeDesc compactChild = compactExpr(child);
+          if (compactChild != null) {
+            if (!isTrueExpr(compactChild)) {
+              newChildren.add(compactChild);
+              allTrue = false;
+            }
+            if (isFalseExpr(compactChild)) {
+              return new ExprNodeConstantDesc(Boolean.FALSE);
+            }
+          } else {
+            allTrue = false;
+          }
+        }
+
+        if (newChildren.size() == 0) {
           return null;
-        } else if (left == null) {
-          return isAnd ? right : null;
-        } else if (right == null) {
-          return isAnd ? left : null;
         }
-        // Handle boolean expressions
-        boolean isLeftFalse = isFalseExpr(left), isRightFalse = isFalseExpr(right),
-            isLeftTrue = isTrueExpr(left), isRightTrue = isTrueExpr(right);
-        if ((isRightTrue && isLeftTrue) || (isOr && (isLeftTrue || isRightTrue))) {
+        if (newChildren.size() == 1) {
+          return newChildren.get(0);
+        }
+        if (allTrue) {
           return new ExprNodeConstantDesc(Boolean.TRUE);
-        } else if ((isRightFalse && isLeftFalse) || (isAnd && (isLeftFalse || isRightFalse))) {
+        }
+        // Nothing to compact, update expr with compacted children.
+        ((ExprNodeGenericFuncDesc) expr).setChildren(newChildren);
+      } else if (isOr) {
+        // Non-partition expressions are converted to nulls.
+        List<ExprNodeDesc> newChildren = new ArrayList<ExprNodeDesc>();
+        boolean allFalse = true;
+        for (ExprNodeDesc child : children) {
+          ExprNodeDesc compactChild = compactExpr(child);
+          if (compactChild != null) {
+            if (isTrueExpr(compactChild)) {
+              return new ExprNodeConstantDesc(Boolean.TRUE);
+            }
+            if (!isFalseExpr(compactChild)) {
+              newChildren.add(compactChild);
+              allFalse = false;
+            }
+          } else {
+            return null;
+          }
+        }
+
+        if (allFalse) {
           return new ExprNodeConstantDesc(Boolean.FALSE);
-        } else if ((isAnd && isLeftTrue) || (isOr && isLeftFalse)) {
-          return right;
-        } else if ((isAnd && isRightTrue) || (isOr && isRightFalse)) {
-          return left;
         }
         // Nothing to compact, update expr with compacted children.
-        children.set(0, left);
-        children.set(1, right);
+        ((ExprNodeGenericFuncDesc) expr).setChildren(newChildren);
       }
+
       return expr;
     } else {
       throw new IllegalStateException("Unexpected type of ExprNodeDesc: " + expr.getExprString());
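
A subtlety in the pruner rewrite above: children that reference only non-partition columns compact to null, and the n-ary handling treats that asymmetrically. Inside an AND such a child is simply dropped and the remaining partition conditions still prune safely; inside an OR a single such child makes the whole disjunction unusable for pruning. A small sketch of that rule, with strings standing in for compacted children (hypothetical helper, not the PartitionPruner API):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Children are modeled as strings: "TRUE", "FALSE", a remaining partition
    // predicate such as "ds = '2015-08-10'", or null for a non-partition
    // condition that compacted away. Hypothetical helper, illustration only.
    public class CompactSketch {

      // AND: null and TRUE children are dropped, FALSE short-circuits;
      // a null result means no partition constraint remains.
      static List<String> compactAnd(List<String> children) {
        List<String> kept = new ArrayList<>();
        for (String c : children) {
          if (c == null || "TRUE".equals(c)) {
            continue;
          }
          if ("FALSE".equals(c)) {
            return Arrays.asList("FALSE");
          }
          kept.add(c);
        }
        return kept.isEmpty() ? null : kept;
      }

      // OR: TRUE short-circuits, FALSE children are dropped, but one null
      // (non-partition) child makes the whole disjunction unusable for pruning.
      static List<String> compactOr(List<String> children) {
        List<String> kept = new ArrayList<>();
        for (String c : children) {
          if (c == null) {
            return null;
          }
          if ("TRUE".equals(c)) {
            return Arrays.asList("TRUE");
          }
          if (!"FALSE".equals(c)) {
            kept.add(c);
          }
        }
        return kept.isEmpty() ? Arrays.asList("FALSE") : kept;
      }

      public static void main(String[] args) {
        // AND: the non-partition condition is dropped, pruning still uses ds.
        System.out.println(compactAnd(Arrays.asList("ds = '2015-08-10'", null)));  // [ds = '2015-08-10']
        // OR: the non-partition condition forces scanning all partitions.
        System.out.println(compactOr(Arrays.asList("ds = '2015-08-10'", null)));   // null
      }
    }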

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index d823f03..cd68f4e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -62,7 +62,9 @@ import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.SettableUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -814,10 +816,12 @@ public class TypeCheckProcFactory {
           ((SettableUDF)genericUDF).setTypeInfo(typeInfo);
         }
       }
-
+      
       List<ExprNodeDesc> childrenList = new ArrayList<ExprNodeDesc>(children.length);
+
       childrenList.addAll(Arrays.asList(children));
-      return ExprNodeGenericFuncDesc.newInstance(genericUDF, childrenList);
+      return ExprNodeGenericFuncDesc.newInstance(genericUDF,
+          childrenList);
     }
 
     public static ExprNodeDesc getFuncExprNodeDesc(String udfName,
@@ -1048,8 +1052,36 @@ public class TypeCheckProcFactory {
             }
           }
         }
-
-        desc = ExprNodeGenericFuncDesc.newInstance(genericUDF, funcText, children);
+        if (genericUDF instanceof GenericUDFOPOr) {
+          // flatten OR
+          List<ExprNodeDesc> childrenList = new ArrayList<ExprNodeDesc>(
+              children.size());
+          for (ExprNodeDesc child : children) {
+            if (FunctionRegistry.isOpOr(child)) {
+              childrenList.addAll(child.getChildren());
+            } else {
+              childrenList.add(child);
+            }
+          }
+          desc = ExprNodeGenericFuncDesc.newInstance(genericUDF, funcText,
+              childrenList);
+        } else if (genericUDF instanceof GenericUDFOPAnd) {
+          // flatten AND
+          List<ExprNodeDesc> childrenList = new ArrayList<ExprNodeDesc>(
+              children.size());
+          for (ExprNodeDesc child : children) {
+            if (FunctionRegistry.isOpAnd(child)) {
+              childrenList.addAll(child.getChildren());
+            } else {
+              childrenList.add(child);
+            }
+          }
+          desc = ExprNodeGenericFuncDesc.newInstance(genericUDF, funcText,
+              childrenList);
+        } else {
+          desc = ExprNodeGenericFuncDesc.newInstance(genericUDF, funcText,
+              children);
+        }
       }
       // UDFOPPositive is a no-op.
       // However, we still create it, and then remove it here, to make sure we

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPAnd.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPAnd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPAnd.java
index 47abb20..db7fbac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPAnd.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPAnd.java
@@ -35,46 +35,43 @@ import org.apache.hadoop.io.BooleanWritable;
 /**
  * GenericUDF Class for computing and.
  */
-@Description(name = "and", value = "a _FUNC_ b - Logical and")
+@Description(name = "and", value = "a1 _FUNC_ a2 _FUNC_ ... _FUNC_ an - Logical and")
 @VectorizedExpressions({ColAndCol.class, FilterExprAndExpr.class, FilterColAndScalar.class,
     FilterScalarAndColumn.class})
 public class GenericUDFOPAnd extends GenericUDF {
   private final BooleanWritable result = new BooleanWritable();
-  private transient BooleanObjectInspector boi0,boi1;
+  private transient BooleanObjectInspector boi[];
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments)
       throws UDFArgumentException {
-    if (arguments.length != 2) {
+    if (arguments.length < 2) {
       throw new UDFArgumentLengthException(
-          "The operator 'AND' only accepts 2 argument.");
+          "The operator 'AND' accepts at least 2 arguments.");
+    }
+    boi = new BooleanObjectInspector[arguments.length];
+    for (int i = 0; i < arguments.length; i++) {
+      boi[i] = (BooleanObjectInspector) arguments[i];
     }
-    boi0 = (BooleanObjectInspector) arguments[0];
-    boi1 = (BooleanObjectInspector) arguments[1];
     return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
   }
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    boolean bool_a0 = false, bool_a1 = false;
-    Object a0 = arguments[0].get();
-    if (a0 != null) {
-      bool_a0 = boi0.get(a0);
-      if (bool_a0 == false) {
-        result.set(false);
-        return result;
-      }
-    }
-
-    Object a1 = arguments[1].get();
-    if (a1 != null) {
-      bool_a1 = boi1.get(a1);
-      if (bool_a1 == false) {
-        result.set(false);
-        return result;
+    boolean notNull = true;
+    for (int i = 0; i < arguments.length; i++) {
+      Object a = arguments[i].get();
+      if (a != null) {
+        boolean bool_a = boi[i].get(a);
+        if (bool_a == false) {
+          result.set(false);
+          return result;
+        }
+      } else {
+        notNull = false;
       }
     }
 
-    if ((a0 != null && bool_a0 == true) && (a1 != null && bool_a1 == true)) {
+    if (notNull) {
       result.set(true);
       return result;
     }
@@ -84,8 +81,20 @@ public class GenericUDFOPAnd extends GenericUDF {
 
   @Override
   public String getDisplayString(String[] children) {
-    assert (children.length == 2);
-    return "(" + children[0] + " and " + children[1] + ")";
+    assert (children.length >= 2);
+    StringBuilder sb = new StringBuilder();
+    sb.append("(");
+    boolean first = true;
+    for (String and : children) {
+      if (!first) {
+        sb.append(" and ");
+      } else {
+        first = false;
+      }
+      sb.append(and);
+    }
+    sb.append(")");
+    return sb.toString();
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPOr.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPOr.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPOr.java
index cd656a0..4160610 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPOr.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPOr.java
@@ -35,47 +35,44 @@ import org.apache.hadoop.io.BooleanWritable;
 /**
  * GenericUDF Class for computing or.
  */
-@Description(name = "or", value = "a _FUNC_ b - Logical or")
+@Description(name = "or", value = "a1 _FUNC_ a2 _FUNC_ ... _FUNC_ an - Logical or")
 @VectorizedExpressions({ColOrCol.class, FilterExprOrExpr.class, FilterColOrScalar.class,
     FilterScalarOrColumn.class})
 public class GenericUDFOPOr extends GenericUDF {
   private final BooleanWritable result = new BooleanWritable();
-  private transient BooleanObjectInspector boi0,boi1;
+  private transient BooleanObjectInspector[] boi;
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments)
       throws UDFArgumentException {
-    if (arguments.length != 2) {
+    if (arguments.length < 2) {
       throw new UDFArgumentLengthException(
-          "The operator 'OR' only accepts 2 argument.");
+          "The operator 'OR' accepts at least 2 arguments.");
+    }
+    boi = new BooleanObjectInspector[arguments.length];
+    for (int i = 0; i < arguments.length; i++) {
+      boi[i] = (BooleanObjectInspector) arguments[i];
     }
-    boi0 = (BooleanObjectInspector) arguments[0];
-    boi1 = (BooleanObjectInspector) arguments[1];
     return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
   }
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    boolean bool_a0 = false, bool_a1 = false;
-    Object a0 = arguments[0].get();
-    if (a0 != null) {
-      bool_a0 = boi0.get(a0);
-      if (bool_a0 == true) {
-        result.set(true);
-        return result;
-      }
-    }
-
-    Object a1 = arguments[1].get();
-    if (a1 != null) {
-      bool_a1 = boi1.get(a1);
-      if (bool_a1 == true) {
-        result.set(true);
-        return result;
+    boolean notNull = true;
+    for (int i = 0; i < arguments.length; i++) {
+      Object a = arguments[i].get();
+      if (a != null) {
+        boolean bool_a = boi[i].get(a);
+        if (bool_a == true) {
+          result.set(true);
+          return result;
+        }
+      } else {
+        notNull = false;
       }
     }
 
-    if ((a0 != null && bool_a0 == false) && (a1 != null && bool_a1 == false)) {
+    if (notNull) {
       result.set(false);
       return result;
     }
@@ -85,8 +82,20 @@ public class GenericUDFOPOr extends GenericUDF {
 
   @Override
   public String getDisplayString(String[] children) {
-    assert (children.length == 2);
-    return "(" + children[0] + " or " + children[1] + ")";
+    assert (children.length >= 2);
+    StringBuilder sb = new StringBuilder();
+    sb.append("(");
+    boolean first = true;
+    for (String or : children) {
+      if (!first) {
+        sb.append(" or ");
+      } else {
+        first = false;
+      }
+      sb.append(or);
+    }
+    sb.append(")");
+    return sb.toString();
   }
 
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/queries/clientpositive/flatten_and_or.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/flatten_and_or.q b/ql/src/test/queries/clientpositive/flatten_and_or.q
new file mode 100644
index 0000000..6d65225
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/flatten_and_or.q
@@ -0,0 +1,17 @@
+explain
+SELECT key
+FROM src
+WHERE
+   ((key = '0'
+   AND value = '8') OR (key = '1'
+   AND value = '5') OR (key = '2'
+   AND value = '6') OR (key = '3'
+   AND value = '8') OR (key = '4'
+   AND value = '1') OR (key = '5'
+   AND value = '6') OR (key = '6'
+   AND value = '1') OR (key = '7'
+   AND value = '1') OR (key = '8'
+   AND value = '1') OR (key = '9'
+   AND value = '1') OR (key = '10'
+   AND value = '3'))
+;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/annotate_stats_deep_filters.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/annotate_stats_deep_filters.q.out b/ql/src/test/results/clientpositive/annotate_stats_deep_filters.q.out
index 788d6c8..fc4f294 100644
--- a/ql/src/test/results/clientpositive/annotate_stats_deep_filters.q.out
+++ b/ql/src/test/results/clientpositive/annotate_stats_deep_filters.q.out
@@ -120,7 +120,7 @@ STAGE PLANS:
             alias: over1k
             Statistics: Num rows: 2098 Data size: 211174 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
-              predicate: (((t = 1) and (si = 2)) or (((t = 2) and (si = 3)) or (((t = 3) and (si = 4)) or (((t = 4) and (si = 5)) or (((t = 5) and (si = 6)) or (((t = 6) and (si = 7)) or (((t = 7) and (si = 8)) or (((t = 9) and (si = 10)) or (((t = 10) and (si = 11)) or (((t = 11) and (si = 12)) or (((t = 12) and (si = 13)) or (((t = 13) and (si = 14)) or (((t = 14) and (si = 15)) or (((t = 15) and (si = 16)) or (((t = 16) and (si = 17)) or (((t = 17) and (si = 18)) or (((t = 27) and (si = 28)) or (((t = 37) and (si = 38)) or (((t = 47) and (si = 48)) or ((t = 52) and (si = 53))))))))))))))))))))) (type: boolean)
+              predicate: (((t = 1) and (si = 2)) or ((t = 2) and (si = 3)) or ((t = 3) and (si = 4)) or ((t = 4) and (si = 5)) or ((t = 5) and (si = 6)) or ((t = 6) and (si = 7)) or ((t = 7) and (si = 8)) or ((t = 9) and (si = 10)) or ((t = 10) and (si = 11)) or ((t = 11) and (si = 12)) or ((t = 12) and (si = 13)) or ((t = 13) and (si = 14)) or ((t = 14) and (si = 15)) or ((t = 15) and (si = 16)) or ((t = 16) and (si = 17)) or ((t = 17) and (si = 18)) or ((t = 27) and (si = 28)) or ((t = 37) and (si = 38)) or ((t = 47) and (si = 48)) or ((t = 52) and (si = 53))) (type: boolean)
               Statistics: Num rows: 280 Data size: 2232 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
                 Statistics: Num rows: 280 Data size: 2232 Basic stats: COMPLETE Column stats: COMPLETE
@@ -209,7 +209,7 @@ STAGE PLANS:
             alias: over1k
             Statistics: Num rows: 2098 Data size: 211174 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: (((t = 1) and (si = 2)) or (((t = 2) and (si = 3)) or (((t = 3) and (si = 4)) or (((t = 4) and (si = 5)) or (((t = 5) and (si = 6)) or (((t = 6) and (si = 7)) or (((t = 7) and (si = 8)) or (((t = 9) and (si = 10)) or (((t = 10) and (si = 11)) or (((t = 11) and (si = 12)) or (((t = 12) and (si = 13)) or (((t = 13) and (si = 14)) or (((t = 14) and (si = 15)) or (((t = 15) and (si = 16)) or (((t = 16) and (si = 17)) or (((t = 17) and (si = 18)) or (((t = 27) and (si = 28)) or (((t = 37) and (si = 38)) or (((t = 47) and (si = 48)) or ((t = 52) and (si = 53))))))))))))))))))))) (type: boolean)
+              predicate: (((t = 1) and (si = 2)) or ((t = 2) and (si = 3)) or ((t = 3) and (si = 4)) or ((t = 4) and (si = 5)) or ((t = 5) and (si = 6)) or ((t = 6) and (si = 7)) or ((t = 7) and (si = 8)) or ((t = 9) and (si = 10)) or ((t = 10) and (si = 11)) or ((t = 11) and (si = 12)) or ((t = 12) and (si = 13)) or ((t = 13) and (si = 14)) or ((t = 14) and (si = 15)) or ((t = 15) and (si = 16)) or ((t = 16) and (si = 17)) or ((t = 17) and (si = 18)) or ((t = 27) and (si = 28)) or ((t = 37) and (si = 38)) or ((t = 47) and (si = 48)) or ((t = 52) and (si = 53))) (type: boolean)
               Statistics: Num rows: 2098 Data size: 211174 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 Statistics: Num rows: 2098 Data size: 211174 Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
index 394af7e..eeb5847 100644
--- a/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
@@ -1030,7 +1030,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2, _col3, _col7, _col11, _col12, _col16
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
-            predicate: (((_col1 = _col7) and (_col3 = _col11)) and (_col0 = _col16)) (type: boolean)
+            predicate: ((_col1 = _col7) and (_col3 = _col11) and (_col0 = _col16)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: _col12 (type: string), _col11 (type: int), _col7 (type: int), 3 (type: int), _col2 (type: int)
@@ -1111,7 +1111,7 @@ STAGE PLANS:
           outputColumnNames: _col1, _col2, _col3, _col5, _col6, _col8, _col9, _col10, _col12, _col13
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
-            predicate: ((((_col2 = _col9) and (_col1 = _col8)) and (_col3 = 3)) and (_col10 = 4)) (type: boolean)
+            predicate: ((_col2 = _col9) and (_col1 = _col8) and (_col3 = 3) and (_col10 = 4)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: _col1 (type: int), _col2 (type: int), _col5 (type: double), _col6 (type: double), _col8 (type: int), _col9 (type: int), _col12 (type: double), _col13 (type: double)
@@ -1257,7 +1257,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2, _col3, _col7, _col11, _col12, _col16
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
           Filter Operator
-            predicate: (((_col1 = _col7) and (_col3 = _col11)) and (_col0 = _col16)) (type: boolean)
+            predicate: ((_col1 = _col7) and (_col3 = _col11) and (_col0 = _col16)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Select Operator
               expressions: _col12 (type: string), _col11 (type: int), _col7 (type: int), 4 (type: int), _col2 (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/flatten_and_or.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/flatten_and_or.q.out b/ql/src/test/results/clientpositive/flatten_and_or.q.out
new file mode 100644
index 0000000..9c51ff3
--- /dev/null
+++ b/ql/src/test/results/clientpositive/flatten_and_or.q.out
@@ -0,0 +1,66 @@
+PREHOOK: query: explain
+SELECT key
+FROM src
+WHERE
+   ((key = '0'
+   AND value = '8') OR (key = '1'
+   AND value = '5') OR (key = '2'
+   AND value = '6') OR (key = '3'
+   AND value = '8') OR (key = '4'
+   AND value = '1') OR (key = '5'
+   AND value = '6') OR (key = '6'
+   AND value = '1') OR (key = '7'
+   AND value = '1') OR (key = '8'
+   AND value = '1') OR (key = '9'
+   AND value = '1') OR (key = '10'
+   AND value = '3'))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+SELECT key
+FROM src
+WHERE
+   ((key = '0'
+   AND value = '8') OR (key = '1'
+   AND value = '5') OR (key = '2'
+   AND value = '6') OR (key = '3'
+   AND value = '8') OR (key = '4'
+   AND value = '1') OR (key = '5'
+   AND value = '6') OR (key = '6'
+   AND value = '1') OR (key = '7'
+   AND value = '1') OR (key = '8'
+   AND value = '1') OR (key = '9'
+   AND value = '1') OR (key = '10'
+   AND value = '3'))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (((key = '0') and (value = '8')) or ((key = '1') and (value = '5')) or ((key = '2') and (value = '6')) or ((key = '3') and (value = '8')) or ((key = '4') and (value = '1')) or ((key = '5') and (value = '6')) or ((key = '6') and (value = '1')) or ((key = '7') and (value = '1')) or ((key = '8') and (value = '1')) or ((key = '9') and (value = '1')) or ((key = '10') and (value = '3'))) (type: boolean)
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/groupby_multi_single_reducer3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_multi_single_reducer3.q.out b/ql/src/test/results/clientpositive/groupby_multi_single_reducer3.q.out
index 616eaa3..ca66c67 100644
--- a/ql/src/test/results/clientpositive/groupby_multi_single_reducer3.q.out
+++ b/ql/src/test/results/clientpositive/groupby_multi_single_reducer3.q.out
@@ -225,7 +225,7 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((((key + key) = 400) or (((key - 100) = 500) and value is not null)) or ((((key + key) = 200) or ((key - 100) = 100)) or ((key = 300) and value is not null))) (type: boolean)
+              predicate: ((((key + key) = 400) or (((key - 100) = 500) and value is not null)) or (((key + key) = 200) or ((key - 100) = 100) or ((key = 300) and value is not null))) (type: boolean)
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: value (type: string)
@@ -237,7 +237,7 @@ STAGE PLANS:
         Forward
           Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
-            predicate: ((((VALUE._col0 + VALUE._col0) = 200) or ((VALUE._col0 - 100) = 100)) or ((VALUE._col0 = 300) and KEY._col0 is not null)) (type: boolean)
+            predicate: (((VALUE._col0 + VALUE._col0) = 200) or ((VALUE._col0 - 100) = 100) or ((VALUE._col0 = 300) and KEY._col0 is not null)) (type: boolean)
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Group By Operator
               aggregations: count()
@@ -557,7 +557,7 @@ STAGE PLANS:
             alias: src
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((((key + key) = 400) or (((key - 100) = 500) and value is not null)) or ((((key + key) = 200) or ((key - 100) = 100)) or ((key = 300) and value is not null))) (type: boolean)
+              predicate: ((((key + key) = 400) or (((key - 100) = 500) and value is not null)) or (((key + key) = 200) or ((key - 100) = 100) or ((key = 300) and value is not null))) (type: boolean)
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: value (type: string)
@@ -569,7 +569,7 @@ STAGE PLANS:
         Forward
           Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
-            predicate: ((((VALUE._col0 + VALUE._col0) = 200) or ((VALUE._col0 - 100) = 100)) or ((VALUE._col0 = 300) and KEY._col0 is not null)) (type: boolean)
+            predicate: (((VALUE._col0 + VALUE._col0) = 200) or ((VALUE._col0 - 100) = 100) or ((VALUE._col0 = 300) and KEY._col0 is not null)) (type: boolean)
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Group By Operator
               aggregations: count()

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/input_testxpath4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input_testxpath4.q.out b/ql/src/test/results/clientpositive/input_testxpath4.q.out
index b522b8a..4aea350 100644
--- a/ql/src/test/results/clientpositive/input_testxpath4.q.out
+++ b/ql/src/test/results/clientpositive/input_testxpath4.q.out
@@ -24,7 +24,7 @@ STAGE PLANS:
             alias: src_thrift
             Statistics: Num rows: 11 Data size: 3070 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: ((mstringstring['key_9'] is not null and lintstring.myint is not null) and lintstring is not null) (type: boolean)
+              predicate: (mstringstring['key_9'] is not null and lintstring.myint is not null and lintstring is not null) (type: boolean)
               Statistics: Num rows: 2 Data size: 558 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: mstringstring['key_9'] (type: string), lintstring.myint (type: array<int>)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out b/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out
index 26db67e..6ff13e4 100644
--- a/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out
+++ b/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out
@@ -282,7 +282,7 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44
           Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
-            predicate: (((_col13 = _col25) and (_col0 = _col36)) and (_col0 = _col12)) (type: boolean)
+            predicate: ((_col13 = _col25) and (_col0 = _col36) and (_col0 = _col12)) (type: boolean)
             Statistics: Num rows: 1 Data size: 123 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col12 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string), _col24 (type: int), _col25 (type: string), _col26 (type: string), _col27 (type: string), _col28 (type: string), _col29 (type: int), _col30 (type: string), _col31 (type: double), _col32 (type: string), _col36 (type: int), _col37 (type: string), _col38 (type: string), _col39 (type: string), _col40 (type: string), _col41 (type: int), _col42 (type: string), _col43 (type: double), _col44 (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/lineage3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/lineage3.q.out b/ql/src/test/results/clientpositive/lineage3.q.out
index 8a7bd3e..75d88f8 100644
--- a/ql/src/test/results/clientpositive/lineage3.q.out
+++ b/ql/src/test/results/clientpositive/lineage3.q.out
@@ -269,7 +269,7 @@ PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@dest_v3
-{"version":"1.0","engine":"mr","hash":"a0c2481ce1c24895a43a950f93a10da7","queryText":"create view dest_v3 (a1, a2, a3, a4, a5, a6, a7) as\n  select x.csmallint, x.cbigint bint1, x.ctinyint, c.cbigint bint2, x.cint, x.cfloat, c.cstring1\n  from alltypesorc c\n  join (\n     select a.csmallint csmallint, a.ctinyint ctinyint, a.cstring2 cstring2,\n           a.cint cint, a.cstring1 ctring1, b.cfloat cfloat, b.cbigint cbigint\n     from ( select * from alltypesorc a where cboolean1=true ) a\n     join alltypesorc b on (a.csmallint = b.cint)\n   ) x on (x.ctinyint = c.cbigint)\n  where x.csmallint=11\n  and x.cint > 899\n  and x.cfloat > 4.5\n  and c.cstring1 < '7'\n  and x.cint + x.cfloat + length(c.cstring1) < 1000","edges":[{"sources":[7],"targets":[0],"expression":"x._col15","edgeType":"PROJECTION"},{"sources":[8],"targets":[1,2],"edgeType":"PROJECTION"},{"sources":[9],"targets":[3],"expression":"x._col16","edgeType":"PROJECTION"},{"sources":[10],"targets":[4],"expression":"x._col18"
 ,"edgeType":"PROJECTION"},{"sources":[11],"targets":[5],"edgeType":"PROJECTION"},{"sources":[12],"targets":[6],"edgeType":"PROJECTION"},{"sources":[13],"targets":[0,1,3,2,4,5,6],"expression":"(a.cboolean1 = true)","edgeType":"PREDICATE"},{"sources":[7,10,12,11],"targets":[0,1,3,2,4,5,6],"expression":"(((((x.csmallint = 11) and (x.cint > 899)) and (x.cfloat > 4.5)) and (c.cstring1 < '7')) and (((x.cint + x.cfloat) + length(c.cstring1)) < 1000))","edgeType":"PREDICATE"},{"sources":[7,10],"targets":[0,1,3,2,4,5,6],"expression":"(UDFToInteger(a._col1) = b.cint)","edgeType":"PREDICATE"},{"sources":[8,9],"targets":[0,1,3,2,4,5,6],"expression":"(c.cbigint = UDFToLong(x._col1))","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest_v3.csmallint"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_v3.bint1"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_v3.bint2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest_v3.ctinyint"},{"id":4,"
 vertexType":"COLUMN","vertexId":"default.dest_v3.cint"},{"id":5,"vertexType":"COLUMN","vertexId":"default.dest_v3.cfloat"},{"id":6,"vertexType":"COLUMN","vertexId":"default.dest_v3.cstring1"},{"id":7,"vertexType":"COLUMN","vertexId":"default.alltypesorc.csmallint"},{"id":8,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cbigint"},{"id":9,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctinyint"},{"id":10,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cint"},{"id":11,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cfloat"},{"id":12,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring1"},{"id":13,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean1"}]}
+{"version":"1.0","engine":"mr","hash":"a0c2481ce1c24895a43a950f93a10da7","queryText":"create view dest_v3 (a1, a2, a3, a4, a5, a6, a7) as\n  select x.csmallint, x.cbigint bint1, x.ctinyint, c.cbigint bint2, x.cint, x.cfloat, c.cstring1\n  from alltypesorc c\n  join (\n     select a.csmallint csmallint, a.ctinyint ctinyint, a.cstring2 cstring2,\n           a.cint cint, a.cstring1 ctring1, b.cfloat cfloat, b.cbigint cbigint\n     from ( select * from alltypesorc a where cboolean1=true ) a\n     join alltypesorc b on (a.csmallint = b.cint)\n   ) x on (x.ctinyint = c.cbigint)\n  where x.csmallint=11\n  and x.cint > 899\n  and x.cfloat > 4.5\n  and c.cstring1 < '7'\n  and x.cint + x.cfloat + length(c.cstring1) < 1000","edges":[{"sources":[7],"targets":[0],"expression":"x._col15","edgeType":"PROJECTION"},{"sources":[8],"targets":[1,2],"edgeType":"PROJECTION"},{"sources":[9],"targets":[3],"expression":"x._col16","edgeType":"PROJECTION"},{"sources":[10],"targets":[4],"expression":"x._col18"
 ,"edgeType":"PROJECTION"},{"sources":[11],"targets":[5],"edgeType":"PROJECTION"},{"sources":[12],"targets":[6],"edgeType":"PROJECTION"},{"sources":[13],"targets":[0,1,3,2,4,5,6],"expression":"(a.cboolean1 = true)","edgeType":"PREDICATE"},{"sources":[7,10,12,11],"targets":[0,1,3,2,4,5,6],"expression":"((x.csmallint = 11) and (x.cint > 899) and (x.cfloat > 4.5) and (c.cstring1 < '7') and (((x.cint + x.cfloat) + length(c.cstring1)) < 1000))","edgeType":"PREDICATE"},{"sources":[7,10],"targets":[0,1,3,2,4,5,6],"expression":"(UDFToInteger(a._col1) = b.cint)","edgeType":"PREDICATE"},{"sources":[8,9],"targets":[0,1,3,2,4,5,6],"expression":"(c.cbigint = UDFToLong(x._col1))","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest_v3.csmallint"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_v3.bint1"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_v3.bint2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest_v3.ctinyint"},{"id":4,"vertex
 Type":"COLUMN","vertexId":"default.dest_v3.cint"},{"id":5,"vertexType":"COLUMN","vertexId":"default.dest_v3.cfloat"},{"id":6,"vertexType":"COLUMN","vertexId":"default.dest_v3.cstring1"},{"id":7,"vertexType":"COLUMN","vertexId":"default.alltypesorc.csmallint"},{"id":8,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cbigint"},{"id":9,"vertexType":"COLUMN","vertexId":"default.alltypesorc.ctinyint"},{"id":10,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cint"},{"id":11,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cfloat"},{"id":12,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cstring1"},{"id":13,"vertexType":"COLUMN","vertexId":"default.alltypesorc.cboolean1"}]}
 PREHOOK: query: alter view dest_v3 as
   select * from (
     select sum(a.ctinyint) over (partition by a.csmallint order by a.csmallint) a,

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out b/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
index bb5fedb..0d4cd15 100644
--- a/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
+++ b/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
@@ -768,28 +768,28 @@ STAGE PLANS:
             alias: orc_pred
             Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
-              Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+              predicate: ((((((d >= 10.0) and (d < 12.0)) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
+              Statistics: Num rows: 4 Data size: 1186 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1186 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col3 (type: string)
                   sort order: -
-                  Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 4 Data size: 1186 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1186 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 3
-            Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -833,31 +833,31 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_pred
-            filterExpr: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
+            filterExpr: ((((((d >= 10.0) and (d < 12.0)) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
             Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
-              Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+              predicate: ((((((d >= 10.0) and (d < 12.0)) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
+              Statistics: Num rows: 4 Data size: 1186 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1186 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col3 (type: string)
                   sort order: -
-                  Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 4 Data size: 1186 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double)
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1186 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 3
-            Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -970,7 +970,7 @@ STAGE PLANS:
             alias: orc_pred
             Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
+              predicate: ((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
               Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
@@ -1064,10 +1064,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orc_pred
-            filterExpr: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
+            filterExpr: ((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
             Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
-              predicate: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
+              predicate: ((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
               Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE
               Select Operator
                 expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/ppd_gby_join.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ppd_gby_join.q.out b/ql/src/test/results/clientpositive/ppd_gby_join.q.out
index 1acfc3d..e3f71e7 100644
--- a/ql/src/test/results/clientpositive/ppd_gby_join.q.out
+++ b/ql/src/test/results/clientpositive/ppd_gby_join.q.out
@@ -42,7 +42,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
-                  predicate: ((_col0 > '20') and (((_col1 < 'val_50') or (_col0 > '2')) and (_col0 < '400'))) (type: boolean)
+                  predicate: ((_col0 > '20') and ((_col1 < 'val_50') or (_col0 > '2')) and (_col0 < '400')) (type: boolean)
                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string)
@@ -73,7 +73,7 @@ STAGE PLANS:
                   predicate: (_col0 <> '4') (type: boolean)
                   Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((_col0 > '1') and ((_col0 > '20') and (_col0 < '400'))) (type: boolean)
+                    predicate: ((_col0 > '1') and (_col0 > '20') and (_col0 < '400')) (type: boolean)
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                     Filter Operator
                       predicate: _col0 is not null (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/ppd_join.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ppd_join.q.out b/ql/src/test/results/clientpositive/ppd_join.q.out
index 2186a54..58c4e43 100644
--- a/ql/src/test/results/clientpositive/ppd_join.q.out
+++ b/ql/src/test/results/clientpositive/ppd_join.q.out
@@ -39,7 +39,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
-                  predicate: ((_col0 > '20') and (((_col1 < 'val_50') or (_col0 > '2')) and (_col0 < '400'))) (type: boolean)
+                  predicate: ((_col0 > '20') and ((_col1 < 'val_50') or (_col0 > '2')) and (_col0 < '400')) (type: boolean)
                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string)
@@ -70,7 +70,7 @@ STAGE PLANS:
                   predicate: (_col0 <> '4') (type: boolean)
                   Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((_col0 > '1') and ((_col0 > '20') and (_col0 < '400'))) (type: boolean)
+                    predicate: ((_col0 > '1') and (_col0 > '20') and (_col0 < '400')) (type: boolean)
                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                     Filter Operator
                       predicate: _col0 is not null (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/ppd_join2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ppd_join2.q.out b/ql/src/test/results/clientpositive/ppd_join2.q.out
index 335d995..e99839e 100644
--- a/ql/src/test/results/clientpositive/ppd_join2.q.out
+++ b/ql/src/test/results/clientpositive/ppd_join2.q.out
@@ -46,7 +46,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 42 Data size: 446 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
-                  predicate: ((_col0 <> '311') and (((_col1 <> 'val_50') or (_col0 > '1')) and (_col0 < '400'))) (type: boolean)
+                  predicate: ((_col0 <> '311') and ((_col1 <> 'val_50') or (_col0 > '1')) and (_col0 < '400')) (type: boolean)
                   Statistics: Num rows: 14 Data size: 148 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((_col0 <> '305') and (_col0 <> '14')) (type: boolean)
@@ -74,7 +74,7 @@ STAGE PLANS:
                   predicate: (_col0 <> '14') (type: boolean)
                   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((_col0 <> '302') and ((_col0 <> '311') and (_col0 < '400'))) (type: boolean)
+                    predicate: ((_col0 <> '302') and (_col0 <> '311') and (_col0 < '400')) (type: boolean)
                     Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
                     Filter Operator
                       predicate: _col0 is not null (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/ppd_join3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ppd_join3.q.out b/ql/src/test/results/clientpositive/ppd_join3.q.out
index d658cfb..f2b0b60 100644
--- a/ql/src/test/results/clientpositive/ppd_join3.q.out
+++ b/ql/src/test/results/clientpositive/ppd_join3.q.out
@@ -46,7 +46,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1
                 Statistics: Num rows: 28 Data size: 297 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
-                  predicate: ((_col0 > '0') and (((_col1 <> 'val_500') or (_col0 > '1')) and (_col0 < '400'))) (type: boolean)
+                  predicate: ((_col0 > '0') and ((_col1 <> 'val_500') or (_col0 > '1')) and (_col0 < '400')) (type: boolean)
                   Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string)
@@ -77,7 +77,7 @@ STAGE PLANS:
                   predicate: (_col0 <> '4') (type: boolean)
                   Statistics: Num rows: 28 Data size: 297 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((_col0 <> '11') and ((_col0 > '0') and (_col0 < '400'))) (type: boolean)
+                    predicate: ((_col0 <> '11') and (_col0 > '0') and (_col0 < '400')) (type: boolean)
                     Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                     Filter Operator
                       predicate: _col0 is not null (type: boolean)
@@ -134,7 +134,7 @@ STAGE PLANS:
                   predicate: (_col0 <> '1') (type: boolean)
                   Statistics: Num rows: 28 Data size: 297 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((_col0 <> '11') and ((_col0 > '0') and ((_col0 < '400') and ((_col0 <> '12') and (_col0 <> '4'))))) (type: boolean)
+                    predicate: ((_col0 <> '11') and (_col0 > '0') and (_col0 < '400') and (_col0 <> '12') and (_col0 <> '4')) (type: boolean)
                     Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                     Filter Operator
                       predicate: _col0 is not null (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ppd_outer_join4.q.out b/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
index 2d1333b..9997166 100644
--- a/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
+++ b/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
@@ -122,7 +122,7 @@ STAGE PLANS:
                 outputColumnNames: _col0
                 Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
-                  predicate: ((_col0 > '10') and ((_col0 < '20') and ((_col0 > '15') and (_col0 < '25')))) (type: boolean)
+                  predicate: ((_col0 > '10') and (_col0 < '20') and (_col0 > '15') and (_col0 < '25')) (type: boolean)
                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: _col0 is not null (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
index 7045855..7e9a0f3 100644
--- a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
@@ -833,7 +833,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2, _col3, _col7, _col11, _col12, _col16
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Filter Operator
-                  predicate: (((_col1 = _col7) and (_col3 = _col11)) and (_col0 = _col16)) (type: boolean)
+                  predicate: ((_col1 = _col7) and (_col3 = _col11) and (_col0 = _col16)) (type: boolean)
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   Select Operator
                     expressions: _col12 (type: string), _col11 (type: int), _col7 (type: int), 4 (type: int), _col2 (type: int)
@@ -887,7 +887,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2, _col3, _col7, _col11, _col12, _col16
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Filter Operator
-                  predicate: (((_col1 = _col7) and (_col3 = _col11)) and (_col0 = _col16)) (type: boolean)
+                  predicate: ((_col1 = _col7) and (_col3 = _col11) and (_col0 = _col16)) (type: boolean)
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   Select Operator
                     expressions: _col12 (type: string), _col11 (type: int), _col7 (type: int), 3 (type: int), _col2 (type: int)
@@ -941,7 +941,7 @@ STAGE PLANS:
                 outputColumnNames: _col1, _col2, _col3, _col5, _col6, _col8, _col9, _col10, _col12, _col13
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                 Filter Operator
-                  predicate: ((((_col2 = _col9) and (_col1 = _col8)) and (_col3 = 3)) and (_col10 = 4)) (type: boolean)
+                  predicate: ((_col2 = _col9) and (_col1 = _col8) and (_col3 = 3) and (_col10 = 4)) (type: boolean)
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                   Select Operator
                     expressions: _col1 (type: int), _col2 (type: int), _col5 (type: double), _col6 (type: double), _col8 (type: int), _col9 (type: int), _col12 (type: double), _col13 (type: double)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out b/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out
index 5192dbb..f87308f 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out
@@ -236,7 +236,7 @@ STAGE PLANS:
                   alias: src
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((((key + key) = 400) or (((key - 100) = 500) and value is not null)) or ((((key + key) = 200) or ((key - 100) = 100)) or ((key = 300) and value is not null))) (type: boolean)
+                    predicate: ((((key + key) = 400) or (((key - 100) = 500) and value is not null)) or (((key + key) = 200) or ((key - 100) = 100) or ((key = 300) and value is not null))) (type: boolean)
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: value (type: string)
@@ -249,7 +249,7 @@ STAGE PLANS:
               Forward
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
-                  predicate: ((((VALUE._col0 + VALUE._col0) = 200) or ((VALUE._col0 - 100) = 100)) or ((VALUE._col0 = 300) and KEY._col0 is not null)) (type: boolean)
+                  predicate: (((VALUE._col0 + VALUE._col0) = 200) or ((VALUE._col0 - 100) = 100) or ((VALUE._col0 = 300) and KEY._col0 is not null)) (type: boolean)
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: count()
@@ -580,7 +580,7 @@ STAGE PLANS:
                   alias: src
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((((key + key) = 400) or (((key - 100) = 500) and value is not null)) or ((((key + key) = 200) or ((key - 100) = 100)) or ((key = 300) and value is not null))) (type: boolean)
+                    predicate: ((((key + key) = 400) or (((key - 100) = 500) and value is not null)) or (((key + key) = 200) or ((key - 100) = 100) or ((key = 300) and value is not null))) (type: boolean)
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: value (type: string)
@@ -593,7 +593,7 @@ STAGE PLANS:
               Forward
                 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
-                  predicate: ((((VALUE._col0 + VALUE._col0) = 200) or ((VALUE._col0 - 100) = 100)) or ((VALUE._col0 = 300) and KEY._col0 is not null)) (type: boolean)
+                  predicate: (((VALUE._col0 + VALUE._col0) = 200) or ((VALUE._col0 - 100) = 100) or ((VALUE._col0 = 300) and KEY._col0 is not null)) (type: boolean)
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Group By Operator
                     aggregations: count()

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/spark/join_cond_pushdown_unqual4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join_cond_pushdown_unqual4.q.out b/ql/src/test/results/clientpositive/spark/join_cond_pushdown_unqual4.q.out
index e16884c..b30f4f4 100644
--- a/ql/src/test/results/clientpositive/spark/join_cond_pushdown_unqual4.q.out
+++ b/ql/src/test/results/clientpositive/spark/join_cond_pushdown_unqual4.q.out
@@ -286,7 +286,7 @@ STAGE PLANS:
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44
                 Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
-                  predicate: (((_col13 = _col25) and (_col0 = _col36)) and (_col0 = _col12)) (type: boolean)
+                  predicate: ((_col13 = _col25) and (_col0 = _col36) and (_col0 = _col12)) (type: boolean)
                   Statistics: Num rows: 1 Data size: 123 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col12 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string), _col24 (type: int), _col25 (type: string), _col26 (type: string), _col27 (type: string), _col28 (type: string), _col29 (type: int), _col30 (type: string), _col31 (type: double), _col32 (type: string), _col36 (type: int), _col37 (type: string), _col38 (type: string), _col39 (type: string), _col40 (type: string), _col41 (type: int), _col42 (type: string), _col43 (type: double), _col44 (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/spark/ppd_gby_join.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/ppd_gby_join.q.out b/ql/src/test/results/clientpositive/spark/ppd_gby_join.q.out
index b3ebea9..306292a 100644
--- a/ql/src/test/results/clientpositive/spark/ppd_gby_join.q.out
+++ b/ql/src/test/results/clientpositive/spark/ppd_gby_join.q.out
@@ -47,7 +47,7 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                       Filter Operator
-                        predicate: ((_col0 > '20') and (((_col1 < 'val_50') or (_col0 > '2')) and (_col0 < '400'))) (type: boolean)
+                        predicate: ((_col0 > '20') and ((_col1 < 'val_50') or (_col0 > '2')) and (_col0 < '400')) (type: boolean)
                         Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
                           expressions: _col0 (type: string)
@@ -80,7 +80,7 @@ STAGE PLANS:
                         predicate: (_col0 <> '4') (type: boolean)
                         Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                         Filter Operator
-                          predicate: ((_col0 > '1') and ((_col0 > '20') and (_col0 < '400'))) (type: boolean)
+                          predicate: ((_col0 > '1') and (_col0 > '20') and (_col0 < '400')) (type: boolean)
                           Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                           Filter Operator
                             predicate: _col0 is not null (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/7f3e4811/ql/src/test/results/clientpositive/spark/ppd_join.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/ppd_join.q.out b/ql/src/test/results/clientpositive/spark/ppd_join.q.out
index 42a83f3..aee7630 100644
--- a/ql/src/test/results/clientpositive/spark/ppd_join.q.out
+++ b/ql/src/test/results/clientpositive/spark/ppd_join.q.out
@@ -44,7 +44,7 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1
                       Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                       Filter Operator
-                        predicate: ((_col0 > '20') and (((_col1 < 'val_50') or (_col0 > '2')) and (_col0 < '400'))) (type: boolean)
+                        predicate: ((_col0 > '20') and ((_col1 < 'val_50') or (_col0 > '2')) and (_col0 < '400')) (type: boolean)
                         Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
                           expressions: _col0 (type: string)
@@ -77,7 +77,7 @@ STAGE PLANS:
                         predicate: (_col0 <> '4') (type: boolean)
                         Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE
                         Filter Operator
-                          predicate: ((_col0 > '1') and ((_col0 > '20') and (_col0 < '400'))) (type: boolean)
+                          predicate: ((_col0 > '1') and (_col0 > '20') and (_col0 < '400')) (type: boolean)
                           Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                           Filter Operator
                             predicate: _col0 is not null (type: boolean)