Posted to commits@hive.apache.org by px...@apache.org on 2017/04/03 18:43:50 UTC

hive git commit: HIVE-15923: Hive default partition causes errors in get partitions (Sergey Shelukhin, reviewed by Pengcheng Xiong, Aihua Xu)

Repository: hive
Updated Branches:
  refs/heads/master bf98700a7 -> 5bf52be60


HIVE-15923: Hive default partition causes errors in get partitions (Sergey Shelukhin, reviewed by Pengcheng Xiong, Aihua Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5bf52be6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5bf52be6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5bf52be6

Branch: refs/heads/master
Commit: 5bf52be60cb54d66133b336308344780f0a82c77
Parents: bf98700
Author: Pengcheng Xiong <px...@hortonworks.com>
Authored: Mon Apr 3 11:43:35 2017 -0700
Committer: Pengcheng Xiong <px...@hortonworks.com>
Committed: Mon Apr 3 11:43:35 2017 -0700

----------------------------------------------------------------------
 .../exec/ExprNodeConstantDefaultEvaluator.java  | 55 -------------
 .../hive/ql/exec/ExprNodeEvaluatorFactory.java  |  6 --
 .../hadoop/hive/ql/exec/FunctionRegistry.java   | 14 ++++
 .../hive/ql/optimizer/ppr/PartitionPruner.java  |  3 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      | 27 +++++-
 .../ql/parse/ReplicationSemanticAnalyzer.java   |  5 +-
 .../ql/plan/ExprNodeConstantDefaultDesc.java    | 86 --------------------
 .../hive/ql/udf/generic/GenericUDFOPEqual.java  |  9 --
 .../ql/udf/generic/GenericUDFOPNotEqual.java    |  9 --
 .../drop_default_partition_filter.q             |  7 ++
 .../clientpositive/drop_partitions_filter4.q    | 10 +++
 .../clientpositive/partitions_filter_default.q  | 14 ++++
 .../drop_default_partition_filter.q.out         | 23 ++++++
 .../drop_partitions_filter4.q.out               | 71 ++++++++++++++++
 .../partitions_filter_default.q.out             | 67 +++++++++++++++
 15 files changed, 232 insertions(+), 174 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java
deleted file mode 100644
index f53c3e3..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-
-/**
- * ExprNodeConstantEvaluator.
- *
- */
-public class ExprNodeConstantDefaultEvaluator extends ExprNodeEvaluator<ExprNodeConstantDefaultDesc> {
-
-  transient ObjectInspector writableObjectInspector;
-
-  public ExprNodeConstantDefaultEvaluator(ExprNodeConstantDefaultDesc expr) {
-    this(expr, null);
-  }
-
-  public ExprNodeConstantDefaultEvaluator(ExprNodeConstantDefaultDesc expr, Configuration conf) {
-    super(expr, conf);
-    writableObjectInspector = expr.getWritableObjectInspector();
-  }
-
-  @Override
-  public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException {
-    return writableObjectInspector;
-  }
-
-  @Override
-  protected Object _evaluate(Object row, int version) throws HiveException {
-    return expr;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
index 34aec55..cc40cae 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
@@ -24,7 +24,6 @@ import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDynamicValueDesc;
@@ -50,11 +49,6 @@ public final class ExprNodeEvaluatorFactory {
       return new ExprNodeConstantEvaluator((ExprNodeConstantDesc) desc, conf);
     }
 
-    // Special 'default' constant node
-    if (desc instanceof ExprNodeConstantDefaultDesc) {
-      return new ExprNodeConstantDefaultEvaluator((ExprNodeConstantDefaultDesc) desc);
-    }
-
     // Column-reference node, e.g. a column in the input row
     if (desc instanceof ExprNodeColumnDesc) {
       return new ExprNodeColumnEvaluator((ExprNodeColumnDesc) desc, conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 9e781dd..ccfb455 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -1489,6 +1489,20 @@ public final class FunctionRegistry {
   }
 
   /**
+   * Returns whether the fn is an exact equality comparison.
+   */
+  public static boolean isEq(GenericUDF fn) {
+    return fn instanceof GenericUDFOPEqual;
+  }
+
+  /**
+   * Returns whether the fn is an exact non-equality comparison.
+   */
+  public static boolean isNeq(GenericUDF fn) {
+    return fn instanceof GenericUDFOPNotEqual;
+  }
+
+  /**
    * Returns whether the exprNodeDesc is a node of "positive".
    */
   public static boolean isOpPositive(ExprNodeDesc desc) {

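The two helpers added above are plain instanceof checks against the equality UDF classes; DDLSemanticAnalyzer (further down in this patch) uses them to decide which null-check function should replace the comparison when the filter value is the default partition. A minimal illustrative sketch of that dispatch, assuming Hive classes on the classpath -- the wrapper class and method name are hypothetical, only isEq/isNeq and getFunctionInfo come from the patch:

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

// Illustrative sketch only: resolves the comparison operator from a partition spec
// to the null-check UDF that should stand in for it on the default partition.
final class DefaultPartitionOperatorSketch {
  static String nullCheckFor(String operator) throws SemanticException {
    GenericUDF udf = FunctionRegistry.getFunctionInfo(operator).getGenericUDF();
    if (FunctionRegistry.isEq(udf)) {
      return "isnull";      // c =  '__HIVE_DEFAULT_PARTITION__'  ->  isnull(c)
    }
    if (FunctionRegistry.isNeq(udf)) {
      return "isnotnull";   // c != '__HIVE_DEFAULT_PARTITION__'  ->  isnotnull(c)
    }
    throw new SemanticException("Only '=' and '!=' can target the default partition");
  }
}
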
http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
index 655bd8a..2acfef7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
@@ -48,7 +48,6 @@ import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
@@ -554,7 +553,7 @@ public class PartitionPruner extends Transform {
         PrimitiveTypeInfo typeInfo = partColumnTypeInfos.get(i);
 
         if (partitionValue.equals(defaultPartitionName)) {
-          convertedValues.add(new ExprNodeConstantDefaultDesc(typeInfo, defaultPartitionName));
+          convertedValues.add(null); // Null for default partition.
         } else {
           Object o = ObjectInspectorConverters.getConverter(
               PrimitiveObjectInspectorFactory.javaStringObjectInspector,

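With the special constant class removed, the pruner represents the default partition as a plain null in the list of converted partition values, so downstream expression evaluation falls back to ordinary SQL null semantics instead of a magic constant. A simplified sketch of the conversion loop under those assumptions -- the wrapper class, method signature, and target object inspector are illustrative, not the exact production code:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;

// Illustrative sketch only: converts string partition values to typed objects,
// mapping the default partition name to SQL NULL rather than a special constant.
final class PartitionValueConversionSketch {
  static List<Object> convert(List<String> values, List<PrimitiveTypeInfo> types,
      String defaultPartitionName) {
    List<Object> converted = new ArrayList<>(values.size());
    for (int i = 0; i < values.size(); i++) {
      String v = values.get(i);
      if (v.equals(defaultPartitionName)) {
        converted.add(null); // default partition: represented as NULL
      } else {
        converted.add(ObjectInspectorConverters.getConverter(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(types.get(i)))
            .convert(v));
      }
    }
    return converted;
  }
}
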
http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index fc13292..6e72d07 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
 import org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -98,7 +99,6 @@ import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
@@ -134,6 +134,7 @@ import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
@@ -3123,8 +3124,22 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
         }
 
         ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
-        ExprNodeGenericFuncDesc op = makeBinaryPredicate(operator, column,
-            isDefaultPartitionName ? new ExprNodeConstantDefaultDesc(pti, defaultPartitionName) : new ExprNodeConstantDesc(pti, val));
+        ExprNodeGenericFuncDesc op;
+        if (!isDefaultPartitionName) {
+          op = makeBinaryPredicate(operator, column, new ExprNodeConstantDesc(pti, val));
+        } else {
+          GenericUDF originalOp = FunctionRegistry.getFunctionInfo(operator).getGenericUDF();
+          String fnName;
+          if (FunctionRegistry.isEq(originalOp)) {
+            fnName = "isnull";
+          } else if (FunctionRegistry.isNeq(originalOp)) {
+            fnName = "isnotnull";
+          } else {
+            throw new SemanticException("Cannot use " + operator
+                + " in a default partition spec; only '=' and '!=' are allowed.");
+          }
+          op = makeUnaryPredicate(fnName, column);
+        }
         // If it's multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs.
         expr = (expr == null) ? op : makeBinaryPredicate("and", expr, op);
         names.add(key);
@@ -3152,7 +3167,11 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
           FunctionRegistry.getFunctionInfo(fn).getGenericUDF(), Lists.newArrayList(left, right));
   }
-
+  public static ExprNodeGenericFuncDesc makeUnaryPredicate(
+      String fn, ExprNodeDesc arg) throws SemanticException {
+      return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
+          FunctionRegistry.getFunctionInfo(fn).getGenericUDF(), Lists.newArrayList(arg));
+  }
   /**
    * Calculates the partition prefix length based on the drop spec.
    * This is used to avoid deleting archived partitions with lower level.

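The net effect of the analyzer change: a drop-partition filter that names the default partition is compiled into an isnull()/isnotnull() predicate on the partition column, built with the new makeUnaryPredicate helper, rather than an equality against a synthetic constant. A small sketch of the resulting expression construction, hedged: it mirrors the constructors used in the hunk above, but standalone use of FunctionRegistry outside a Hive session may need extra setup, and the printed form of the expression may differ:

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

import com.google.common.collect.Lists;

// Illustrative sketch only: builds isnull(c) the same way the new makeUnaryPredicate does.
final class DefaultPartitionPredicateSketch {
  static ExprNodeGenericFuncDesc isNull(ExprNodeDesc column) throws SemanticException {
    return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        FunctionRegistry.getFunctionInfo("isnull").getGenericUDF(),
        Lists.newArrayList(column));
  }

  public static void main(String[] args) throws SemanticException {
    // ALTER TABLE t DROP PARTITION (c = '__HIVE_DEFAULT_PARTITION__')
    // now compiles to a filter equivalent to isnull(c), not c = <special constant>.
    ExprNodeDesc c = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "c", null, true);
    System.out.println(isNull(c).getExprString());
  }
}
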
http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index bee18e8..c9967e8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -1201,9 +1201,8 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
         ;
         PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(type);
         ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
-        ExprNodeGenericFuncDesc op =
-            DDLSemanticAnalyzer
-                .makeBinaryPredicate("=", column, new ExprNodeConstantDesc(pti, val));
+        ExprNodeGenericFuncDesc op = DDLSemanticAnalyzer.makeBinaryPredicate(
+            "=", column, new ExprNodeConstantDesc(pti, val));
         expr = (expr == null) ? op : DDLSemanticAnalyzer.makeBinaryPredicate("and", expr, op);
       }
       if (expr != null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDefaultDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDefaultDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDefaultDesc.java
deleted file mode 100644
index 8b3c0bc..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDefaultDesc.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-
-/**
- * A constant expression with default value and data type. The value is different
- * from any value of that data type. Used to represent the default partition in
- * the expression of x =/!= __HIVE_DEFAULT_PARTITION__
- */
-public class ExprNodeConstantDefaultDesc extends ExprNodeDesc implements Serializable {
-  private static final long serialVersionUID = 1L;
-  private final Object value;     // The internal value for the default
-
-  public ExprNodeConstantDefaultDesc() {
-    value = null;
-  }
-
-  public ExprNodeConstantDefaultDesc(TypeInfo typeInfo, Object value) {
-    super(typeInfo);
-    this.value = value;
-  }
-
-  @Override
-  public String toString() {
-    return "Const " + typeInfo.toString() + " default";
-  }
-
-  @Override
-  public String getExprString() {
-    return value == null ? "null" : value.toString();
-  }
-
-  @Override
-  public boolean isSame(Object o) {
-    if (!(o instanceof ExprNodeConstantDefaultDesc)) {
-      return false;
-    }
-    ExprNodeConstantDefaultDesc dest = (ExprNodeConstantDefaultDesc) o;
-    if (!typeInfo.equals(dest.getTypeInfo())) {
-      return false;
-    }
-    if (value == null) {
-      if (dest.value != null) {
-        return false;
-      }
-    } else if (!value.equals(dest.value)) {
-      return false;
-    }
-
-    return true;
-  }
-
-  @Override
-  public ExprNodeDesc clone() {
-    return new ExprNodeConstantDefaultDesc(typeInfo, value);
-  }
-
-  @Override
-  public int hashCode() {
-    int superHashCode = super.hashCode();
-    HashCodeBuilder builder = new HashCodeBuilder();
-    builder.appendSuper(superHashCode);
-    return builder.toHashCode();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
index 0002b07..b393843 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColEqualLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarEqualLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 
 /**
@@ -110,14 +109,6 @@ public class GenericUDFOPEqual extends GenericUDFBaseCompare {
       return null;
     }
 
-    // Handle 'default' constant which has a data type with special value
-    if (o0 instanceof ExprNodeConstantDefaultDesc || o1 instanceof ExprNodeConstantDefaultDesc) {
-      ExprNodeConstantDefaultDesc default0 = o0 instanceof ExprNodeConstantDefaultDesc ? (ExprNodeConstantDefaultDesc)o0 : null;
-      ExprNodeConstantDefaultDesc default1 = o1 instanceof ExprNodeConstantDefaultDesc ? (ExprNodeConstantDefaultDesc)o1 : null;
-      result.set(default0 != null && default1 != null && default0.isSame(default1));
-      return result;
-    }
-
     switch(compareType) {
     case COMPARE_TEXT:
       result.set(soi0.getPrimitiveWritableObject(o0).equals(

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
index 9652859..ed6aa36 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColNotEqualLongScal
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongScalarNotEqualLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 
 /**
@@ -110,14 +109,6 @@ public class GenericUDFOPNotEqual extends GenericUDFBaseCompare {
       return null;
     }
 
-    // Handle 'default' constant which has a data type with special value
-    if (o0 instanceof ExprNodeConstantDefaultDesc || o1 instanceof ExprNodeConstantDefaultDesc) {
-      ExprNodeConstantDefaultDesc default0 = o0 instanceof ExprNodeConstantDefaultDesc ? (ExprNodeConstantDefaultDesc)o0 : null;
-      ExprNodeConstantDefaultDesc default1 = o1 instanceof ExprNodeConstantDefaultDesc ? (ExprNodeConstantDefaultDesc)o1 : null;
-      result.set(default0 == null || default1 == null || !default0.isSame(default1));
-      return result;
-    }
-
     switch(compareType) {
     case COMPARE_TEXT:
       result.set(!soi0.getPrimitiveWritableObject(o0).equals(

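Both comparison UDFs drop their special handling of the removed constant and keep only the standard three-valued-logic behavior already visible a few lines above the deleted blocks, where a null operand makes the whole comparison return null. A minimal sketch of that retained pattern -- a hypothetical stand-in class, not the actual UDF code:

import org.apache.hadoop.io.BooleanWritable;

// Illustrative sketch only: the null-propagation pattern the comparison UDFs rely on,
// which is why default-partition filters are rewritten to isnull()/isnotnull() instead.
final class NullPropagatingEqualsSketch {
  private final BooleanWritable result = new BooleanWritable();

  BooleanWritable compare(Object o0, Object o1) {
    if (o0 == null || o1 == null) {
      return null; // SQL semantics: NULL = x and NULL != x are both NULL, never true
    }
    result.set(o0.equals(o1)); // stand-in for the type-specific comparisons in the switch
    return result;
  }
}
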
http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/test/queries/clientnegative/drop_default_partition_filter.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/drop_default_partition_filter.q b/ql/src/test/queries/clientnegative/drop_default_partition_filter.q
new file mode 100644
index 0000000..847faf5
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/drop_default_partition_filter.q
@@ -0,0 +1,7 @@
+create table ptestfilter1 (a string, b int) partitioned by (c string);
+
+alter table ptestfilter1 add partition (c='US');
+show partitions ptestfilter1;
+
+alter table ptestfilter1 drop partition (c > '__HIVE_DEFAULT_PARTITION__');
+

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/test/queries/clientpositive/drop_partitions_filter4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/drop_partitions_filter4.q b/ql/src/test/queries/clientpositive/drop_partitions_filter4.q
index f0e5e19..ee6d46d 100644
--- a/ql/src/test/queries/clientpositive/drop_partitions_filter4.q
+++ b/ql/src/test/queries/clientpositive/drop_partitions_filter4.q
@@ -10,6 +10,10 @@ alter table ptestfilter drop partition(c = '__HIVE_DEFAULT_PARTITION__');
 alter table ptestfilter drop partition(c = 3.40);
 show partitions ptestfilter;
 
+INSERT OVERWRITE TABLE ptestfilter PARTITION (c) select 'Col1', 1, null;
+alter table ptestfilter drop partition(c != '__HIVE_DEFAULT_PARTITION__');
+show partitions ptestfilter;
+
 drop table ptestfilter;
 
 create table ptestfilter (a string, b int) partitioned by (c string, d int);
@@ -24,6 +28,12 @@ alter table ptestfilter drop partition (c='Uganda', d='__HIVE_DEFAULT_PARTITION_
 alter table ptestfilter drop partition (c='Germany', d=2);
 show partitions ptestfilter;
 
+INSERT OVERWRITE TABLE ptestfilter PARTITION (c,d) select 'Col2', 2, null, 2;
+INSERT OVERWRITE TABLE ptestfilter PARTITION (c,d) select 'Col2', 2, null, 3;
+INSERT OVERWRITE TABLE ptestfilter PARTITION (c,d) select 'Col3', 3, 'Uganda', null;
+alter table ptestfilter drop partition (d != 3);
+show partitions ptestfilter;
+
 drop table ptestfilter;
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/test/queries/clientpositive/partitions_filter_default.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/partitions_filter_default.q b/ql/src/test/queries/clientpositive/partitions_filter_default.q
new file mode 100644
index 0000000..f351d29
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/partitions_filter_default.q
@@ -0,0 +1,14 @@
+SET hive.exec.dynamic.partition.mode=nonstrict;
+
+create table ptestfilter (a string) partitioned by (c int);
+INSERT OVERWRITE TABLE ptestfilter PARTITION (c) select 'Col1', null;
+INSERT OVERWRITE TABLE ptestfilter PARTITION (c) select 'Col2', 5;
+show partitions ptestfilter;
+
+select * from ptestfilter;
+
+select * from ptestfilter where c between 2 and 6 ;
+
+drop table ptestfilter;
+
+

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/test/results/clientnegative/drop_default_partition_filter.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/drop_default_partition_filter.q.out b/ql/src/test/results/clientnegative/drop_default_partition_filter.q.out
new file mode 100644
index 0000000..56c210f
--- /dev/null
+++ b/ql/src/test/results/clientnegative/drop_default_partition_filter.q.out
@@ -0,0 +1,23 @@
+PREHOOK: query: create table ptestfilter1 (a string, b int) partitioned by (c string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ptestfilter1
+POSTHOOK: query: create table ptestfilter1 (a string, b int) partitioned by (c string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ptestfilter1
+PREHOOK: query: alter table ptestfilter1 add partition (c='US')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@ptestfilter1
+POSTHOOK: query: alter table ptestfilter1 add partition (c='US')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@ptestfilter1
+POSTHOOK: Output: default@ptestfilter1@c=US
+PREHOOK: query: show partitions ptestfilter1
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@ptestfilter1
+POSTHOOK: query: show partitions ptestfilter1
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@ptestfilter1
+c=US
+FAILED: SemanticException Cannot use > in a default partition spec; only '=' and '!=' are allowed.

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/test/results/clientpositive/drop_partitions_filter4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/drop_partitions_filter4.q.out b/ql/src/test/results/clientpositive/drop_partitions_filter4.q.out
index 0e6d41a..ba69b6d 100644
--- a/ql/src/test/results/clientpositive/drop_partitions_filter4.q.out
+++ b/ql/src/test/results/clientpositive/drop_partitions_filter4.q.out
@@ -62,6 +62,31 @@ POSTHOOK: query: show partitions ptestfilter
 POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@ptestfilter
 c=5.55
+PREHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c) select 'Col1', 1, null
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ptestfilter
+POSTHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c) select 'Col1', 1, null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ptestfilter@c=__HIVE_DEFAULT_PARTITION__
+POSTHOOK: Lineage: ptestfilter PARTITION(c=__HIVE_DEFAULT_PARTITION__).a SIMPLE []
+POSTHOOK: Lineage: ptestfilter PARTITION(c=__HIVE_DEFAULT_PARTITION__).b SIMPLE []
+PREHOOK: query: alter table ptestfilter drop partition(c != '__HIVE_DEFAULT_PARTITION__')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Output: default@ptestfilter@c=5.55
+POSTHOOK: query: alter table ptestfilter drop partition(c != '__HIVE_DEFAULT_PARTITION__')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=5.55
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@ptestfilter
+c=__HIVE_DEFAULT_PARTITION__
 PREHOOK: query: drop table ptestfilter
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@ptestfilter
@@ -157,6 +182,52 @@ PREHOOK: Input: default@ptestfilter
 POSTHOOK: query: show partitions ptestfilter
 POSTHOOK: type: SHOWPARTITIONS
 POSTHOOK: Input: default@ptestfilter
+PREHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c,d) select 'Col2', 2, null, 2
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ptestfilter
+POSTHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c,d) select 'Col2', 2, null, 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ptestfilter@c=__HIVE_DEFAULT_PARTITION__/d=2
+POSTHOOK: Lineage: ptestfilter PARTITION(c=__HIVE_DEFAULT_PARTITION__,d=2).a SIMPLE []
+POSTHOOK: Lineage: ptestfilter PARTITION(c=__HIVE_DEFAULT_PARTITION__,d=2).b SIMPLE []
+PREHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c,d) select 'Col2', 2, null, 3
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ptestfilter
+POSTHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c,d) select 'Col2', 2, null, 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ptestfilter@c=__HIVE_DEFAULT_PARTITION__/d=3
+POSTHOOK: Lineage: ptestfilter PARTITION(c=__HIVE_DEFAULT_PARTITION__,d=3).a SIMPLE []
+POSTHOOK: Lineage: ptestfilter PARTITION(c=__HIVE_DEFAULT_PARTITION__,d=3).b SIMPLE []
+PREHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c,d) select 'Col3', 3, 'Uganda', null
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ptestfilter
+POSTHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c,d) select 'Col3', 3, 'Uganda', null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ptestfilter@c=Uganda/d=__HIVE_DEFAULT_PARTITION__
+POSTHOOK: Lineage: ptestfilter PARTITION(c=Uganda,d=__HIVE_DEFAULT_PARTITION__).a SIMPLE []
+POSTHOOK: Lineage: ptestfilter PARTITION(c=Uganda,d=__HIVE_DEFAULT_PARTITION__).b SIMPLE []
+PREHOOK: query: alter table ptestfilter drop partition (d != 3)
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Output: default@ptestfilter@c=__HIVE_DEFAULT_PARTITION__/d=2
+POSTHOOK: query: alter table ptestfilter drop partition (d != 3)
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter@c=__HIVE_DEFAULT_PARTITION__/d=2
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@ptestfilter
+c=Uganda/d=__HIVE_DEFAULT_PARTITION__
+c=__HIVE_DEFAULT_PARTITION__/d=3
 PREHOOK: query: drop table ptestfilter
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@ptestfilter

http://git-wip-us.apache.org/repos/asf/hive/blob/5bf52be6/ql/src/test/results/clientpositive/partitions_filter_default.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/partitions_filter_default.q.out b/ql/src/test/results/clientpositive/partitions_filter_default.q.out
new file mode 100644
index 0000000..d24c4fc
--- /dev/null
+++ b/ql/src/test/results/clientpositive/partitions_filter_default.q.out
@@ -0,0 +1,67 @@
+PREHOOK: query: create table ptestfilter (a string) partitioned by (c int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ptestfilter
+POSTHOOK: query: create table ptestfilter (a string) partitioned by (c int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ptestfilter
+PREHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c) select 'Col1', null
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ptestfilter
+POSTHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c) select 'Col1', null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ptestfilter@c=__HIVE_DEFAULT_PARTITION__
+POSTHOOK: Lineage: ptestfilter PARTITION(c=__HIVE_DEFAULT_PARTITION__).a SIMPLE []
+PREHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c) select 'Col2', 5
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ptestfilter
+POSTHOOK: query: INSERT OVERWRITE TABLE ptestfilter PARTITION (c) select 'Col2', 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ptestfilter@c=5
+POSTHOOK: Lineage: ptestfilter PARTITION(c=5).a SIMPLE []
+PREHOOK: query: show partitions ptestfilter
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@ptestfilter
+POSTHOOK: query: show partitions ptestfilter
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@ptestfilter
+c=5
+c=__HIVE_DEFAULT_PARTITION__
+PREHOOK: query: select * from ptestfilter
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Input: default@ptestfilter@c=5
+PREHOOK: Input: default@ptestfilter@c=__HIVE_DEFAULT_PARTITION__
+#### A masked pattern was here ####
+POSTHOOK: query: select * from ptestfilter
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Input: default@ptestfilter@c=5
+POSTHOOK: Input: default@ptestfilter@c=__HIVE_DEFAULT_PARTITION__
+#### A masked pattern was here ####
+Col2	5
+Col1	NULL
+PREHOOK: query: select * from ptestfilter where c between 2 and 6
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Input: default@ptestfilter@c=5
+#### A masked pattern was here ####
+POSTHOOK: query: select * from ptestfilter where c between 2 and 6
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Input: default@ptestfilter@c=5
+#### A masked pattern was here ####
+Col2	5
+PREHOOK: query: drop table ptestfilter
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@ptestfilter
+PREHOOK: Output: default@ptestfilter
+POSTHOOK: query: drop table ptestfilter
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@ptestfilter
+POSTHOOK: Output: default@ptestfilter