Posted to commits@hive.apache.org by zs...@apache.org on 2009/06/19 03:53:30 UTC

svn commit: r786352 [1/2] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/java/org/apache/hadoop/hive/ql/udf/ ql/src/java/org/apache...

Author: zshao
Date: Fri Jun 19 01:53:28 2009
New Revision: 786352

URL: http://svn.apache.org/viewvc?rev=786352&view=rev
Log:
HIVE-521. Move size, if, isnull, isnotnull to GenericUDF. (Min Zhou via zshao)

Added:
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentLengthException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_not_bool.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_args_len.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_type.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_args_len.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_type.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_if.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_isnull_isnotnull.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_size.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_not_bool.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_args_len.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_type.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_args_len.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_type.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_size.q.out
Removed:
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFIf.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf6.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_coalesce.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Fri Jun 19 01:53:28 2009
@@ -75,6 +75,9 @@
 
     HIVE-542. UDF: Faster String Like. (Yuntao Jia via zshao)
 
+    HIVE-521. Move size, if, isnull, isnotnull to GenericUDF.
+    (Min Zhou via zshao)
+
   OPTIMIZATIONS
 
     HIVE-279. Predicate Pushdown support (Prasad Chakka via athusoo).

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Fri Jun 19 01:53:28 2009
@@ -52,7 +52,7 @@
     registerUDF("concat", UDFConcat.class, OperatorType.PREFIX, false);
     registerUDF("substr", UDFSubstr.class, OperatorType.PREFIX, false);
 
-    registerUDF("size", UDFSize.class, OperatorType.PREFIX, false);
+    registerGenericUDF("size", GenericUDFSize.class);
 
     registerUDF("round", UDFRound.class, OperatorType.PREFIX, false);
     registerUDF("floor", UDFFloor.class, OperatorType.PREFIX, false);
@@ -128,10 +128,10 @@
     registerUDF("not", UDFOPNot.class, OperatorType.PREFIX, true);
     registerUDF("!", UDFOPNot.class, OperatorType.PREFIX, true, "not");
 
-    registerUDF("isnull", UDFOPNull.class, OperatorType.POSTFIX, true, "is null");
-    registerUDF("isnotnull", UDFOPNotNull.class, OperatorType.POSTFIX, true, "is not null");
+    registerGenericUDF("isnull", GenericUDFOPNull.class);
+    registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);
 
-    registerUDF("if", UDFIf.class, OperatorType.PREFIX, true);
+    registerGenericUDF("if", GenericUDFIf.class);
 
     // Aliases for Java Class Names
     // These are used in getImplicitConvertUDFMethod
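
For context, a rough sketch (not part of this commit) of the old reflection-based UDF style that size, if, isnull and isnotnull are being moved away from. The class below is invented purely for illustration: an old-style UDF fixes its argument and return types in the signature of evaluate(), which Hive resolves by reflection, whereas a GenericUDF (as registered above) inspects and validates its arguments through ObjectInspectors when the plan is compiled.

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

// Hypothetical old-style UDF, shown only to contrast with the GenericUDFs
// added later in this commit.
public class UDFExampleStringLength extends UDF {
  private final IntWritable result = new IntWritable();

  // Argument and return types are fixed here; Hive finds this method by
  // reflection at query compile time.
  public IntWritable evaluate(Text s) {
    if (s == null) {
      return null;
    }
    result.set(s.getLength()); // byte length of the UTF-8 encoded string
    return result;
  }
}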

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java Fri Jun 19 01:53:28 2009
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Exception thrown when a UDF argument is invalid.
+ */
+public class UDFArgumentException extends HiveException {
+
+  public UDFArgumentException() {
+    super();
+  }
+  
+  public UDFArgumentException(String message) {
+    super(message);
+  }
+}

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentLengthException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentLengthException.java?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentLengthException.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentLengthException.java Fri Jun 19 01:53:28 2009
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+/**
+ * Exception thrown when a UDF receives the wrong number of arguments.
+ */
+public class UDFArgumentLengthException extends UDFArgumentException {
+  public UDFArgumentLengthException(String message) {
+    super(message);
+  }
+}

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java Fri Jun 19 01:53:28 2009
@@ -18,13 +18,12 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**                                                                                     
- * Generic exception class for Hive
+ * Exception thrown when a UDF argument has the wrong type.
  */
 
-public class UDFArgumentTypeException extends HiveException {
+public class UDFArgumentTypeException extends UDFArgumentException {
   
   int argumentId;
   
@@ -41,4 +40,3 @@
     return argumentId;
   }
 }
-
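
With UDFArgumentTypeException now extending the new UDFArgumentException, the more specific length and type exceptions can be caught first and the base class used as a fallback, which is the pattern the PartitionPruner and TypeCheckProcFactory hunks below add. A minimal sketch of that pattern, using only constructors that appear elsewhere in this commit; the class name and messages are invented for illustration:

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;

public class ArgumentCheckExample {

  // The kind of validation the new GenericUDFs perform in initialize().
  static void checkArguments(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
      throw new UDFArgumentLengthException("Exactly 1 argument is expected.");
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(0, "A primitive argument is expected.");
    }
  }

  // The kind of mapping the semantic analyzer does: most specific exception
  // first, UDFArgumentException as the catch-all.
  static String describeFailure(ObjectInspector[] arguments) {
    try {
      checkArguments(arguments);
      return "ok";
    } catch (UDFArgumentLengthException e) {
      return "Arguments Length Mismatch: " + e.getMessage(); // ErrorMsg.INVALID_ARGUMENT_LENGTH
    } catch (UDFArgumentTypeException e) {
      return "Argument Type Mismatch: " + e.getMessage();    // ErrorMsg.INVALID_ARGUMENT_TYPE
    } catch (UDFArgumentException e) {
      return "Wrong Arguments: " + e.getMessage();           // ErrorMsg.INVALID_ARGUMENT
    }
  }
}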

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Fri Jun 19 01:53:28 2009
@@ -38,6 +38,8 @@
   INVALID_FUNCTION("Invalid Function"),
   INVALID_FUNCTION_SIGNATURE("Function Argument Type Mismatch"),
   INVALID_OPERATOR_SIGNATURE("Operator Argument Type Mismatch"),
+  INVALID_ARGUMENT("Wrong Arguments"),
+  INVALID_ARGUMENT_LENGTH("Arguments Length Mismatch"),
   INVALID_ARGUMENT_TYPE("Argument Type Mismatch"),
   INVALID_JOIN_CONDITION_1("Both Left and Right Aliases Encountered in Join"),
   INVALID_JOIN_CONDITION_2("Neither Left nor Right Aliases Encountered in Join"),

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java Fri Jun 19 01:53:28 2009
@@ -24,6 +24,8 @@
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -252,6 +254,12 @@
           } catch (UDFArgumentTypeException e) {
             throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_TYPE
                 .getMsg(expr.getChild(childrenBegin + e.getArgumentId()), e.getMessage()));
+          } catch (UDFArgumentLengthException e) {
+            throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_LENGTH
+                .getMsg(expr, e.getMessage()));
+          } catch (UDFArgumentException e) {
+            throw new SemanticException(ErrorMsg.INVALID_ARGUMENT
+                .getMsg(expr, e.getMessage()));
           }
           
           if (desc instanceof exprNodeFuncDesc && (

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Fri Jun 19 01:53:28 2009
@@ -31,6 +31,8 @@
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
@@ -413,12 +415,12 @@
      * @param name
      * @param children
      * @return The expression node descriptor
-     * @throws UDFArgumentTypeException 
+     * @throws UDFArgumentException 
      */
     public static exprNodeDesc getFuncExprNodeDesc(String name, exprNodeDesc... children) {
       try {
         return getFuncExprNodeDesc(name, Arrays.asList(children));
-      } catch (UDFArgumentTypeException e) {
+      } catch (UDFArgumentException e) {
         throw new RuntimeException("Hive 2 internal error", e);
       }
     }
@@ -429,7 +431,7 @@
      * @throws SemanticException 
      */
     public static exprNodeDesc getFuncExprNodeDesc(String udfName, List<exprNodeDesc> children)
-        throws UDFArgumentTypeException {
+        throws UDFArgumentException {
 
       FunctionInfo fi = FunctionRegistry.getFunctionInfo(udfName);
       if (fi == null) return null;
@@ -471,7 +473,7 @@
 
     static exprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr, boolean isFunction,
         ArrayList<exprNodeDesc> children)
-        throws SemanticException, UDFArgumentTypeException {
+        throws SemanticException, UDFArgumentException {
       // return the child directly if the conversion is redundant.
       if (isRedundantConversionFunction(expr, isFunction, children)) {
         assert(children.size() == 1);
@@ -654,6 +656,12 @@
       } catch (UDFArgumentTypeException e) {
         throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_TYPE
             .getMsg(expr.getChild(childrenBegin + e.getArgumentId()), e.getMessage()));
+      } catch (UDFArgumentLengthException e) {
+        throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_LENGTH
+            .getMsg(expr, e.getMessage()));
+      } catch (UDFArgumentException e) {
+        throw new SemanticException(ErrorMsg.INVALID_ARGUMENT
+            .getMsg(expr, e.getMessage()));
       }
     }
     

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java Fri Jun 19 01:53:28 2009
@@ -25,7 +25,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
@@ -121,10 +121,10 @@
   /**
    * Create a exprNodeGenericFuncDesc based on the genericUDFClass and the children
    * parameters.
-   * @throws UDFArgumentTypeException
+   * @throws UDFArgumentException
    */
   public static exprNodeGenericFuncDesc newInstance(Class<? extends GenericUDF> genericUDFClass, 
-      List<exprNodeDesc> children) throws UDFArgumentTypeException {
+      List<exprNodeDesc> children) throws UDFArgumentException {
     ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
     for(int i=0; i<childrenOIs.length; i++) {
       childrenOIs[i] = TypeInfoUtils.getStandardObjectInspectorFromTypeInfo(

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java Fri Jun 19 01:53:28 2009
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -59,12 +59,12 @@
    * GenericUDF instance.
    * 
    * @param arguments     The ObjectInspector for the arguments
-   * @throws UDFArgumentTypeException
-   *                      Thrown when arguments have wrong types
+   * @throws UDFArgumentException
+   *                      Thrown when arguments have wrong types, wrong length, etc.
    * @return              The ObjectInspector for the return value
    */
   public abstract ObjectInspector initialize(ObjectInspector[] arguments) 
-      throws UDFArgumentTypeException;
+      throws UDFArgumentException;
   
   /**
    * Evaluate the GenericUDF with the arguments.

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java Fri Jun 19 01:53:28 2009
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+
+/**
+ * IF(expr1,expr2,expr3) <br>
+ * If expr1 is TRUE (expr1 <> 0 and expr1 <> NULL) then IF() returns expr2; otherwise it returns expr3. 
+ * IF() returns a numeric or string value, depending on the context in which it is used. 
+ */
+public class GenericUDFIf extends GenericUDF {
+
+  ObjectInspector[] argumentOIs;
+  GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+    this.argumentOIs = arguments;
+    returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver();
+
+    if (arguments.length != 3) {
+      throw new UDFArgumentLengthException(
+          "The function IF(expr1,expr2,expr3) accepts exactly 3 arguments.");
+    }
+
+    boolean conditionTypeIsOk = (arguments[0].getCategory() == ObjectInspector.Category.PRIMITIVE);
+    if (conditionTypeIsOk) {
+      PrimitiveObjectInspector poi = ((PrimitiveObjectInspector)arguments[0]);
+      conditionTypeIsOk = (poi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN
+                           || poi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.VOID);
+    }
+    if (!conditionTypeIsOk) {
+      throw new UDFArgumentTypeException(0,
+          "The first argument of function IF should be \"" + Constants.BOOLEAN_TYPE_NAME
+          + "\", but \"" + arguments[0].getTypeName() + "\" is found");
+    }
+
+    if( !(returnOIResolver.update(arguments[1]) 
+         && returnOIResolver.update(arguments[2])) ) {
+      throw new UDFArgumentTypeException(2,
+          "The second and the third arguments of function IF should have the same type, " +
+          "but they are different: \"" + arguments[1].getTypeName() 
+          + "\" and \"" + arguments[2].getTypeName() + "\"");
+    }
+
+    return returnOIResolver.get();
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    Object condition = arguments[0].get();
+    if(condition != null && ((BooleanObjectInspector)argumentOIs[0]).get(condition)) {
+      return returnOIResolver.convertIfNecessary(arguments[1].get(),
+          argumentOIs[1]);
+    } else {
+      return returnOIResolver.convertIfNecessary(arguments[2].get(),
+          argumentOIs[2]);
+    }
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 3);
+    StringBuilder sb = new StringBuilder();
+    sb.append("if(");
+    sb.append(children[0]).append(", ");
+    sb.append(children[1]).append(", ");
+    sb.append(children[2]).append(")");
+    return sb.toString();
+  }
+
+}

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java Fri Jun 19 01:53:28 2009
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.BooleanWritable;
+
+public class GenericUDFOPNotNull extends GenericUDF {
+
+  BooleanWritable result = new BooleanWritable();
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+    if (arguments.length != 1) {
+      throw new UDFArgumentLengthException(
+          "The operator 'IS NOT NULL' only accepts 1 argument.");
+    }
+    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    result.set(arguments[0].get() != null);
+    return result;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    return children[0] + " is not null";
+  }
+
+}

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java Fri Jun 19 01:53:28 2009
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.BooleanWritable;
+
+public class GenericUDFOPNull extends GenericUDF {
+
+  BooleanWritable result = new BooleanWritable();
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+    if (arguments.length != 1) {
+      throw new UDFArgumentLengthException(
+          "The operator 'IS NULL' only accepts 1 argument.");
+    }
+    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    result.set(arguments[0].get() == null);
+    return result;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert (children.length == 1);
+    return children[0] + " is null";
+  }
+
+}
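
To make the two-phase GenericUDF contract concrete, here is a hypothetical driver snippet (not part of this commit) that exercises GenericUDFOPNull directly: initialize() is called once with the argument ObjectInspectors and returns the ObjectInspector for the result, then evaluate() is called per row with lazily evaluated DeferredObjects. It assumes DeferredObject is the single-method interface of this API version and that PrimitiveObjectInspectorFactory exposes a javaStringObjectInspector field; both assumptions are for illustration only.

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class GenericUDFDriverExample {
  public static void main(String[] args) throws HiveException {
    GenericUDF isNull = new GenericUDFOPNull();

    // Plan time: describe the single argument and learn the return type
    // (a writable boolean ObjectInspector).
    ObjectInspector returnOI = isNull.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.javaStringObjectInspector });

    // Run time: the argument value is handed over lazily.
    GenericUDF.DeferredObject arg = new GenericUDF.DeferredObject() {
      public Object get() throws HiveException {
        return null; // the value whose null-ness IS NULL tests
      }
    };
    Object result = isNull.evaluate(new GenericUDF.DeferredObject[] { arg });
    System.out.println(returnOI.getTypeName() + ": " + result); // prints "boolean: true"
  }
}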

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java Fri Jun 19 01:53:28 2009
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.IntWritable;
+
+public class GenericUDFSize extends GenericUDF {
+  private ObjectInspector returnOI;
+  private IntWritable result = new IntWritable(-1);
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentException {
+    if (arguments.length != 1) {
+      throw new UDFArgumentLengthException(
+          "The function SIZE only accepts 1 argument.");
+    }
+    Category category = arguments[0].getCategory();
+    String typeName = arguments[0].getTypeName();
+    if (category != Category.MAP 
+        && category != Category.LIST 
+        && !typeName.equals(Constants.VOID_TYPE_NAME)) {
+      throw new UDFArgumentTypeException(0 ,
+          "\"" + Category.MAP.toString().toLowerCase() 
+          + "\" or \"" + Category.LIST.toString().toLowerCase()  + "\" is expected at function SIZE, "
+          + "but \"" + arguments[0].getTypeName() + "\" is found");
+    }
+    
+    returnOI = arguments[0];
+    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    Object data = arguments[0].get();
+    if(returnOI.getCategory() == Category.MAP){
+      result.set(((MapObjectInspector) returnOI).getMapSize(data));
+    } else if(returnOI.getCategory() == Category.LIST){
+      result.set(((ListObjectInspector) returnOI).getListLength(data));
+    } else if(returnOI.getTypeName().equals(Constants.VOID_TYPE_NAME)) {
+      // null
+      result.set(-1);
+    }
+    return result;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert(children.length == 1);
+    return "size(" + children[0] + ")";
+  }
+}

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_not_bool.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_not_bool.q?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_not_bool.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_not_bool.q Fri Jun 19 01:53:28 2009
@@ -0,0 +1 @@
+SELECT IF('STRING', 1, 1) FROM src;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_args_len.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_args_len.q?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_args_len.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_args_len.q Fri Jun 19 01:53:28 2009
@@ -0,0 +1 @@
+SELECT IF(TRUE, 1) FROM src;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_type.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_type.q?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_type.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_if_wrong_type.q Fri Jun 19 01:53:28 2009
@@ -0,0 +1 @@
+SELECT IF(1=1, 'STRING', 1) FROM src;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_args_len.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_args_len.q?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_args_len.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_args_len.q Fri Jun 19 01:53:28 2009
@@ -0,0 +1,5 @@
+FROM src_thrift
+SELECT size(src_thrift.lint, src_thrift.lintstring), 
+       size()
+WHERE  src_thrift.lint IS NOT NULL 
+       AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_type.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_type.q?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_type.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_size_wrong_type.q Fri Jun 19 01:53:28 2009
@@ -0,0 +1 @@
+SELECT SIZE('wrong type: string') FROM src;

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf6.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf6.q?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf6.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf6.q Fri Jun 19 01:53:28 2009
@@ -9,14 +9,15 @@
 
 EXPLAIN
 SELECT IF(TRUE, 1, 2), IF(FALSE, 1, 2), IF(NULL, 1, 2), IF(TRUE, "a", "b"),
-       IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), 2),
-       IF(FALSE, CAST(127 AS TINYINT), 128), IF(FALSE, CAST(127 AS TINYINT),
-       CAST(128 AS SMALLINT)), CAST(128 AS INT), CAST(1.0 AS DOUBLE),
+       IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), CAST(2 AS BIGINT)),
+       IF(FALSE, CAST(127 AS TINYINT), CAST(126 AS TINYINT)),
+       IF(FALSE, CAST(127 AS SMALLINT), CAST(128 AS SMALLINT)),
+       CAST(128 AS INT), CAST(1.0 AS DOUBLE),
        CAST('128' AS STRING) FROM dest1;
 
 SELECT IF(TRUE, 1, 2), IF(FALSE, 1, 2), IF(NULL, 1, 2), IF(TRUE, "a", "b"),
-       IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), 2),
-       IF(FALSE, CAST(127 AS TINYINT), 128), IF(FALSE, CAST(127 AS TINYINT),
-       CAST(128 AS SMALLINT)), CAST(128 AS INT), CAST(1.0 AS DOUBLE),
+       IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), CAST(2 AS BIGINT)),
+       IF(FALSE, CAST(127 AS TINYINT), CAST(126 AS TINYINT)),
+       IF(FALSE, CAST(127 AS SMALLINT), CAST(128 AS SMALLINT)),
+       CAST(128 AS INT), CAST(1.0 AS DOUBLE),
        CAST('128' AS STRING) FROM dest1;
-

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_if.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_if.q?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_if.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_if.q Fri Jun 19 01:53:28 2009
@@ -0,0 +1,18 @@
+EXPLAIN
+SELECT IF(TRUE, 1, 2) AS COL1,
+       IF(FALSE, CAST(NULL AS STRING), CAST(1 AS STRING)) AS COL2,
+       IF(1=1, IF(2=2, 1, 2), IF(3=3, 3, 4)) AS COL3,
+       IF(2=2, 1, NULL) AS COL4,
+       IF(2=2, NULL, 1) AS COL5,
+       IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6
+FROM src LIMIT 1;
+
+
+SELECT IF(TRUE, 1, 2) AS COL1,
+       IF(FALSE, CAST(NULL AS STRING), CAST(1 AS STRING)) AS COL2,
+       IF(1=1, IF(2=2, 1, 2), IF(3=3, 3, 4)) AS COL3,
+       IF(2=2, 1, NULL) AS COL4,
+       IF(2=2, NULL, 1) AS COL5,
+       IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6
+FROM src LIMIT 1;
+

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_isnull_isnotnull.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_isnull_isnotnull.q?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_isnull_isnotnull.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_isnull_isnotnull.q Fri Jun 19 01:53:28 2009
@@ -0,0 +1,30 @@
+EXPLAIN
+SELECT NULL IS NULL,
+       1 IS NOT NULL, 
+       'my string' IS NOT NULL
+FROM src
+WHERE true IS NOT NULL LIMIT 1;
+
+
+SELECT NULL IS NULL,
+       1 IS NOT NULL, 
+       'my string' IS NOT NULL
+FROM src
+WHERE true IS NOT NULL LIMIT 1;
+
+
+EXPLAIN
+FROM src_thrift
+SELECT src_thrift.lint IS NOT NULL, 
+       src_thrift.lintstring IS NOT NULL, 
+       src_thrift.mstringstring IS NOT NULL
+WHERE  src_thrift.lint IS NOT NULL 
+       AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1;
+
+
+FROM src_thrift
+SELECT src_thrift.lint IS NOT NULL, 
+       src_thrift.lintstring IS NOT NULL, 
+       src_thrift.mstringstring IS NOT NULL
+WHERE  src_thrift.lint IS NOT NULL 
+       AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_size.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_size.q?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_size.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_size.q Fri Jun 19 01:53:28 2009
@@ -0,0 +1,17 @@
+EXPLAIN
+FROM src_thrift
+SELECT size(src_thrift.lint), 
+       size(src_thrift.lintstring), 
+       size(src_thrift.mstringstring),
+       size(null)
+WHERE  src_thrift.lint IS NOT NULL 
+       AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1;
+
+
+FROM src_thrift
+SELECT size(src_thrift.lint), 
+       size(src_thrift.lintstring), 
+       size(src_thrift.mstringstring),
+       size(null)
+WHERE  src_thrift.lint IS NOT NULL 
+       AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_not_bool.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_not_bool.q.out?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_not_bool.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_not_bool.q.out Fri Jun 19 01:53:28 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 1:10 Argument Type Mismatch 'STRING': The first argument of function IF should be "boolean", but "string" is found

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_args_len.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_args_len.q.out?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_args_len.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_args_len.q.out Fri Jun 19 01:53:28 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 1:7 Arguments Length Mismatch 1: The function IF(expr1,expr2,expr3) accepts exactly 3 arguments.

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_type.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_type.q.out?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_type.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_if_wrong_type.q.out Fri Jun 19 01:53:28 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 1:25 Argument Type Mismatch 1: The second and the third arguments of function IF should have the same type, but they are different: "string" and "int"

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_args_len.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_args_len.q.out?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_args_len.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_args_len.q.out Fri Jun 19 01:53:28 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 2:7 Arguments Length Mismatch lintstring: The function SIZE only accepts 1 argument.

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_type.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_type.q.out?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_type.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_size_wrong_type.q.out Fri Jun 19 01:53:28 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 1:12 Argument Type Mismatch 'wrong type: string': "map" or "list" is expected at function SIZE, but "string" is found

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/udf6.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/udf6.q.out?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/udf6.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/udf6.q.out Fri Jun 19 01:53:28 2009
@@ -18,7 +18,7 @@
         dest1 
             Select Operator
               expressions:
-                    expr: if true, 1, 2
+                    expr: if(true, 1, 2)
                     type: int
               File Output Operator
                 compressed: false
@@ -34,16 +34,17 @@
 
 query: SELECT IF(TRUE, 1, 2) FROM dest1
 Input: default/dest1
-Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/923424840/10000
+Output: file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/tmp/1130815402/10000
 1
 query: EXPLAIN
 SELECT IF(TRUE, 1, 2), IF(FALSE, 1, 2), IF(NULL, 1, 2), IF(TRUE, "a", "b"),
-       IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), 2),
-       IF(FALSE, CAST(127 AS TINYINT), 128), IF(FALSE, CAST(127 AS TINYINT),
-       CAST(128 AS SMALLINT)), CAST(128 AS INT), CAST(1.0 AS DOUBLE),
+       IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), CAST(2 AS BIGINT)),
+       IF(FALSE, CAST(127 AS TINYINT), CAST(126 AS TINYINT)),
+       IF(FALSE, CAST(127 AS SMALLINT), CAST(128 AS SMALLINT)),
+       CAST(128 AS INT), CAST(1.0 AS DOUBLE),
        CAST('128' AS STRING) FROM dest1
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF TOK_NULL 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF TRUE "a" "b")) (TOK_SELEXPR (TOK_FUNCTION IF TRUE 0.1 0.2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_BIGINT 1) 2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_TINYINT 127) 128)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_TINYINT 127) (TOK_FUNCTION TOK_SMALLINT 128))) (TOK_SELEXPR (TOK_FUNCTION TOK_INT 128)) (TOK_SELEXPR (TOK_FUNCTION TOK_DOUBLE 1.0)) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING '128')))))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF dest1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF TOK_NULL 1 2)) (TOK_SELEXPR (TOK_FUNCTION IF TRUE "a" "b")) (TOK_SELEXPR (TOK_FUNCTION IF TRUE 0.1 0.2)) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_BIGINT 1) (TOK_FUNCTION TOK_BIGINT 2))) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_TINYINT 127) (TOK_FUNCTION TOK_TINYINT 126))) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_SMALLINT 127) (TOK_FUNCTION TOK_SMALLINT 128))) (TOK_SELEXPR (TOK_FUNCTION TOK_INT 128)) (TOK_SELEXPR (TOK_FUNCTION TOK_DOUBLE 1.0)) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING '128')))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -56,21 +57,21 @@
         dest1 
             Select Operator
               expressions:
-                    expr: if true, 1, 2
+                    expr: if(true, 1, 2)
                     type: int
-                    expr: if false, 1, 2
+                    expr: if(false, 1, 2)
                     type: int
-                    expr: if null, 1, 2
+                    expr: if(null, 1, 2)
                     type: int
-                    expr: if true, 'a', 'b'
+                    expr: if(true, 'a', 'b')
                     type: string
-                    expr: if true, 0.1, 0.2
+                    expr: if(true, 0.1, 0.2)
                     type: double
-                    expr: if false, UDFToLong(1), UDFToLong(2)
+                    expr: if(false, UDFToLong(1), UDFToLong(2))
                     type: bigint
-                    expr: if false, UDFToInteger(UDFToByte(127)), 128
-                    type: int
-                    expr: if false, UDFToShort(UDFToByte(127)), UDFToShort(128)
+                    expr: if(false, UDFToByte(127), UDFToByte(126))
+                    type: tinyint
+                    expr: if(false, UDFToShort(127), UDFToShort(128))
                     type: smallint
                     expr: 128
                     type: int
@@ -91,10 +92,11 @@
 
 
 query: SELECT IF(TRUE, 1, 2), IF(FALSE, 1, 2), IF(NULL, 1, 2), IF(TRUE, "a", "b"),
-       IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), 2),
-       IF(FALSE, CAST(127 AS TINYINT), 128), IF(FALSE, CAST(127 AS TINYINT),
-       CAST(128 AS SMALLINT)), CAST(128 AS INT), CAST(1.0 AS DOUBLE),
+       IF(TRUE, 0.1, 0.2), IF(FALSE, CAST(1 AS BIGINT), CAST(2 AS BIGINT)),
+       IF(FALSE, CAST(127 AS TINYINT), CAST(126 AS TINYINT)),
+       IF(FALSE, CAST(127 AS SMALLINT), CAST(128 AS SMALLINT)),
+       CAST(128 AS INT), CAST(1.0 AS DOUBLE),
        CAST('128' AS STRING) FROM dest1
 Input: default/dest1
-Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1340342481/10000
-1	2	2	a	0.1	2	128	128	128	1.0	128
+Output: file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/tmp/1171008206/10000
+1	2	2	a	0.1	2	126	128	128	1.0	128

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_coalesce.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_coalesce.q.out?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_coalesce.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_coalesce.q.out Fri Jun 19 01:53:28 2009
@@ -66,7 +66,7 @@
                     type: double
                     expr: COALESCE(2.0,null,3.0)
                     type: double
-                    expr: COALESCE(if true, null, 0,null)
+                    expr: COALESCE(if(true, null, 0),null)
                     type: int
               Limit
                 File Output Operator
@@ -101,7 +101,7 @@
        COALESCE(IF(TRUE, NULL, 0), NULL)
 FROM src LIMIT 1
 Input: default/src
-Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1424226650/10000
+Output: file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/tmp/981408434/10000
 1	1	2	1	3	4	1	1	2	1	3	4	1.0	1.0	2.0	2.0	2.0	NULL
 query: EXPLAIN
 SELECT COALESCE(src_thrift.lint[1], 999),
@@ -145,7 +145,7 @@
        COALESCE(src_thrift.mstringstring['key_2'], '999')
 FROM src_thrift
 Input: default/src_thrift
-Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/300664928/10000
+Output: file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/tmp/1429922481/10000
 0	0	999
 2	1	999
 4	8	value_2

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_if.q.out Fri Jun 19 01:53:28 2009
@@ -0,0 +1,57 @@
+query: EXPLAIN
+SELECT IF(TRUE, 1, 2) AS COL1,
+       IF(FALSE, CAST(NULL AS STRING), CAST(1 AS STRING)) AS COL2,
+       IF(1=1, IF(2=2, 1, 2), IF(3=3, 3, 4)) AS COL3,
+       IF(2=2, 1, NULL) AS COL4,
+       IF(2=2, NULL, 1) AS COL5,
+       IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6
+FROM src LIMIT 1
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION IF TRUE 1 2) COL1) (TOK_SELEXPR (TOK_FUNCTION IF FALSE (TOK_FUNCTION TOK_STRING TOK_NULL) (TOK_FUNCTION TOK_STRING 1)) COL2) (TOK_SELEXPR (TOK_FUNCTION IF (= 1 1) (TOK_FUNCTION IF (= 2 2) 1 2) (TOK_FUNCTION IF (= 3 3) 3 4)) COL3) (TOK_SELEXPR (TOK_FUNCTION IF (= 2 2) 1 TOK_NULL) COL4) (TOK_SELEXPR (TOK_FUNCTION IF (= 2 2) TOK_NULL 1) COL5) (TOK_SELEXPR (TOK_FUNCTION IF (TOK_FUNCTION IF TRUE TOK_NULL FALSE) 1 2) COL6)) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+            Select Operator
+              expressions:
+                    expr: if(true, 1, 2)
+                    type: int
+                    expr: if(false, UDFToString(null), UDFToString(1))
+                    type: string
+                    expr: if((1 = 1), if((2 = 2), 1, 2), if((3 = 3), 3, 4))
+                    type: int
+                    expr: if((2 = 2), 1, null)
+                    type: int
+                    expr: if((2 = 2), null, 1)
+                    type: int
+                    expr: if(if(true, null, false), 1, 2)
+                    type: int
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+query: SELECT IF(TRUE, 1, 2) AS COL1,
+       IF(FALSE, CAST(NULL AS STRING), CAST(1 AS STRING)) AS COL2,
+       IF(1=1, IF(2=2, 1, 2), IF(3=3, 3, 4)) AS COL3,
+       IF(2=2, 1, NULL) AS COL4,
+       IF(2=2, NULL, 1) AS COL5,
+       IF(IF(TRUE, NULL, FALSE), 1, 2) AS COL6
+FROM src LIMIT 1
+Input: default/src
+Output: file:/home/zhoumin/hive-trunk/build/ql/tmp/293930288/10000
+1	1	1	1	NULL	2

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out Fri Jun 19 01:53:28 2009
@@ -0,0 +1,104 @@
+query: EXPLAIN
+SELECT NULL IS NULL,
+       1 IS NOT NULL, 
+       'my string' IS NOT NULL
+FROM src
+WHERE true IS NOT NULL LIMIT 1
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION TOK_ISNULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL 1)) (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL 'my string'))) (TOK_WHERE (TOK_FUNCTION TOK_ISNOTNULL true)) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+            Filter Operator
+              predicate:
+                  expr: true is not null
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: null is null
+                      type: boolean
+                      expr: 1 is not null
+                      type: boolean
+                      expr: 'my string' is not null
+                      type: boolean
+                Limit
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+query: SELECT NULL IS NULL,
+       1 IS NOT NULL, 
+       'my string' IS NOT NULL
+FROM src
+WHERE true IS NOT NULL LIMIT 1
+Input: default/src
+Output: file:/home/zhoumin/hive-trunk/build/ql/tmp/1515895443/10000
+true	true	true
+query: EXPLAIN
+FROM src_thrift
+SELECT src_thrift.lint IS NOT NULL, 
+       src_thrift.lintstring IS NOT NULL, 
+       src_thrift.mstringstring IS NOT NULL
+WHERE  src_thrift.lint IS NOT NULL 
+       AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) lint))) (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) lintstring))) (TOK_SELEXPR (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) mstringstring)))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) lint)) (NOT (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL src_thrift) mstringstring))))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src_thrift 
+            Filter Operator
+              predicate:
+                  expr: (lint is not null and not mstringstring is null)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: lint is not null
+                      type: boolean
+                      expr: lintstring is not null
+                      type: boolean
+                      expr: mstringstring is not null
+                      type: boolean
+                Limit
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+query: FROM src_thrift
+SELECT src_thrift.lint IS NOT NULL, 
+       src_thrift.lintstring IS NOT NULL, 
+       src_thrift.mstringstring IS NOT NULL
+WHERE  src_thrift.lint IS NOT NULL 
+       AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1
+Input: default/src_thrift
+Output: file:/home/zhoumin/hive-trunk/build/ql/tmp/302479509/10000
+true	true	true
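
IS NULL and IS NOT NULL are the simplest of the functions exercised here: initialize() can accept a single argument of any type and always report a boolean result, and evaluate() only has to test the deferred value for null. A sketch in that spirit follows; it is illustrative rather than the committed GenericUDFOPNull/GenericUDFOPNotNull referenced in the plan XML further down, and the class name IsNullSketch is invented for this note.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;

public class IsNullSketch extends GenericUDF {
  // Reused output object, so evaluate() allocates nothing per row.
  private final BooleanWritable result = new BooleanWritable();

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
      throw new UDFArgumentLengthException("IS NULL takes exactly one argument");
    }
    // Assumes the standard writable boolean inspector from serde2.
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    result.set(arguments[0].get() == null); // IS NOT NULL is the same test, negated
    return result;
  }

  @Override
  public String getDisplayString(String[] children) {
    return children[0] + " is null";
  }
}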

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_size.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_size.q.out?rev=786352&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_size.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_size.q.out Fri Jun 19 01:53:28 2009
@@ -0,0 +1,57 @@
+query: EXPLAIN
+FROM src_thrift
+SELECT size(src_thrift.lint), 
+       size(src_thrift.lintstring), 
+       size(src_thrift.mstringstring),
+       size(null)
+WHERE  src_thrift.lint IS NOT NULL 
+       AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION size (. (TOK_TABLE_OR_COL src_thrift) lint))) (TOK_SELEXPR (TOK_FUNCTION size (. (TOK_TABLE_OR_COL src_thrift) lintstring))) (TOK_SELEXPR (TOK_FUNCTION size (. (TOK_TABLE_OR_COL src_thrift) mstringstring))) (TOK_SELEXPR (TOK_FUNCTION size TOK_NULL))) (TOK_WHERE (AND (TOK_FUNCTION TOK_ISNOTNULL (. (TOK_TABLE_OR_COL src_thrift) lint)) (NOT (TOK_FUNCTION TOK_ISNULL (. (TOK_TABLE_OR_COL src_thrift) mstringstring))))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src_thrift 
+            Filter Operator
+              predicate:
+                  expr: (lint is not null and not mstringstring is null)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: size(lint)
+                      type: int
+                      expr: size(lintstring)
+                      type: int
+                      expr: size(mstringstring)
+                      type: int
+                      expr: size(null)
+                      type: int
+                Limit
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+query: FROM src_thrift
+SELECT size(src_thrift.lint), 
+       size(src_thrift.lintstring), 
+       size(src_thrift.mstringstring),
+       size(null)
+WHERE  src_thrift.lint IS NOT NULL 
+       AND NOT (src_thrift.mstringstring IS NULL) LIMIT 1
+Input: default/src_thrift
+Output: file:/home/zhoumin/hive-trunk/build/ql/tmp/639668664/10000
+3	1	1	-1
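
The result row above fixes the size() contract exercised by this test: an array reports its element count, a map its entry count, and a NULL argument yields -1 rather than an error. The sketch below mirrors that contract; it is illustrative only, not the committed GenericUDFSize referenced in the plan XML further down, the class name SizeSketch and its messages are made up, and the explicit void check is an assumption added so that the bare size(null) above is accepted.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

public class SizeSketch extends GenericUDF {
  private ObjectInspector argOI;
  private final IntWritable result = new IntWritable(-1);

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
      throw new UDFArgumentLengthException("size() takes exactly one argument");
    }
    argOI = arguments[0];
    Category category = argOI.getCategory();
    boolean isNullLiteral = "void".equals(argOI.getTypeName()); // allows size(null), as above
    if (category != Category.LIST && category != Category.MAP && !isNullLiteral) {
      throw new UDFArgumentTypeException(0, "size() only accepts arrays, maps, or NULL");
    }
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object data = arguments[0].get();
    if (data == null) {
      result.set(-1); // matches the trailing -1 in the result row above
    } else if (argOI instanceof ListObjectInspector) {
      result.set(((ListObjectInspector) argOI).getListLength(data));
    } else {
      result.set(((MapObjectInspector) argOI).getMapSize(data));
    }
    return result;
  }

  @Override
  public String getDisplayString(String[] children) {
    return "size(" + children[0] + ")";
  }
}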

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml Fri Jun 19 01:53:28 2009
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?> 
-<java version="1.6.0_03-p3" class="java.beans.XMLDecoder"> 
+<java version="1.6.0_07" class="java.beans.XMLDecoder"> 
  <object id="MapRedTask0" class="org.apache.hadoop.hive.ql.exec.MapRedTask"> 
   <void property="childTasks"> 
    <object class="java.util.ArrayList"> 
@@ -31,7 +31,7 @@
              <boolean>true</boolean> 
             </void> 
             <void property="sourceDir"> 
-             <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/373765936/10000</string> 
+             <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/tmp/1679952334/10000</string> 
             </void> 
             <void property="table"> 
              <object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -84,7 +84,7 @@
                 </void> 
                 <void method="put"> 
                  <string>location</string> 
-                 <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/dest1</string> 
+                 <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/dest1</string> 
                 </void> 
                </object> 
               </void> 
@@ -94,7 +94,7 @@
              </object> 
             </void> 
             <void property="tmpDir"> 
-             <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/373765936/10001</string> 
+             <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/tmp/1679952334/10001</string> 
             </void> 
            </object> 
           </void> 
@@ -169,7 +169,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src1</string> 
+            <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src1</string> 
            </void> 
           </object> 
          </void> 
@@ -204,7 +204,7 @@
                        <int>1</int> 
                       </void> 
                       <void property="dirName"> 
-                       <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/373765936/10000</string> 
+                       <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/tmp/1679952334/10000</string> 
                       </void> 
                       <void property="tableInfo"> 
                        <object idref="tableDesc0"/> 
@@ -325,20 +325,7 @@
             <void property="conf"> 
              <object class="org.apache.hadoop.hive.ql.plan.filterDesc"> 
               <void property="predicate"> 
-               <object class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
-                <void property="UDFClass"> 
-                 <class>org.apache.hadoop.hive.ql.udf.UDFOPNull</class> 
-                </void> 
-                <void property="UDFMethod"> 
-                 <object class="org.apache.hadoop.hive.ql.udf.UDFOPNull" method="getMethod"> 
-                  <string>evaluate</string> 
-                  <array class="java.lang.Class" length="1"> 
-                   <void index="0"> 
-                    <class>java.lang.Object</class> 
-                   </void> 
-                  </array> 
-                 </object> 
-                </void> 
+               <object class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
                 <void property="childExprs"> 
                  <object class="java.util.ArrayList"> 
                   <void method="add"> 
@@ -353,6 +340,9 @@
                   </void> 
                  </object> 
                 </void> 
+                <void property="genericUDFClass"> 
+                 <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull</class> 
+                </void> 
                 <void property="typeInfo"> 
                  <object class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
                   <void property="typeName"> 
@@ -417,7 +407,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src1</string> 
+       <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src1</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>src1</string> 
@@ -429,7 +419,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src1</string> 
+       <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src1</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml Fri Jun 19 01:53:28 2009
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?> 
-<java version="1.6.0_03-p3" class="java.beans.XMLDecoder"> 
+<java version="1.6.0_07" class="java.beans.XMLDecoder"> 
  <object class="org.apache.hadoop.hive.ql.exec.MapRedTask"> 
   <void property="id"> 
    <string>Stage-2</string> 
@@ -68,7 +68,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src_thrift</string> 
+            <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
            </void> 
           </object> 
          </void> 
@@ -100,7 +100,7 @@
                     <void property="conf"> 
                      <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                       <void property="dirName"> 
-                       <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1846910349/10001</string> 
+                       <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/tmp/484083875/10001</string> 
                       </void> 
                       <void property="tableInfo"> 
                        <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -186,20 +186,7 @@
                  <object class="java.util.HashMap"> 
                   <void method="put"> 
                    <string>_col2</string> 
-                   <object id="exprNodeFuncDesc0" class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
-                    <void property="UDFClass"> 
-                     <class>org.apache.hadoop.hive.ql.udf.UDFSize</class> 
-                    </void> 
-                    <void property="UDFMethod"> 
-                     <object class="org.apache.hadoop.hive.ql.udf.UDFSize" method="getMethod"> 
-                      <string>evaluate</string> 
-                      <array class="java.lang.Class" length="1"> 
-                       <void index="0"> 
-                        <class>java.util.Map</class> 
-                       </void> 
-                      </array> 
-                     </object> 
-                    </void> 
+                   <object id="exprNodeGenericFuncDesc0" class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
                     <void property="childExprs"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
@@ -225,6 +212,9 @@
                       </void> 
                      </object> 
                     </void> 
+                    <void property="genericUDFClass"> 
+                     <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFSize</class> 
+                    </void> 
                     <void property="typeInfo"> 
                      <object idref="PrimitiveTypeInfo0"/> 
                     </void> 
@@ -232,20 +222,7 @@
                   </void> 
                   <void method="put"> 
                    <string>_col1</string> 
-                   <object id="exprNodeFuncDesc1" class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
-                    <void property="UDFClass"> 
-                     <class>org.apache.hadoop.hive.ql.udf.UDFSize</class> 
-                    </void> 
-                    <void property="UDFMethod"> 
-                     <object class="org.apache.hadoop.hive.ql.udf.UDFSize" method="getMethod"> 
-                      <string>evaluate</string> 
-                      <array class="java.lang.Class" length="1"> 
-                       <void index="0"> 
-                        <class>java.util.List</class> 
-                       </void> 
-                      </array> 
-                     </object> 
-                    </void> 
+                   <object id="exprNodeGenericFuncDesc1" class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
                     <void property="childExprs"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
@@ -264,6 +241,9 @@
                       </void> 
                      </object> 
                     </void> 
+                    <void property="genericUDFClass"> 
+                     <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFSize</class> 
+                    </void> 
                     <void property="typeInfo"> 
                      <object idref="PrimitiveTypeInfo0"/> 
                     </void> 
@@ -271,20 +251,7 @@
                   </void> 
                   <void method="put"> 
                    <string>_col0</string> 
-                   <object id="exprNodeFuncDesc2" class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
-                    <void property="UDFClass"> 
-                     <class>org.apache.hadoop.hive.ql.udf.UDFSize</class> 
-                    </void> 
-                    <void property="UDFMethod"> 
-                     <object class="org.apache.hadoop.hive.ql.udf.UDFSize" method="getMethod"> 
-                      <string>evaluate</string> 
-                      <array class="java.lang.Class" length="1"> 
-                       <void index="0"> 
-                        <class>java.util.List</class> 
-                       </void> 
-                      </array> 
-                     </object> 
-                    </void> 
+                   <object id="exprNodeGenericFuncDesc2" class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
                     <void property="childExprs"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
@@ -303,6 +270,9 @@
                       </void> 
                      </object> 
                     </void> 
+                    <void property="genericUDFClass"> 
+                     <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFSize</class> 
+                    </void> 
                     <void property="typeInfo"> 
                      <object idref="PrimitiveTypeInfo0"/> 
                     </void> 
@@ -315,13 +285,13 @@
                   <void property="colList"> 
                    <object class="java.util.ArrayList"> 
                     <void method="add"> 
-                     <object idref="exprNodeFuncDesc2"/> 
+                     <object idref="exprNodeGenericFuncDesc2"/> 
                     </void> 
                     <void method="add"> 
-                     <object idref="exprNodeFuncDesc1"/> 
+                     <object idref="exprNodeGenericFuncDesc1"/> 
                     </void> 
                     <void method="add"> 
-                     <object idref="exprNodeFuncDesc0"/> 
+                     <object idref="exprNodeGenericFuncDesc0"/> 
                     </void> 
                    </object> 
                   </void> 
@@ -381,20 +351,7 @@
                 <void property="childExprs"> 
                  <object class="java.util.ArrayList"> 
                   <void method="add"> 
-                   <object class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
-                    <void property="UDFClass"> 
-                     <class>org.apache.hadoop.hive.ql.udf.UDFOPNotNull</class> 
-                    </void> 
-                    <void property="UDFMethod"> 
-                     <object class="org.apache.hadoop.hive.ql.udf.UDFOPNotNull" method="getMethod"> 
-                      <string>evaluate</string> 
-                      <array class="java.lang.Class" length="1"> 
-                       <void index="0"> 
-                        <class>java.lang.Object</class> 
-                       </void> 
-                      </array> 
-                     </object> 
-                    </void> 
+                   <object class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
                     <void property="childExprs"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
@@ -409,6 +366,9 @@
                       </void> 
                      </object> 
                     </void> 
+                    <void property="genericUDFClass"> 
+                     <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull</class> 
+                    </void> 
                     <void property="typeInfo"> 
                      <object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
                       <void property="typeName"> 
@@ -436,20 +396,7 @@
                     <void property="childExprs"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
-                       <object class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
-                        <void property="UDFClass"> 
-                         <class>org.apache.hadoop.hive.ql.udf.UDFOPNull</class> 
-                        </void> 
-                        <void property="UDFMethod"> 
-                         <object class="org.apache.hadoop.hive.ql.udf.UDFOPNull" method="getMethod"> 
-                          <string>evaluate</string> 
-                          <array class="java.lang.Class" length="1"> 
-                           <void index="0"> 
-                            <class>java.lang.Object</class> 
-                           </void> 
-                          </array> 
-                         </object> 
-                        </void> 
+                       <object class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
                         <void property="childExprs"> 
                          <object class="java.util.ArrayList"> 
                           <void method="add"> 
@@ -464,6 +411,9 @@
                           </void> 
                          </object> 
                         </void> 
+                        <void property="genericUDFClass"> 
+                         <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull</class> 
+                        </void> 
                         <void property="typeInfo"> 
                          <object idref="PrimitiveTypeInfo2"/> 
                         </void> 
@@ -582,7 +532,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>src_thrift</string> 
@@ -594,7 +544,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml?rev=786352&r1=786351&r2=786352&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml Fri Jun 19 01:53:28 2009
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?> 
-<java version="1.6.0_03-p3" class="java.beans.XMLDecoder"> 
+<java version="1.6.0_07" class="java.beans.XMLDecoder"> 
  <object class="org.apache.hadoop.hive.ql.exec.MapRedTask"> 
   <void property="id"> 
    <string>Stage-2</string> 
@@ -64,7 +64,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src</string> 
+            <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
            </void> 
           </object> 
          </void> 
@@ -132,7 +132,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src</string> 
+            <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
            </void> 
           </object> 
          </void> 
@@ -1273,7 +1273,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>c:a:src1</string> 
@@ -1288,7 +1288,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 
@@ -1337,7 +1337,7 @@
                       <void property="conf"> 
                        <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                         <void property="dirName"> 
-                         <string>file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/358928878/10001</string> 
+                         <string>file:/data/users/zshao/tools/549-trunk-apache-hive/build/ql/tmp/551115749/10001</string> 
                         </void> 
                         <void property="tableInfo"> 
                          <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -1550,20 +1550,7 @@
                   <void property="childExprs"> 
                    <object class="java.util.ArrayList"> 
                     <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
-                      <void property="UDFClass"> 
-                       <class>org.apache.hadoop.hive.ql.udf.UDFOPNull</class> 
-                      </void> 
-                      <void property="UDFMethod"> 
-                       <object class="org.apache.hadoop.hive.ql.udf.UDFOPNull" method="getMethod"> 
-                        <string>evaluate</string> 
-                        <array class="java.lang.Class" length="1"> 
-                         <void index="0"> 
-                          <class>java.lang.Object</class> 
-                         </void> 
-                        </array> 
-                       </object> 
-                      </void> 
+                     <object class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
                       <void property="childExprs"> 
                        <object class="java.util.ArrayList"> 
                         <void method="add"> 
@@ -1578,26 +1565,16 @@
                         </void> 
                        </object> 
                       </void> 
+                      <void property="genericUDFClass"> 
+                       <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull</class> 
+                      </void> 
                       <void property="typeInfo"> 
                        <object idref="PrimitiveTypeInfo3"/> 
                       </void> 
                      </object> 
                     </void> 
                     <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
-                      <void property="UDFClass"> 
-                       <class>org.apache.hadoop.hive.ql.udf.UDFOPNotNull</class> 
-                      </void> 
-                      <void property="UDFMethod"> 
-                       <object class="org.apache.hadoop.hive.ql.udf.UDFOPNotNull" method="getMethod"> 
-                        <string>evaluate</string> 
-                        <array class="java.lang.Class" length="1"> 
-                         <void index="0"> 
-                          <class>java.lang.Object</class> 
-                         </void> 
-                        </array> 
-                       </object> 
-                      </void> 
+                     <object class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
                       <void property="childExprs"> 
                        <object class="java.util.ArrayList"> 
                         <void method="add"> 
@@ -1612,6 +1589,9 @@
                         </void> 
                        </object> 
                       </void> 
+                      <void property="genericUDFClass"> 
+                       <class>org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull</class> 
+                      </void> 
                       <void property="typeInfo"> 
                        <object idref="PrimitiveTypeInfo3"/> 
                       </void>