Posted to commits@hive.apache.org by na...@apache.org on 2009/12/31 00:46:17 UTC

svn commit: r894741 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/udf/ ql/src/test/org/apache/hadoop/hive/ql/exec/ ql/src/test/results/clientpositive/

Author: namit
Date: Wed Dec 30 23:46:17 2009
New Revision: 894741

URL: http://svn.apache.org/viewvc?rev=894741&view=rev
Log:
HIVE-1017 || and && are not supported currently
(Ning Zhang via namit)

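For context only (not part of the patch below): a minimal HiveQL sketch of the operator syntax this change affects, reusing the srcpart example from the UDFOPAnd description; the partition values are illustrative.

    -- Supported keyword operators, as registered in FunctionRegistry:
    SELECT * FROM srcpart WHERE srcpart.hr = 12 AND srcpart.ds = '2008-04-08';
    SELECT * FROM srcpart WHERE srcpart.hr = 12 OR  srcpart.ds = '2008-04-08';

    -- Not supported: the C-style forms whose registry entries are removed here,
    -- since (per the log above) Hive does not currently accept '&&' or '||':
    -- SELECT * FROM srcpart WHERE srcpart.hr = 12 && srcpart.ds = '2008-04-08';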

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
    hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=894741&r1=894740&r2=894741&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Dec 30 23:46:17 2009
@@ -381,6 +381,9 @@
     HIVE-986 Ant "eclipse-files" target is broken for Hadoop 0.20
     (Zheng Shao via namit)
 
+    HIVE-1017 || and && are not supported currently
+    (Ning Zhang via namit)
+
 Release 0.4.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=894741&r1=894740&r2=894741&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Wed Dec 30 23:46:17 2009
@@ -157,9 +157,7 @@
     registerUDF(">=", UDFOPEqualOrGreaterThan.class, true);
 
     registerUDF("and", UDFOPAnd.class, true);
-    registerUDF("&&", UDFOPAnd.class, true, "and");
     registerUDF("or", UDFOPOr.class, true);
-    registerUDF("||", UDFOPOr.class, true, "or");
     registerUDF("not", UDFOPNot.class, true);
     registerUDF("!", UDFOPNot.class, true, "not");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java?rev=894741&r1=894740&r2=894741&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java Wed Dec 30 23:46:17 2009
@@ -25,7 +25,7 @@
 import org.apache.hadoop.io.BooleanWritable;
 
 @description(
-    name = "and,&&",
+    name = "and",
     value = "a _FUNC_ b - Logical and",
     extended = "Example:\n" +
         "  > SELECT * FROM srcpart WHERE src.hr=12 _FUNC_ " +

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java?rev=894741&r1=894740&r2=894741&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java Wed Dec 30 23:46:17 2009
@@ -25,7 +25,7 @@
 import org.apache.hadoop.io.BooleanWritable;
 
 @description(
-    name = "or,||",
+    name = "or",
     value = "a _FUNC_ b - Logical or"
 )
 public class UDFOPOr extends UDF {

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=894741&r1=894740&r2=894741&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Wed Dec 30 23:46:17 2009
@@ -88,7 +88,7 @@
       exprNodeDesc zero = new exprNodeConstantDesc("0");
       exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(">", col2, col1);
       exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", col0, zero);
-      exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("&&", func1, func2); 
+      exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("and", func1, func2);
       assert(func3 != null);
       filterDesc filterCtx = new filterDesc(func3, false);
 
@@ -148,7 +148,7 @@
       //fileSinkDesc fsd = new fileSinkDesc ("file:///tmp" + File.separator + System.getProperty("user.name") + File.separator + "TestFileSinkOperator",
       //                                     Utilities.defaultTd, false);
       //Operator<fileSinkDesc> flop = OperatorFactory.getAndMakeChild(fsd, op);
-      
+
       op.initialize(new JobConf(TestOperators.class), new ObjectInspector[]{r[0].oi});
 
       // evaluate on row
@@ -244,7 +244,7 @@
       pathToAliases.put("/testDir", aliases);
 
       // initialize pathToTableInfo
-      // Default: treat the table as a single column "col" 
+      // Default: treat the table as a single column "col"
       tableDesc td = Utilities.defaultTd;
       partitionDesc pd = new partitionDesc(td, null);
       LinkedHashMap<String,org.apache.hadoop.hive.ql.plan.partitionDesc> pathToPartitionInfo = new
@@ -276,7 +276,7 @@
       InspectableObject io2 = new InspectableObject();
       for(int i=0; i<5; i++) {
         String answer = "[[" + i + ", " + (i+1) + ", " + (i+2) + "]]";
-        
+
         tw.set("" + i + "\u0001" + (i+1) + "\u0001"+ (i+2));
         mo.process((Writable)tw);
         cdop1.retrieve(io1);

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out?rev=894741&r1=894740&r2=894741&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out Wed Dec 30 23:46:17 2009
@@ -5,7 +5,6 @@
 !
 %
 &
-&&
 *
 +
 -
@@ -124,7 +123,6 @@
 when
 year
 |
-||
 ~
 PREHOOK: query: SHOW FUNCTIONS '^c.*'
 PREHOOK: type: SHOWFUNCTIONS
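
A hedged usage sketch tied to the test output above (statement forms taken from the PREHOOK queries; the comments describe the listing after this change):

    SHOW FUNCTIONS;          -- full listing: and, or, not, ! remain; && and || no longer appear
    SHOW FUNCTIONS '^c.*';   -- pattern form, as exercised by show_functions.q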