Posted to commits@hive.apache.org by zs...@apache.org on 2010/01/21 11:38:15 UTC

svn commit: r901644 [28/37] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ ql/src/java/org/apache/hadoop/hive/ql/history/ ql/src/jav...

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java Thu Jan 21 10:37:58 2010
@@ -32,138 +32,147 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 
-
 public class UDFToLong extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToLong.class.getName());
 
   LongWritable longWritable = new LongWritable();
-  
+
   public UDFToLong() {
   }
 
   /**
    * Convert from void to a long. This is called for CAST(... AS BIGINT)
-   *
-   * @param i The void value to convert
+   * 
+   * @param i
+   *          The void value to convert
    * @return LongWritable
    */
-  public LongWritable evaluate(NullWritable i)  {
+  public LongWritable evaluate(NullWritable i) {
     return null;
   }
 
   /**
    * Convert from boolean to a long. This is called for CAST(... AS BIGINT)
-   *
-   * @param i The boolean value to convert
+   * 
+   * @param i
+   *          The boolean value to convert
    * @return LongWritable
    */
-  public LongWritable evaluate(BooleanWritable i)  {
+  public LongWritable evaluate(BooleanWritable i) {
     if (i == null) {
       return null;
     } else {
-      longWritable.set(i.get()? (long)1 : (long) 0);
+      longWritable.set(i.get() ? (long) 1 : (long) 0);
       return longWritable;
     }
   }
 
   /**
    * Convert from byte to a long. This is called for CAST(... AS BIGINT)
-   *
-   * @param i The byte value to convert
+   * 
+   * @param i
+   *          The byte value to convert
    * @return LongWritable
    */
-  public LongWritable evaluate(ByteWritable i)  {
+  public LongWritable evaluate(ByteWritable i) {
     if (i == null) {
       return null;
     } else {
-      longWritable.set((long)i.get());
+      longWritable.set(i.get());
       return longWritable;
     }
   }
-  
+
   /**
    * Convert from short to a long. This is called for CAST(... AS BIGINT)
-   *
-   * @param i The short value to convert
+   * 
+   * @param i
+   *          The short value to convert
    * @return LongWritable
    */
-  public LongWritable evaluate(ShortWritable i)  {
+  public LongWritable evaluate(ShortWritable i) {
     if (i == null) {
       return null;
     } else {
-      longWritable.set((long)i.get());
+      longWritable.set(i.get());
       return longWritable;
     }
   }
-  
+
   /**
    * Convert from integer to a long. This is called for CAST(... AS BIGINT)
-   *
-   * @param i The integer value to convert
+   * 
+   * @param i
+   *          The integer value to convert
    * @return LongWritable
    */
-  public LongWritable evaluate(IntWritable i)  {
+  public LongWritable evaluate(IntWritable i) {
     if (i == null) {
       return null;
     } else {
-      longWritable.set((long)i.get());
+      longWritable.set(i.get());
       return longWritable;
     }
   }
 
   /**
    * Convert from long to a long. This is called for CAST(... AS BIGINT)
-   *
-   * @param i The long value to convert
+   * 
+   * @param i
+   *          The long value to convert
    * @return LongWritable
    */
-  public LongWritable evaluate(LongWritable i)  {
+  public LongWritable evaluate(LongWritable i) {
     return i;
   }
 
   /**
    * Convert from float to a long. This is called for CAST(... AS BIGINT)
-   *
-   * @param i The float value to convert
+   * 
+   * @param i
+   *          The float value to convert
    * @return LongWritable
    */
-  public LongWritable evaluate(FloatWritable i)  {
+  public LongWritable evaluate(FloatWritable i) {
     if (i == null) {
       return null;
     } else {
-      longWritable.set((long)i.get());
+      longWritable.set((long) i.get());
       return longWritable;
     }
   }
-  
+
   /**
    * Convert from double to a long. This is called for CAST(... AS BIGINT)
-   *
-   * @param i The double value to convert
+   * 
+   * @param i
+   *          The double value to convert
    * @return LongWritable
    */
-  public LongWritable evaluate(DoubleWritable i)  {
+  public LongWritable evaluate(DoubleWritable i) {
     if (i == null) {
       return null;
     } else {
-      longWritable.set((long)i.get());
+      longWritable.set((long) i.get());
       return longWritable;
     }
   }
-  
+
   /**
    * Convert from string to a long. This is called for CAST(... AS BIGINT)
-   *
-   * @param i The string value to convert
+   * 
+   * @param i
+   *          The string value to convert
    * @return LongWritable
    */
-  public LongWritable evaluate(Text i)  {
+  public LongWritable evaluate(Text i) {
     if (i == null) {
       return null;
     } else {
       try {
-        longWritable.set(LazyLong.parseLong(i.getBytes(), 0 , i.getLength(), 10));
+        longWritable
+            .set(LazyLong.parseLong(i.getBytes(), 0, i.getLength(), 10));
         return longWritable;
       } catch (NumberFormatException e) {
         // MySQL returns 0 if the string is not a well-formed numeric value.
@@ -173,5 +182,5 @@
       }
     }
   }
-  
+
 }

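The UDFToLong hunks above are formatter-only, apart from dropping redundant widening casts, but they show the pattern every one of these numeric casts follows: a single mutable LongWritable is reused across calls, and each evaluate() overload handles one source type. A minimal sketch of driving the UDF directly; the demo class and its scaffolding are illustrative, not part of this commit:

    import org.apache.hadoop.hive.ql.udf.UDFToLong;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;

    public class UDFToLongDemo {
      public static void main(String[] args) {
        UDFToLong udf = new UDFToLong();
        // CAST(42 AS BIGINT)
        LongWritable a = udf.evaluate(new IntWritable(42));
        // The same LongWritable is reused on every call, so copy the
        // primitive out before calling evaluate() again.
        long kept = a.get();
        // CAST('123' AS BIGINT), parsed via LazyLong.parseLong
        LongWritable b = udf.evaluate(new Text("123"));
        System.out.println(kept + ", " + b.get()); // 42, 123
      }
    }
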
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java Thu Jan 21 10:37:58 2010
@@ -24,7 +24,6 @@
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.lazy.LazyByte;
 import org.apache.hadoop.hive.serde2.lazy.LazyShort;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -33,128 +32,136 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 
-
 public class UDFToShort extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToShort.class.getName());
 
   ShortWritable shortWritable = new ShortWritable();
-  
+
   public UDFToShort() {
   }
 
   /**
    * Convert from void to a short. This is called for CAST(... AS SMALLINT)
-   *
-   * @param i The void value to convert
+   * 
+   * @param i
+   *          The void value to convert
    * @return ShortWritable
    */
-  public ShortWritable evaluate(NullWritable i)  {
+  public ShortWritable evaluate(NullWritable i) {
     return null;
   }
 
   /**
    * Convert from boolean to a short. This is called for CAST(... AS SMALLINT)
-   *
-   * @param i The boolean value to convert
+   * 
+   * @param i
+   *          The boolean value to convert
    * @return ShortWritable
    */
-  public ShortWritable evaluate(BooleanWritable i)  {
+  public ShortWritable evaluate(BooleanWritable i) {
     if (i == null) {
       return null;
     } else {
-      shortWritable.set(i.get() ? (short)1 : (short)0);
+      shortWritable.set(i.get() ? (short) 1 : (short) 0);
       return shortWritable;
     }
   }
 
   /**
    * Convert from byte to a short. This is called for CAST(... AS SMALLINT)
-   *
-   * @param i The byte value to convert
+   * 
+   * @param i
+   *          The byte value to convert
    * @return ShortWritable
    */
-  public ShortWritable evaluate(ByteWritable i)  {
+  public ShortWritable evaluate(ByteWritable i) {
     if (i == null) {
       return null;
     } else {
-      shortWritable.set((short)i.get());
+      shortWritable.set(i.get());
       return shortWritable;
     }
   }
 
   /**
    * Convert from integer to a short. This is called for CAST(... AS SMALLINT)
-   *
-   * @param i The integer value to convert
+   * 
+   * @param i
+   *          The integer value to convert
    * @return ShortWritable
    */
-  public ShortWritable evaluate(IntWritable i)  {
+  public ShortWritable evaluate(IntWritable i) {
     if (i == null) {
       return null;
     } else {
-      shortWritable.set((short)i.get());
+      shortWritable.set((short) i.get());
       return shortWritable;
     }
   }
 
   /**
    * Convert from long to a short. This is called for CAST(... AS SMALLINT)
-   *
-   * @param i The long value to convert
+   * 
+   * @param i
+   *          The long value to convert
    * @return ShortWritable
    */
-  public ShortWritable evaluate(LongWritable i)  {
+  public ShortWritable evaluate(LongWritable i) {
     if (i == null) {
       return null;
     } else {
-      shortWritable.set((short)i.get());
+      shortWritable.set((short) i.get());
       return shortWritable;
     }
   }
-  
+
   /**
    * Convert from float to a short. This is called for CAST(... AS SMALLINT)
-   *
-   * @param i The float value to convert
+   * 
+   * @param i
+   *          The float value to convert
    * @return ShortWritable
    */
-  public ShortWritable evaluate(FloatWritable i)  {
+  public ShortWritable evaluate(FloatWritable i) {
     if (i == null) {
       return null;
     } else {
-      shortWritable.set((short)i.get());
+      shortWritable.set((short) i.get());
       return shortWritable;
     }
   }
-  
+
   /**
    * Convert from double to a short. This is called for CAST(... AS SMALLINT)
-   *
-   * @param i The double value to convert
+   * 
+   * @param i
+   *          The double value to convert
    * @return ShortWritable
    */
-  public ShortWritable evaluate(DoubleWritable i)  {
+  public ShortWritable evaluate(DoubleWritable i) {
     if (i == null) {
       return null;
     } else {
-      shortWritable.set((short)i.get());
+      shortWritable.set((short) i.get());
       return shortWritable;
     }
   }
-  
+
   /**
    * Convert from string to a short. This is called for CAST(... AS SMALLINT)
-   *
-   * @param i The string value to convert
+   * 
+   * @param i
+   *          The string value to convert
    * @return ShortWritable
    */
-  public ShortWritable evaluate(Text i)  {
+  public ShortWritable evaluate(Text i) {
     if (i == null) {
       return null;
     } else {
       try {
-        shortWritable.set(LazyShort.parseShort(i.getBytes(), 0 , i.getLength(), 10));
+        shortWritable.set(LazyShort.parseShort(i.getBytes(), 0, i.getLength(),
+            10));
         return shortWritable;
       } catch (NumberFormatException e) {
         // MySQL returns 0 if the string is not a well-formed numeric value.

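UDFToShort is the same pattern with narrowing (short) casts, which wrap silently on overflow, following Java cast semantics rather than raising a SQL range error. A one-line illustration (not part of the commit):

    public class NarrowingDemo {
      public static void main(String[] args) {
        int big = 70000;            // outside the 16-bit short range
        short s = (short) big;      // wraps modulo 2^16
        System.out.println(s);      // prints 4464
      }
    }
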
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java Thu Jan 21 10:37:58 2010
@@ -21,7 +21,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.UDF;
-
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -35,24 +34,24 @@
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 
-
 public class UDFToString extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToString.class.getName());
 
   Text t = new Text();
   ByteStream.Output out = new ByteStream.Output();
-  
+
   public UDFToString() {
   }
 
-  public Text evaluate(NullWritable i)  {
+  public Text evaluate(NullWritable i) {
     return null;
   }
 
-  byte[] trueBytes = {'T', 'R', 'U', 'E'};
-  byte[] falseBytes = {'F', 'A', 'L', 'S', 'E'};
-  public Text evaluate(BooleanWritable i)  {
+  byte[] trueBytes = { 'T', 'R', 'U', 'E' };
+  byte[] falseBytes = { 'F', 'A', 'L', 'S', 'E' };
+
+  public Text evaluate(BooleanWritable i) {
     if (i == null) {
       return null;
     } else {
@@ -61,8 +60,8 @@
       return t;
     }
   }
-  
-  public Text evaluate(ByteWritable i)  {
+
+  public Text evaluate(ByteWritable i) {
     if (i == null) {
       return null;
     } else {
@@ -72,8 +71,8 @@
       return t;
     }
   }
-  
-  public Text evaluate(ShortWritable i)  {
+
+  public Text evaluate(ShortWritable i) {
     if (i == null) {
       return null;
     } else {
@@ -83,8 +82,8 @@
       return t;
     }
   }
-    
-  public Text evaluate(IntWritable i)  {
+
+  public Text evaluate(IntWritable i) {
     if (i == null) {
       return null;
     } else {
@@ -95,7 +94,7 @@
     }
   }
 
-  public Text evaluate(LongWritable i)  {
+  public Text evaluate(LongWritable i) {
     if (i == null) {
       return null;
     } else {
@@ -105,8 +104,8 @@
       return t;
     }
   }
-  
-  public Text evaluate(FloatWritable i)  {
+
+  public Text evaluate(FloatWritable i) {
     if (i == null) {
       return null;
     } else {
@@ -114,8 +113,8 @@
       return t;
     }
   }
-  
-  public Text evaluate(DoubleWritable i)  {
+
+  public Text evaluate(DoubleWritable i) {
     if (i == null) {
       return null;
     } else {
@@ -123,5 +122,5 @@
       return t;
     }
   }
-  
+
 }

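UDFToString likewise reuses one Text and one ByteStream.Output, and renders booleans from the pre-built trueBytes/falseBytes arrays instead of allocating strings. A sketch of the boolean branch, assuming Text.set(byte[], int, int) from org.apache.hadoop.io.Text (the demo class is illustrative):

    import org.apache.hadoop.io.Text;

    public class BooleanToTextDemo {
      static final byte[] TRUE_BYTES = { 'T', 'R', 'U', 'E' };
      static final byte[] FALSE_BYTES = { 'F', 'A', 'L', 'S', 'E' };

      public static void main(String[] args) {
        Text t = new Text();
        boolean v = true;
        byte[] src = v ? TRUE_BYTES : FALSE_BYTES;
        t.set(src, 0, src.length); // no per-call allocation
        System.out.println(t);     // TRUE
      }
    }
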
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java Thu Jan 21 10:37:58 2010
@@ -18,25 +18,18 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.description;
 import org.apache.hadoop.io.Text;
-import org.apache.commons.lang.StringUtils;
 
-import java.util.regex.Pattern;
-import java.util.regex.Matcher;
-
-@description(
-    name = "trim",
-    value = "_FUNC_(str) - Removes the leading and trailing space characters " +
-    		"from str ",
-    extended = "Example:\n" +
-        "  > SELECT _FUNC_('   facebook  ') FROM src LIMIT 1;\n" +
-        "  'facebook'"
-    )
+@description(name = "trim", value = "_FUNC_(str) - Removes the leading and trailing space characters "
+    + "from str ", extended = "Example:\n"
+    + "  > SELECT _FUNC_('   facebook  ') FROM src LIMIT 1;\n" + "  'facebook'")
 public class UDFTrim extends UDF {
 
   Text result = new Text();
+
   public UDFTrim() {
   }
 

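The UDFTrim change only reflows the @description annotation and the import block, but the import it keeps, org.apache.commons.lang.StringUtils, is what provides the SQL TRIM behavior the description documents: strip() removes leading and trailing whitespace and passes null through. For example (illustrative, assuming commons-lang 2.x on the classpath):

    import org.apache.commons.lang.StringUtils;

    public class TrimDemo {
      public static void main(String[] args) {
        // strip(null) returns null, matching SQL NULL-in/NULL-out
        System.out.println("'" + StringUtils.strip("   facebook  ") + "'");
        // prints 'facebook'
      }
    }
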
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFType.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFType.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFType.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFType.java Thu Jan 21 10:37:58 2010
@@ -1,9 +1,12 @@
 package org.apache.hadoop.hive.ql.udf;
 
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
 
-import java.lang.annotation.*;
-
-@Target(ElementType.TYPE) 
+@Target(ElementType.TYPE)
 @Retention(RetentionPolicy.RUNTIME)
 @Inherited
 public @interface UDFType {

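UDFType is a runtime-retained, inherited marker that the planner reads off UDF classes; deterministic = false (as on unix_timestamp below) tells the optimizer not to assume repeated calls return the same value. A hypothetical non-deterministic UDF would be declared like:

    import org.apache.hadoop.hive.ql.exec.UDF;
    import org.apache.hadoop.hive.ql.udf.UDFType;
    import org.apache.hadoop.io.LongWritable;

    @UDFType(deterministic = false)
    public class UDFNanoTime extends UDF {
      private final LongWritable result = new LongWritable();

      public LongWritable evaluate() {
        result.set(System.nanoTime()); // differs on every call
        return result;
      }
    }
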
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnhex.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnhex.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnhex.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnhex.java Thu Jan 21 10:37:58 2010
@@ -20,52 +20,48 @@
 
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.description;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-@description(
-    name = "unhex",
-    value = "_FUNC_(str) - Converts hexadecimal argument to string",
-    extended = "Performs the inverse operation of HEX(str). That is, it interprets\n" +
-    		"each pair of hexadecimal digits in the argument as a number and\n" +
-    		"converts it to the character represented by the number. The\n" +
-    		"resulting characters are returned as a binary string.\n\n" +
-    		"Example:\n" +
-    		"> SELECT UNHEX('4D7953514C') from src limit 1;\n" +
-    		"'MySQL'\n" +
-    		"> SELECT UNHEX(HEX('string')) from src limit 1;\n" +
-    		"'string'\n" +
-    		"> SELECT HEX(UNHEX('1267')) from src limit 1;\n" +
-    		"'1267'\n\n" +
-    		"The characters in the argument string must be legal hexadecimal\n" +
-    		"digits: '0' .. '9', 'A' .. 'F', 'a' .. 'f'. If UNHEX() encounters\n" +
-    		"any nonhexadecimal digits in the argument, it returns NULL. Also,\n" +
-    		"if there are an odd number of characters a leading 0 is appended."
-    )
-public class UDFUnhex extends UDF { 
-  
+
+@description(name = "unhex", value = "_FUNC_(str) - Converts hexadecimal argument to string", extended = "Performs the inverse operation of HEX(str). That is, it interprets\n"
+    + "each pair of hexadecimal digits in the argument as a number and\n"
+    + "converts it to the character represented by the number. The\n"
+    + "resulting characters are returned as a binary string.\n\n"
+    + "Example:\n"
+    + "> SELECT UNHEX('4D7953514C') from src limit 1;\n"
+    + "'MySQL'\n"
+    + "> SELECT UNHEX(HEX('string')) from src limit 1;\n"
+    + "'string'\n"
+    + "> SELECT HEX(UNHEX('1267')) from src limit 1;\n"
+    + "'1267'\n\n"
+    + "The characters in the argument string must be legal hexadecimal\n"
+    + "digits: '0' .. '9', 'A' .. 'F', 'a' .. 'f'. If UNHEX() encounters\n"
+    + "any nonhexadecimal digits in the argument, it returns NULL. Also,\n"
+    + "if there are an odd number of characters a leading 0 is appended.")
+public class UDFUnhex extends UDF {
+
   /**
-   * Convert every two hex digits in s into 
+   * Convert every two hex digits in s into the corresponding byte.
    * 
    */
   public Text evaluate(Text s) {
     if (s == null) {
       return null;
     }
-    
-    //append a leading 0 if needed
+
+    // append a leading 0 if needed
     String str;
-    if (s.getLength() % 2 == 1)
+    if (s.getLength() % 2 == 1) {
       str = "0" + s.toString();
-    else
+    } else {
       str = s.toString();
-   
-    byte [] result = new byte[str.length()/2];
-    for(int i = 0; i < str.length(); i += 2) {
+    }
+
+    byte[] result = new byte[str.length() / 2];
+    for (int i = 0; i < str.length(); i += 2) {
       try {
-        result[i/2] = ((byte) Integer.parseInt(str.substring(i, i+2), 16));
+        result[i / 2] = ((byte) Integer.parseInt(str.substring(i, i + 2), 16));
       } catch (NumberFormatException e) {
-        //invalid character present, return null
+        // invalid character present, return null
         return null;
       }
     }

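The reformatted UNHEX loop is easy to verify against the first example in the @description; this standalone excerpt of the same logic (demo scaffolding illustrative) reproduces it:

    public class UnhexDemo {
      public static void main(String[] args) {
        String str = "4D7953514C";   // even length, so no leading-0 padding
        byte[] result = new byte[str.length() / 2];
        for (int i = 0; i < str.length(); i += 2) {
          // each hex pair becomes one byte: 4D -> 'M', 79 -> 'y', ...
          result[i / 2] = (byte) Integer.parseInt(str.substring(i, i + 2), 16);
        }
        System.out.println(new String(result)); // MySQL
      }
    }
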
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUnixTimeStamp.java Thu Jan 21 10:37:58 2010
@@ -18,9 +18,8 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.text.SimpleDateFormat;
 import java.text.ParseException;
-import java.util.TimeZone;
+import java.text.SimpleDateFormat;
 import java.util.Date;
 
 import org.apache.commons.logging.Log;
@@ -30,30 +29,28 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
-
-@UDFType(deterministic=false)
-@description(
-    name = "unix_timestamp",
-    value = "_FUNC_([date[, pattern]]) - Returns the UNIX timestamp",
-    extended = "Converts the current or specified time to number of seconds " +
-    		"since 1970-01-01."
-    )
+@UDFType(deterministic = false)
+@description(name = "unix_timestamp", value = "_FUNC_([date[, pattern]]) - Returns the UNIX timestamp", extended = "Converts the current or specified time to number of seconds "
+    + "since 1970-01-01.")
 public class UDFUnixTimeStamp extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFUnixTimeStamp.class.getName());
 
-  //  For now, we just use the default time zone.
-  private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+  // For now, we just use the default time zone.
+  private final SimpleDateFormat formatter = new SimpleDateFormat(
+      "yyyy-MM-dd HH:mm:ss");
 
   LongWritable result = new LongWritable();
+
   public UDFUnixTimeStamp() {
   }
 
   /**
    * Return current UnixTime.
+   * 
    * @return long Number of seconds from 1970-01-01 00:00:00
    */
-  public LongWritable evaluate()  {
+  public LongWritable evaluate() {
     Date date = new Date();
     result.set(date.getTime() / 1000);
     return result;
@@ -61,16 +58,18 @@
 
   /**
    * Convert time string to UnixTime.
-   * @param dateText Time string in format yyyy-MM-dd HH:mm:ss
+   * 
+   * @param dateText
+   *          Time string in format yyyy-MM-dd HH:mm:ss
    * @return long Number of seconds from 1970-01-01 00:00:00
    */
-  public LongWritable evaluate(Text dateText)  {
+  public LongWritable evaluate(Text dateText) {
     if (dateText == null) {
       return null;
     }
 
     try {
-      Date date = (Date)formatter.parse(dateText.toString());
+      Date date = formatter.parse(dateText.toString());
       result.set(date.getTime() / 1000);
       return result;
     } catch (ParseException e) {
@@ -79,13 +78,17 @@
   }
 
   Text lastPatternText = new Text();
+
   /**
    * Convert time string to UnixTime with user defined pattern.
-   * @param dateText Time string in format patternstring
-   * @param patternText Time patterns string supported by SimpleDateFormat
+   * 
+   * @param dateText
+   *          Time string in format patternstring
+   * @param patternText
+   *          Time patterns string supported by SimpleDateFormat
    * @return long Number of seconds from 1970-01-01 00:00:00
    */
-  public LongWritable evaluate(Text dateText, Text patternText)  {
+  public LongWritable evaluate(Text dateText, Text patternText) {
     if (dateText == null || patternText == null) {
       return null;
     }
@@ -93,7 +96,7 @@
       if (!patternText.equals(lastPatternText)) {
         formatter.applyPattern(patternText.toString());
         lastPatternText.set(patternText);
-      }      
+      }
     } catch (Exception e) {
       return null;
     }

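The conversion itself is just SimpleDateFormat plus a division by 1000; the class keeps a single (non-thread-safe) formatter and, in the two-argument form, re-applies the caller's pattern only when it differs from lastPatternText. The core conversion as a standalone sketch (illustrative):

    import java.text.ParseException;
    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class UnixTimeDemo {
      public static void main(String[] args) throws ParseException {
        SimpleDateFormat formatter =
            new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date date = formatter.parse("2010-01-21 10:37:58");
        // seconds since 1970-01-01 00:00:00, in the default time zone
        System.out.println(date.getTime() / 1000);
      }
    }
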
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java Thu Jan 21 10:37:58 2010
@@ -22,16 +22,12 @@
 import org.apache.hadoop.hive.ql.exec.description;
 import org.apache.hadoop.io.Text;
 
-@description(
-    name = "upper,ucase",
-    value = "_FUNC_(str) - Returns str with all characters changed to uppercase",
-    extended = "Example:\n" +
-        "  > SELECT _FUNC_('Facebook') FROM src LIMIT 1;\n" +
-        "  'FACEBOOK'"
-    )
+@description(name = "upper,ucase", value = "_FUNC_(str) - Returns str with all characters changed to uppercase", extended = "Example:\n"
+    + "  > SELECT _FUNC_('Facebook') FROM src LIMIT 1;\n" + "  'FACEBOOK'")
 public class UDFUpper extends UDF {
 
   Text t = new Text();
+
   public UDFUpper() {
   }
 
@@ -42,5 +38,5 @@
     t.set(s.toString().toUpperCase());
     return t;
   }
-  
+
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java Thu Jan 21 10:37:58 2010
@@ -30,24 +30,20 @@
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
-@description(
-    name = "yearweek",
-    value = "_FUNC_(date) - Returns the week of the year of the given date. A week " +
-            "is considered to start on a Monday and week 1 is the first week with >3 days.",
-    extended = "Examples:\n" +
-        "  > SELECT _FUNC_('2008-02-20') FROM src LIMIT 1;\n" +
-        "  8\n" +
-        "  > SELECT _FUNC_('1980-12-31 12:59:59') FROM src LIMIT 1;\n" +
-        "  1"
-    )
+@description(name = "yearweek", value = "_FUNC_(date) - Returns the week of the year of the given date. A week "
+    + "is considered to start on a Monday and week 1 is the first week with >3 days.", extended = "Examples:\n"
+    + "  > SELECT _FUNC_('2008-02-20') FROM src LIMIT 1;\n"
+    + "  8\n"
+    + "  > SELECT _FUNC_('1980-12-31 12:59:59') FROM src LIMIT 1;\n" + "  1")
 public class UDFWeekOfYear extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFWeekOfYear.class.getName());
 
-  private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-  private Calendar calendar = Calendar.getInstance();
+  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+  private final Calendar calendar = Calendar.getInstance();
 
   IntWritable result = new IntWritable();
+
   public UDFWeekOfYear() {
     calendar.setFirstDayOfWeek(Calendar.MONDAY);
     calendar.setMinimalDaysInFirstWeek(4);
@@ -56,10 +52,13 @@
   /**
    * Get the week of the year from a date string.
    * 
-   * @param dateString the dateString in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
-   * @return an int from 1 to 53. null if the dateString is not a valid date string.
+   * @param dateString
+   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
+   *          "yyyy-MM-dd".
+   * @return an int from 1 to 53. null if the dateString is not a valid date
+   *         string.
    */
-  public IntWritable evaluate(Text dateString)  {
+  public IntWritable evaluate(Text dateString) {
     if (dateString == null) {
       return null;
     }

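The two Calendar settings in the constructor (week starts Monday, first week needs at least 4 days) are what give the ISO-8601-style numbering the @description's examples rely on. Reproducing the first documented example directly (illustrative):

    import java.text.SimpleDateFormat;
    import java.util.Calendar;

    public class WeekOfYearDemo {
      public static void main(String[] args) throws Exception {
        Calendar calendar = Calendar.getInstance();
        calendar.setFirstDayOfWeek(Calendar.MONDAY);
        calendar.setMinimalDaysInFirstWeek(4);
        calendar.setTime(
            new SimpleDateFormat("yyyy-MM-dd").parse("2008-02-20"));
        System.out.println(calendar.get(Calendar.WEEK_OF_YEAR)); // 8
      }
    }
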
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java Thu Jan 21 10:37:58 2010
@@ -30,38 +30,37 @@
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
-@description(
-    name = "year",
-    value = "_FUNC_(date) - Returns the year of date",
-    extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or " +
-        "'yyyy-MM-dd'.\n" +
-        "Example:\n " +
-        "  > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n" +
-        "  2009"
-    )
+@description(name = "year", value = "_FUNC_(date) - Returns the year of date", extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
+    + "'yyyy-MM-dd'.\n"
+    + "Example:\n "
+    + "  > SELECT _FUNC_('2009-30-07', 1) FROM src LIMIT 1;\n" + "  2009")
 public class UDFYear extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFYear.class.getName());
 
-  private SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-  private Calendar calendar = Calendar.getInstance();
+  private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+  private final Calendar calendar = Calendar.getInstance();
 
   IntWritable result = new IntWritable();
+
   public UDFYear() {
   }
 
   /**
    * Get the year from a date string.
    * 
-   * @param dateString the dateString in the format of "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd".
-   * @return an int from 1 to 12. null if the dateString is not a valid date string.
+   * @param dateString
+   *          the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
+   *          "yyyy-MM-dd".
+   * @return the year as an int (e.g. 2009). null if the dateString is not a
+   *         valid date string.
    */
-  public IntWritable evaluate(Text dateString)  {
-    
+  public IntWritable evaluate(Text dateString) {
+
     if (dateString == null) {
       return null;
     }
-    
+
     try {
       Date date = formatter.parse(dateString.toString());
       calendar.setTime(date);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/Collector.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/Collector.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/Collector.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/Collector.java Thu Jan 21 10:37:58 2010
@@ -26,6 +26,7 @@
 public interface Collector {
   /**
    * Other classes will call collect() with the data that it has.
+   * 
    * @param input
    */
   void collect(Object input) throws HiveException;

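Collector is the single-method callback through which other classes hand rows onward; any sink only needs collect(). A minimal hypothetical implementation that buffers what it receives:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.Collector;

    public class ListCollector implements Collector {
      final List<Object> rows = new ArrayList<Object>();

      public void collect(Object input) throws HiveException {
        rows.add(input); // buffer every row handed to us
      }
    }
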
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java Thu Jan 21 10:37:58 2010
@@ -40,82 +40,82 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.util.StringUtils;
 
-@description(
-    name = "avg",
-    value = "_FUNC_(x) - Returns the mean of a set of numbers"
-)
+@description(name = "avg", value = "_FUNC_(x) - Returns the mean of a set of numbers")
 public class GenericUDAFAverage implements GenericUDAFResolver {
 
   static final Log LOG = LogFactory.getLog(GenericUDAFAverage.class.getName());
-  
+
   @Override
-  public GenericUDAFEvaluator getEvaluator(
-      TypeInfo[] parameters) throws SemanticException {
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+      throws SemanticException {
     if (parameters.length != 1) {
       throw new UDFArgumentTypeException(parameters.length - 1,
           "Exactly one argument is expected.");
     }
-    
+
     if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
       throw new UDFArgumentTypeException(0,
-          "Only primitive type arguments are accepted but " + parameters[0].getTypeName() + " is passed.");
+          "Only primitive type arguments are accepted but "
+              + parameters[0].getTypeName() + " is passed.");
     }
-    switch (((PrimitiveTypeInfo)parameters[0]).getPrimitiveCategory()) {
-      case BYTE:
-      case SHORT:
-      case INT:
-      case LONG:
-      case FLOAT:
-      case DOUBLE:
-      case STRING:
-        return new GenericUDAFAverageEvaluator();
-      case BOOLEAN:
-      default:
-        throw new UDFArgumentTypeException(0,
-            "Only numeric or string type arguments are accepted but " + parameters[0].getTypeName() + " is passed.");
+    switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+    case FLOAT:
+    case DOUBLE:
+    case STRING:
+      return new GenericUDAFAverageEvaluator();
+    case BOOLEAN:
+    default:
+      throw new UDFArgumentTypeException(0,
+          "Only numeric or string type arguments are accepted but "
+              + parameters[0].getTypeName() + " is passed.");
     }
   }
-  
+
   public static class GenericUDAFAverageEvaluator extends GenericUDAFEvaluator {
 
     // For PARTIAL1 and COMPLETE
     PrimitiveObjectInspector inputOI;
-    
+
     // For PARTIAL2 and FINAL
     StructObjectInspector soi;
     StructField countField;
     StructField sumField;
     LongObjectInspector countFieldOI;
     DoubleObjectInspector sumFieldOI;
-    
+
     // For PARTIAL1 and PARTIAL2
     Object[] partialResult;
-    
+
     // For FINAL and COMPLETE
     DoubleWritable result;
-    
+
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters)
         throws HiveException {
-      assert(parameters.length == 1);
+      assert (parameters.length == 1);
       super.init(m, parameters);
-      
+
       // init input
-      if (mode == mode.PARTIAL1 || mode == mode.COMPLETE) {
-        inputOI = (PrimitiveObjectInspector)parameters[0];
+      if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
+        inputOI = (PrimitiveObjectInspector) parameters[0];
       } else {
-        soi = (StructObjectInspector)parameters[0];
+        soi = (StructObjectInspector) parameters[0];
         countField = soi.getStructFieldRef("count");
         sumField = soi.getStructFieldRef("sum");
-        countFieldOI = (LongObjectInspector)countField.getFieldObjectInspector();
-        sumFieldOI = (DoubleObjectInspector)sumField.getFieldObjectInspector();
+        countFieldOI = (LongObjectInspector) countField
+            .getFieldObjectInspector();
+        sumFieldOI = (DoubleObjectInspector) sumField.getFieldObjectInspector();
       }
-      
+
       // init output
-      if (mode == mode.PARTIAL1 || mode == mode.PARTIAL2) {
+      if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
         // The output of a partial aggregation is a struct containing
-        // a "long" count and a "double" sum. 
-        
+        // a "long" count and a "double" sum.
+
         ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
         foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
         foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
@@ -125,9 +125,9 @@
         partialResult = new Object[2];
         partialResult[0] = new LongWritable(0);
         partialResult[1] = new DoubleWritable(0);
-        return ObjectInspectorFactory.getStandardStructObjectInspector(
-            fname, foi);
-        
+        return ObjectInspectorFactory.getStandardStructObjectInspector(fname,
+            foi);
+
       } else {
         result = new DoubleWritable(0);
         return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
@@ -148,29 +148,31 @@
 
     @Override
     public void reset(AggregationBuffer agg) throws HiveException {
-      AverageAgg myagg = (AverageAgg)agg;
+      AverageAgg myagg = (AverageAgg) agg;
       myagg.count = 0;
-      myagg.sum = 0;      
+      myagg.sum = 0;
     }
-    
+
     boolean warned = false;
-    
+
     @Override
-    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
-      assert(parameters.length == 1);
+    public void iterate(AggregationBuffer agg, Object[] parameters)
+        throws HiveException {
+      assert (parameters.length == 1);
       Object p = parameters[0];
       if (p != null) {
-        AverageAgg myagg = (AverageAgg)agg;
+        AverageAgg myagg = (AverageAgg) agg;
         try {
-          double v = PrimitiveObjectInspectorUtils.getDouble(p, 
-            (PrimitiveObjectInspector)inputOI);
-          myagg.count ++;
+          double v = PrimitiveObjectInspectorUtils.getDouble(p, inputOI);
+          myagg.count++;
           myagg.sum += v;
         } catch (NumberFormatException e) {
           if (!warned) {
             warned = true;
-            LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
-            LOG.warn(getClass().getSimpleName() + " ignoring similar exceptions.");
+            LOG.warn(getClass().getSimpleName() + " "
+                + StringUtils.stringifyException(e));
+            LOG.warn(getClass().getSimpleName()
+                + " ignoring similar exceptions.");
           }
         }
       }
@@ -178,16 +180,17 @@
 
     @Override
     public Object terminatePartial(AggregationBuffer agg) throws HiveException {
-      AverageAgg myagg = (AverageAgg)agg;
-      ((LongWritable)partialResult[0]).set(myagg.count);
-      ((DoubleWritable)partialResult[1]).set(myagg.sum);
+      AverageAgg myagg = (AverageAgg) agg;
+      ((LongWritable) partialResult[0]).set(myagg.count);
+      ((DoubleWritable) partialResult[1]).set(myagg.sum);
       return partialResult;
     }
 
     @Override
-    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+    public void merge(AggregationBuffer agg, Object partial)
+        throws HiveException {
       if (partial != null) {
-        AverageAgg myagg = (AverageAgg)agg;
+        AverageAgg myagg = (AverageAgg) agg;
         Object partialCount = soi.getStructFieldData(partial, countField);
         Object partialSum = soi.getStructFieldData(partial, sumField);
         myagg.count += countFieldOI.get(partialCount);
@@ -197,7 +200,7 @@
 
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
-      AverageAgg myagg = (AverageAgg)agg;
+      AverageAgg myagg = (AverageAgg) agg;
       if (myagg.count == 0) {
         return null;
       } else {

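The average evaluator is the canonical illustration of the partial-aggregation contract: a partial is a {count, sum} struct, never a partial average, and only terminate() divides. Averaging partial averages would be wrong whenever the partials cover different numbers of rows; the arithmetic the struct protects (illustrative):

    public class AvgMergeDemo {
      public static void main(String[] args) {
        // two partials from different mappers
        long count1 = 2; double sum1 = 10.0;  // values 4, 6
        long count2 = 3; double sum2 = 30.0;  // values 9, 10, 11
        // merge(): add component-wise
        long count = count1 + count2;
        double sum = sum1 + sum2;
        // terminate(): divide exactly once, at the end
        System.out.println(sum / count);      // 8.0, the true mean
        // (5.0 + 10.0) / 2 = 7.5 would be the wrong "average of averages"
      }
    }
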
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBridge.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBridge.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBridge.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBridge.java Thu Jan 21 10:37:58 2010
@@ -35,87 +35,87 @@
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
- * This class is a bridge between GenericUDAF and UDAF.
- * Old UDAF can be used with the GenericUDAF infrastructure through
- * this bridge.
+ * This class is a bridge between GenericUDAF and UDAF. Old UDAF can be used
+ * with the GenericUDAF infrastructure through this bridge.
  */
 public class GenericUDAFBridge implements GenericUDAFResolver {
 
   UDAF udaf;
-  
+
   public GenericUDAFBridge(UDAF udaf) {
     this.udaf = udaf;
   }
-  
+
   public Class<? extends UDAF> getUDAFClass() {
     return udaf.getClass();
   }
-  
+
   @Override
-  public GenericUDAFEvaluator getEvaluator(
-      TypeInfo[] parameters) throws SemanticException {
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+      throws SemanticException {
+
+    Class<? extends UDAFEvaluator> udafEvaluatorClass = udaf.getResolver()
+        .getEvaluatorClass(Arrays.asList(parameters));
 
-    Class<? extends UDAFEvaluator> udafEvaluatorClass = 
-      udaf.getResolver().getEvaluatorClass(Arrays.asList(parameters));
-    
     return new GenericUDAFBridgeEvaluator(udafEvaluatorClass);
   }
-  
+
   public static class GenericUDAFBridgeEvaluator extends GenericUDAFEvaluator
-    implements Serializable {
-    
+      implements Serializable {
+
     private static final long serialVersionUID = 1L;
 
     // Used by serialization only
     public GenericUDAFBridgeEvaluator() {
     }
+
     public Class<? extends UDAFEvaluator> getUdafEvaluator() {
       return udafEvaluator;
     }
+
     public void setUdafEvaluator(Class<? extends UDAFEvaluator> udafEvaluator) {
       this.udafEvaluator = udafEvaluator;
     }
 
-    
-    public GenericUDAFBridgeEvaluator(Class<? extends UDAFEvaluator> udafEvaluator) {
+    public GenericUDAFBridgeEvaluator(
+        Class<? extends UDAFEvaluator> udafEvaluator) {
       this.udafEvaluator = udafEvaluator;
     }
-    
-    
+
     Class<? extends UDAFEvaluator> udafEvaluator;
-    
+
     transient ObjectInspector[] parameterOIs;
     transient Object result;
-    
+
     transient Method iterateMethod;
     transient Method mergeMethod;
     transient Method terminatePartialMethod;
     transient Method terminateMethod;
 
-    transient ConversionHelper conversionHelper; 
-    
+    transient ConversionHelper conversionHelper;
+
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters)
         throws HiveException {
       super.init(m, parameters);
-      this.parameterOIs = parameters;
-      
+      parameterOIs = parameters;
+
       // Get the reflection methods from ue
-      for (Method method : udafEvaluator.getMethods()){
-        if (method.getName().equals("iterate")) {              
+      for (Method method : udafEvaluator.getMethods()) {
+        if (method.getName().equals("iterate")) {
           iterateMethod = method;
         }
-        if (method.getName().equals("merge")) {              
+        if (method.getName().equals("merge")) {
           mergeMethod = method;
         }
-        if (method.getName().equals("terminatePartial")) {              
+        if (method.getName().equals("terminatePartial")) {
           terminatePartialMethod = method;
         }
-        if (method.getName().equals("terminate")) {              
+        if (method.getName().equals("terminate")) {
           terminateMethod = method;
         }
       }
-      
+
       // Input: do Java/Writable conversion if needed
       Method aggregateMethod = null;
       if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
@@ -124,7 +124,7 @@
         aggregateMethod = mergeMethod;
       }
       conversionHelper = new ConversionHelper(aggregateMethod, parameters);
-      
+
       // Output: get the evaluate method
       Method evaluateMethod = null;
       if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
@@ -135,55 +135,57 @@
       // Get the output ObjectInspector from the return type.
       Type returnType = evaluateMethod.getGenericReturnType();
       try {
-        return ObjectInspectorFactory.getReflectionObjectInspector(returnType, 
+        return ObjectInspectorFactory.getReflectionObjectInspector(returnType,
             ObjectInspectorOptions.JAVA);
       } catch (RuntimeException e) {
-        throw new HiveException("Cannot recognize return type " + returnType +
-            " from " + evaluateMethod, e);
+        throw new HiveException("Cannot recognize return type " + returnType
+            + " from " + evaluateMethod, e);
       }
     }
-    
+
     /** class for storing UDAFEvaluator value */
     static class UDAFAgg implements AggregationBuffer {
       UDAFEvaluator ueObject;
+
       UDAFAgg(UDAFEvaluator ueObject) {
         this.ueObject = ueObject;
       }
     }
-    
+
     @Override
     public AggregationBuffer getNewAggregationBuffer() {
-      return new UDAFAgg((UDAFEvaluator)ReflectionUtils.newInstance(udafEvaluator, null));
+      return new UDAFAgg((UDAFEvaluator) ReflectionUtils.newInstance(
+          udafEvaluator, null));
     }
 
     @Override
     public void reset(AggregationBuffer agg) throws HiveException {
-      ((UDAFAgg)agg).ueObject.init();
+      ((UDAFAgg) agg).ueObject.init();
     }
 
     @Override
     public void iterate(AggregationBuffer agg, Object[] parameters)
         throws HiveException {
-      FunctionRegistry.invoke(iterateMethod, ((UDAFAgg)agg).ueObject,
+      FunctionRegistry.invoke(iterateMethod, ((UDAFAgg) agg).ueObject,
           conversionHelper.convertIfNecessary(parameters));
     }
 
     @Override
     public void merge(AggregationBuffer agg, Object partial)
         throws HiveException {
-      FunctionRegistry.invoke(mergeMethod, ((UDAFAgg)agg).ueObject, 
+      FunctionRegistry.invoke(mergeMethod, ((UDAFAgg) agg).ueObject,
           conversionHelper.convertIfNecessary(partial));
     }
 
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
-      return FunctionRegistry.invoke(terminateMethod, ((UDAFAgg)agg).ueObject);
+      return FunctionRegistry.invoke(terminateMethod, ((UDAFAgg) agg).ueObject);
     }
 
     @Override
-    public Object terminatePartial(AggregationBuffer agg)
-        throws HiveException {
-      return FunctionRegistry.invoke(terminatePartialMethod, ((UDAFAgg)agg).ueObject);
+    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+      return FunctionRegistry.invoke(terminatePartialMethod,
+          ((UDAFAgg) agg).ueObject);
     }
 
   }

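The bridge locates iterate/merge/terminatePartial/terminate on the legacy evaluator purely by method name, so UDAFs written against the old API keep working under the GenericUDAF machinery. A sketch of such a legacy evaluator (the max-of-longs class is hypothetical):

    import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
    import org.apache.hadoop.io.LongWritable;

    public class MaxLongEvaluator implements UDAFEvaluator {
      private long max;
      private boolean empty;

      public void init() {                      // invoked via reset()
        max = Long.MIN_VALUE;
        empty = true;
      }

      public boolean iterate(LongWritable o) {  // found by name "iterate"
        if (o != null) {
          max = Math.max(max, o.get());
          empty = false;
        }
        return true;
      }

      public LongWritable terminatePartial() {  // found by name
        return empty ? null : new LongWritable(max);
      }

      public boolean merge(LongWritable o) {    // found by name
        return iterate(o);
      }

      public LongWritable terminate() {         // found by name
        return empty ? null : new LongWritable(max);
      }
    }
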
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCount.java Thu Jan 21 10:37:58 2010
@@ -30,41 +30,38 @@
 /**
  * This class implements the COUNT aggregation function as in SQL.
  */
-@description(
-    name = "count",
-    value = "_FUNC_(x) - Returns the count"
-)
+@description(name = "count", value = "_FUNC_(x) - Returns the count")
 public class GenericUDAFCount implements GenericUDAFResolver {
 
   @Override
-  public GenericUDAFEvaluator getEvaluator(
-      TypeInfo[] parameters) throws SemanticException {
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+      throws SemanticException {
     if (parameters.length != 1) {
       throw new UDFArgumentTypeException(parameters.length - 1,
           "Exactly one argument is expected.");
     }
     return new GenericUDAFCountEvaluator();
   }
-  
+
   public static class GenericUDAFCountEvaluator extends GenericUDAFEvaluator {
     ObjectInspector inputOI;
-    LongWritable result; 
+    LongWritable result;
 
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters)
         throws HiveException {
       super.init(m, parameters);
-      assert(parameters.length == 1);
+      assert (parameters.length == 1);
       inputOI = parameters[0];
       result = new LongWritable(0);
       return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
     }
-    
+
     /** class for storing count value */
     static class CountAgg implements AggregationBuffer {
       long value;
     }
-    
+
     @Override
     public AggregationBuffer getNewAggregationBuffer() throws HiveException {
       CountAgg result = new CountAgg();
@@ -74,15 +71,15 @@
 
     @Override
     public void reset(AggregationBuffer agg) throws HiveException {
-      ((CountAgg)agg).value = 0;
+      ((CountAgg) agg).value = 0;
     }
 
     @Override
     public void iterate(AggregationBuffer agg, Object[] parameters)
         throws HiveException {
-      assert(parameters.length == 1);
+      assert (parameters.length == 1);
       if (parameters[0] != null) {
-        ((CountAgg)agg).value ++;
+        ((CountAgg) agg).value++;
       }
     }
 
@@ -90,20 +87,19 @@
     public void merge(AggregationBuffer agg, Object partial)
         throws HiveException {
       if (partial != null) {
-        long p = ((LongObjectInspector)inputOI).get(partial);
-        ((CountAgg)agg).value += p;
+        long p = ((LongObjectInspector) inputOI).get(partial);
+        ((CountAgg) agg).value += p;
       }
     }
 
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
-      result.set(((CountAgg)agg).value);
+      result.set(((CountAgg) agg).value);
       return result;
     }
 
     @Override
-    public Object terminatePartial(AggregationBuffer agg)
-        throws HiveException {
+    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
       return terminate(agg);
     }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java Thu Jan 21 10:37:58 2010
@@ -23,61 +23,81 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 /**
- * A Generic User-defined aggregation function (GenericUDAF) for the use with 
+ * A Generic User-defined aggregation function (GenericUDAF) for the use with
  * Hive.
  * 
  * New GenericUDAF classes need to inherit from this GenericUDAF class.
  * 
- * The GenericUDAF are superior to normal UDAFs in the following ways:
- * 1. It can accept arguments of complex types, and return complex types.
- * 2. It can accept variable length of arguments.
- * 3. It can accept an infinite number of function signature - for example, 
- *    it's easy to write a GenericUDAF that accepts array<int>, 
- *    array<array<int>> and so on (arbitrary levels of nesting).
+ * A GenericUDAF is superior to a normal UDAF in the following ways: 1. It can
+ * accept arguments of complex types, and return complex types. 2. It can accept
+ * a variable number of arguments. 3. It can accept an infinite number of
+ * function signatures - for example, it's easy to write a GenericUDAF that
+ * accepts array<int>, array<array<int>> and so on (arbitrary levels of nesting).
  */
-@UDFType(deterministic=true)
+@UDFType(deterministic = true)
 public abstract class GenericUDAFEvaluator {
-  
+
   static public enum Mode {
-    /** PARTIAL1: from original data to partial aggregation data: iterate() and terminatePartial() will be called */
+    /**
+     * PARTIAL1: from original data to partial aggregation data: iterate() and
+     * terminatePartial() will be called
+     */
     PARTIAL1,
-    /** PARTIAL2: from partial aggregation data to partial aggregation data: merge() and terminatePartial() will be called */
+    /**
+     * PARTIAL2: from partial aggregation data to partial aggregation data:
+     * merge() and terminatePartial() will be called
+     */
     PARTIAL2,
-    /** FINAL: from partial aggregation to full aggregation: merge() and terminate() will be called */
+    /**
+     * FINAL: from partial aggregation to full aggregation: merge() and
+     * terminate() will be called
+     */
     FINAL,
-    /** COMPLETE: from original data directly to full aggregation: iterate() and terminate() will be called */
+    /**
+     * COMPLETE: from original data directly to full aggregation: iterate() and
+     * terminate() will be called
+     */
     COMPLETE
   };
 
   Mode mode;
+
   /**
    * The constructor
    */
   public GenericUDAFEvaluator() {
   }
 
-  /** Initialize the evaluator.
-   *  @param m  The mode of aggregation.
-   *  @param parameters  The ObjectInspector for the parameters:
-   *    In PARTIAL1 and COMPLETE mode, the parameters are original data;
-   *    In PARTIAL2 and FINAL mode, the parameters are just partial aggregations (in that case, the array will always have a single element).
-   *  @return  The ObjectInspector for the return value.
-   *    In PARTIAL1 and PARTIAL2 mode, the ObjectInspector for the return value of terminatePartial() call;
-   *    In FINAL and COMPLETE mode, the ObjectInspector for the return value of terminate() call.
-   *    
-   *  NOTE: We need ObjectInspector[] (in addition to the TypeInfo[] in GenericUDAFResolver) for 2 reasons:
-   *  1. ObjectInspector contains more information than TypeInfo;  
-   *     and GenericUDAFEvaluator.init at execution time.
-   *  2. We call GenericUDAFResolver.getEvaluator at compilation time, 
+  /**
+   * Initialize the evaluator.
+   * 
+   * @param m
+   *          The mode of aggregation.
+   * @param parameters
+   *          The ObjectInspector for the parameters: In PARTIAL1 and COMPLETE
+   *          mode, the parameters are original data; In PARTIAL2 and FINAL
+   *          mode, the parameters are just partial aggregations (in that case,
+   *          the array will always have a single element).
+   * @return The ObjectInspector for the return value. In PARTIAL1 and PARTIAL2
+   *         mode, the ObjectInspector for the return value of
+   *         terminatePartial() call; In FINAL and COMPLETE mode, the
+   *         ObjectInspector for the return value of terminate() call.
+   * 
+   *         NOTE: We need ObjectInspector[] (in addition to the TypeInfo[] in
+   *         GenericUDAFResolver) for 2 reasons: 1. ObjectInspector contains
+   *         more information than TypeInfo. 2. We call
+   *         GenericUDAFResolver.getEvaluator at compilation time, and
+   *         GenericUDAFEvaluator.init at execution time.
    */
-  public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+  public ObjectInspector init(Mode m, ObjectInspector[] parameters)
+      throws HiveException {
     // This function should be overriden in every sub class
     // And the sub class should call super.init(m, parameters) to get mode set.
     mode = m;
     return null;
   }
 
-  /** 
+  /**
    * The interface for a class that is used to store the aggregation result
    * during the process of aggregation.
    * 
@@ -90,33 +110,44 @@
    */
   public static interface AggregationBuffer {
   };
-  
+
   /**
    * Get a new aggregation object.
    */
-  public abstract AggregationBuffer getNewAggregationBuffer() throws HiveException;
+  public abstract AggregationBuffer getNewAggregationBuffer()
+      throws HiveException;
 
-  /** Reset the aggregation. This is useful if we want to reuse the same aggregation.
+  /**
+   * Reset the aggregation. This is useful if we want to reuse the same
+   * aggregation.
    */
   public abstract void reset(AggregationBuffer agg) throws HiveException;
 
   /**
-   * This function will be called by GroupByOperator when it sees a new input row.
-   * @param agg  The object to store the aggregation result.  
-   * @param parameters  The row, can be inspected by the OIs passed in init().
+   * This function will be called by GroupByOperator when it sees a new input
+   * row.
+   * 
+   * @param agg
+   *          The object to store the aggregation result.
+   * @param parameters
+   *          The row, can be inspected by the OIs passed in init().
    */
-  public void aggregate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+  public void aggregate(AggregationBuffer agg, Object[] parameters)
+      throws HiveException {
     if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
       iterate(agg, parameters);
     } else {
-      assert(parameters.length == 1);
+      assert (parameters.length == 1);
       merge(agg, parameters[0]);
     }
   }
 
   /**
-   * This function will be called by GroupByOperator when it sees a new input row.
-   * @param agg  The object to store the aggregation result.  
+   * This function will be called by GroupByOperator to get the aggregation
+   * result: partial in PARTIAL1/PARTIAL2 mode, final in FINAL/COMPLETE mode.
+   * 
+   * @param agg
+   *          The object to store the aggregation result.
    */
   public Object evaluate(AggregationBuffer agg) throws HiveException {
     if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
@@ -125,25 +156,38 @@
       return terminate(agg);
     }
   }
-  
-  /** Iterate through original data.
-   *  @param parameters  The objects of parameters.
+
+  /**
+   * Iterate through original data.
+   * 
+   * @param parameters
+   *          The row of original data, inspectable by the OIs from init().
    */
-  public abstract void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException;
+  public abstract void iterate(AggregationBuffer agg, Object[] parameters)
+      throws HiveException;
 
-  /** Get partial aggregation result.
-   *  @return partial aggregation result.
+  /**
+   * Get partial aggregation result.
+   * 
+   * @return partial aggregation result.
    */
-  public abstract Object terminatePartial(AggregationBuffer agg) throws HiveException;
+  public abstract Object terminatePartial(AggregationBuffer agg)
+      throws HiveException;
 
-  /** Merge with partial aggregation result.
-   *  NOTE: null might be passed in case there is no input data.
-   *  @param partial  The partial aggregation result.
+  /**
+   * Merge with partial aggregation result. NOTE: null might be passed in case
+   * there is no input data.
+   * 
+   * @param partial
+   *          The partial aggregation result.
    */
-  public abstract void merge(AggregationBuffer agg, Object partial) throws HiveException;
+  public abstract void merge(AggregationBuffer agg, Object partial)
+      throws HiveException;
 
-  /** Get final aggregation result.
-   *  @return final aggregation result.
+  /**
+   * Get final aggregation result.
+   * 
+   * @return final aggregation result.
    */
   public abstract Object terminate(AggregationBuffer agg) throws HiveException;
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.java Thu Jan 21 10:37:58 2010
@@ -18,35 +18,35 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 /**
- * A Generic User-defined aggregation function (GenericUDAF) for the use with 
+ * A Generic User-defined aggregation function (GenericUDAF) for use with
  * Hive.
  * 
- * GenericUDAFResolver is used at compile time.  We use GenericUDAFResolver to
+ * GenericUDAFResolver is used at compile time. We use GenericUDAFResolver to
  * find out the GenericUDAFEvaluator for the parameter types.
  * 
  */
 public interface GenericUDAFResolver {
-  
-  /** Get the evaluator for the parameter types.
-   *  
-   *  The reason that this function returns an object instead of a class
-   *  is because it's possible that the object needs some configuration 
-   *  (that can be serialized).  In that case the class of the object has 
-   *  to implement the Serializable interface.  At execution time, we will 
-   *  deserialize the object from the plan and use it to evaluate the
-   *  aggregations.
-   *  
-   *  If the class of the object does not implement Serializable, then
-   *  we will create a new instance of the class at execution time.
-   *  
-   *  @param parameters  The types of the parameters. We need the type 
-   *         information to know which evaluator class to use.  
+
+  /**
+   * Get the evaluator for the parameter types.
+   * 
+   * This function returns an object instead of a class because the object
+   * may need some configuration (that can be serialized). In that case the
+   * class of the object has to implement the
+   * Serializable interface. At execution time, we will deserialize the object
+   * from the plan and use it to evaluate the aggregations.
+   * 
+   * If the class of the object does not implement Serializable, then we will
+   * create a new instance of the class at execution time.
+   * 
+   * @param parameters
+   *          The types of the parameters. We need the type information to know
+   *          which evaluator class to use.
    */
-  GenericUDAFEvaluator getEvaluator(
-      TypeInfo[] parameters) throws SemanticException;
+  GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+      throws SemanticException;
 }
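
The Serializable contract described above can be made concrete with a small
resolver for the count sketch shown earlier; this pairing is hypothetical (not
part of this commit) and follows the same shape as GenericUDAFSum below. Since
the evaluator carries no per-query configuration, it does not implement
Serializable, and Hive will create a new instance at execution time.

    // Hypothetical resolver for the GenericUDAFCountSketch example above.
    package org.apache.hadoop.hive.ql.udf.generic;

    import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

    public class GenericUDAFCountSketchResolver implements GenericUDAFResolver {

      public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
          throws SemanticException {
        if (parameters.length != 1) {
          throw new UDFArgumentTypeException(parameters.length - 1,
              "Exactly one argument is expected.");
        }
        // No serialized configuration is needed, so returning a plain new
        // instance is enough; a fresh instance of the class is created at
        // execution time, as described in the Javadoc above.
        return new GenericUDAFCountSketch();
      }
    }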

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java Thu Jan 21 10:37:58 2010
@@ -26,62 +26,60 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 /**
- * Compute the standard deviation by extending GenericUDAFVariance and 
+ * Compute the standard deviation by extending GenericUDAFVariance and
  * overriding the terminate() method of the evaluator.
- *
+ * 
  */
-@description(
-    name = "std,stddev,stddev_pop",
-    value = "_FUNC_(x) - Returns the standard deviation of a set of numbers"
-)
+@description(name = "std,stddev,stddev_pop", value = "_FUNC_(x) - Returns the standard deviation of a set of numbers")
 public class GenericUDAFStd extends GenericUDAFVariance {
-  
+
   @Override
-  public GenericUDAFEvaluator getEvaluator(
-      TypeInfo[] parameters) throws SemanticException {
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+      throws SemanticException {
     if (parameters.length != 1) {
       throw new UDFArgumentTypeException(parameters.length - 1,
           "Exactly one argument is expected.");
     }
-    
+
     if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
       throw new UDFArgumentTypeException(0,
-          "Only primitive type arguments are accepted but " 
-          + parameters[0].getTypeName() + " is passed.");
+          "Only primitive type arguments are accepted but "
+              + parameters[0].getTypeName() + " is passed.");
     }
-    switch (((PrimitiveTypeInfo)parameters[0]).getPrimitiveCategory()) {
-      case BYTE:
-      case SHORT:
-      case INT:
-      case LONG:
-      case FLOAT:
-      case DOUBLE:
-      case STRING:
-        return new GenericUDAFStdEvaluator();
-      case BOOLEAN:
-      default:
-        throw new UDFArgumentTypeException(0,
-            "Only numeric or string type arguments are accepted but " 
-            + parameters[0].getTypeName() + " is passed.");
+    switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+    case FLOAT:
+    case DOUBLE:
+    case STRING:
+      return new GenericUDAFStdEvaluator();
+    case BOOLEAN:
+    default:
+      throw new UDFArgumentTypeException(0,
+          "Only numeric or string type arguments are accepted but "
+              + parameters[0].getTypeName() + " is passed.");
     }
   }
-  
+
   /**
-   * Compute the standard deviation by extending GenericUDAFVarianceEvaluator 
+   * Compute the standard deviation by extending GenericUDAFVarianceEvaluator
    * and overriding the terminate() method of the evaluator.
-   *
+   * 
    */
-  public static class GenericUDAFStdEvaluator extends GenericUDAFVarianceEvaluator {
+  public static class GenericUDAFStdEvaluator extends
+      GenericUDAFVarianceEvaluator {
 
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
-      StdAgg myagg = (StdAgg)agg;
-      
+      StdAgg myagg = (StdAgg) agg;
+
       if (myagg.count == 0) { // SQL standard - return null for zero elements
         return null;
       } else {
-        if(myagg.count > 1) { 
-          result.set(Math.sqrt(myagg.variance / (myagg.count))); 
+        if (myagg.count > 1) {
+          result.set(Math.sqrt(myagg.variance / (myagg.count)));
         } else { // for one element the variance is always 0
           result.set(0);
         }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java Thu Jan 21 10:37:58 2010
@@ -26,63 +26,61 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 /**
- * Compute the sample standard deviation by extending GenericUDAFVariance and 
+ * Compute the sample standard deviation by extending GenericUDAFVariance and
  * overriding the terminate() method of the evaluator.
- *
+ * 
  */
-@description(
-    name = "stddev_samp",
-    value = "_FUNC_(x) - Returns the sample standard deviation of a set of " +
-		"numbers"
-)
+@description(name = "stddev_samp", value = "_FUNC_(x) - Returns the sample standard deviation of a set of "
+    + "numbers")
 public class GenericUDAFStdSample extends GenericUDAFVariance {
-  
+
   @Override
-  public GenericUDAFEvaluator getEvaluator(
-      TypeInfo[] parameters) throws SemanticException {
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+      throws SemanticException {
     if (parameters.length != 1) {
       throw new UDFArgumentTypeException(parameters.length - 1,
           "Exactly one argument is expected.");
     }
-    
+
     if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
       throw new UDFArgumentTypeException(0,
-          "Only primitive type arguments are accepted but " 
-          + parameters[0].getTypeName() + " is passed.");
+          "Only primitive type arguments are accepted but "
+              + parameters[0].getTypeName() + " is passed.");
     }
-    switch (((PrimitiveTypeInfo)parameters[0]).getPrimitiveCategory()) {
-      case BYTE:
-      case SHORT:
-      case INT:
-      case LONG:
-      case FLOAT:
-      case DOUBLE:
-      case STRING:
-        return new GenericUDAFStdSampleEvaluator();
-      case BOOLEAN:
-      default:
-        throw new UDFArgumentTypeException(0,
-            "Only numeric or string type arguments are accepted but " 
-            + parameters[0].getTypeName() + " is passed.");
+    switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+    case FLOAT:
+    case DOUBLE:
+    case STRING:
+      return new GenericUDAFStdSampleEvaluator();
+    case BOOLEAN:
+    default:
+      throw new UDFArgumentTypeException(0,
+          "Only numeric or string type arguments are accepted but "
+              + parameters[0].getTypeName() + " is passed.");
     }
   }
-  
+
   /**
-   * Compute the sample standard deviation by extending 
+   * Compute the sample standard deviation by extending
    * GenericUDAFVarianceEvaluator and overriding the terminate() method of the
-   * evaluator 
+   * evaluator.
    */
-  public static class GenericUDAFStdSampleEvaluator extends GenericUDAFVarianceEvaluator {
+  public static class GenericUDAFStdSampleEvaluator extends
+      GenericUDAFVarianceEvaluator {
 
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
-      StdAgg myagg = (StdAgg)agg;
-      
+      StdAgg myagg = (StdAgg) agg;
+
       if (myagg.count == 0) { // SQL standard - return null for zero elements
         return null;
       } else {
-        if(myagg.count > 1) { 
-          result.set(Math.sqrt(myagg.variance / (myagg.count-1))); 
+        if (myagg.count > 1) {
+          result.set(Math.sqrt(myagg.variance / (myagg.count - 1)));
         } else { // for one element the variance is always 0
           result.set(0);
         }
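
The only functional difference between GenericUDAFStd above and this
GenericUDAFStdSample is the denominator in terminate(): the population form
divides the accumulated sum of squared deviations by count, the sample form by
count - 1 (Bessel's correction). A standalone check with hypothetical values
(the field names mirror StdAgg, but the numbers are made up):

    public class StdDenominatorCheck {
      public static void main(String[] args) {
        double variance = 10.0; // sum of squared deviations, as in StdAgg
        long count = 5;         // number of elements, as in StdAgg
        double stddevPop = Math.sqrt(variance / count);        // GenericUDAFStd
        double stddevSamp = Math.sqrt(variance / (count - 1)); // GenericUDAFStdSample
        System.out.println(stddevPop);  // 1.4142135623730951
        System.out.println(stddevSamp); // 1.5811388300841898
      }
    }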

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java Thu Jan 21 10:37:58 2010
@@ -33,55 +33,54 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.util.StringUtils;
 
-@description(
-    name = "sum",
-    value = "_FUNC_(x) - Returns the sum of a set of numbers"
-)
+@description(name = "sum", value = "_FUNC_(x) - Returns the sum of a set of numbers")
 public class GenericUDAFSum implements GenericUDAFResolver {
 
   static final Log LOG = LogFactory.getLog(GenericUDAFSum.class.getName());
-  
+
   @Override
-  public GenericUDAFEvaluator getEvaluator(
-      TypeInfo[] parameters) throws SemanticException {
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
+      throws SemanticException {
     if (parameters.length != 1) {
       throw new UDFArgumentTypeException(parameters.length - 1,
           "Exactly one argument is expected.");
     }
-    
+
     if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
       throw new UDFArgumentTypeException(0,
-          "Only primitive type arguments are accepted but " + parameters[0].getTypeName() + " is passed.");
+          "Only primitive type arguments are accepted but "
+              + parameters[0].getTypeName() + " is passed.");
     }
-    switch (((PrimitiveTypeInfo)parameters[0]).getPrimitiveCategory()) {
-      case BYTE:
-      case SHORT:
-      case INT:
-      case LONG:
-        return new GenericUDAFSumLong();
-      case FLOAT:
-      case DOUBLE:
-      case STRING:
-        return new GenericUDAFSumDouble();
-      case BOOLEAN:
-      default:
-        throw new UDFArgumentTypeException(0,
-            "Only numeric or string type arguments are accepted but " + parameters[0].getTypeName() + " is passed.");
+    switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+      return new GenericUDAFSumLong();
+    case FLOAT:
+    case DOUBLE:
+    case STRING:
+      return new GenericUDAFSumDouble();
+    case BOOLEAN:
+    default:
+      throw new UDFArgumentTypeException(0,
+          "Only numeric or string type arguments are accepted but "
+              + parameters[0].getTypeName() + " is passed.");
     }
   }
-  
+
   public static class GenericUDAFSumDouble extends GenericUDAFEvaluator {
 
     PrimitiveObjectInspector inputOI;
     DoubleWritable result;
-    
+
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters)
         throws HiveException {
-      assert(parameters.length == 1);
+      assert (parameters.length == 1);
       super.init(m, parameters);
       result = new DoubleWritable(0);
-      inputOI = (PrimitiveObjectInspector)parameters[0];
+      inputOI = (PrimitiveObjectInspector) parameters[0];
       return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
     }
 
@@ -90,7 +89,7 @@
       boolean empty;
       double sum;
     }
-    
+
     @Override
     public AggregationBuffer getNewAggregationBuffer() throws HiveException {
       SumDoubleAgg result = new SumDoubleAgg();
@@ -100,23 +99,26 @@
 
     @Override
     public void reset(AggregationBuffer agg) throws HiveException {
-      SumDoubleAgg myagg = (SumDoubleAgg)agg;
+      SumDoubleAgg myagg = (SumDoubleAgg) agg;
       myagg.empty = true;
-      myagg.sum = 0;      
+      myagg.sum = 0;
     }
 
     boolean warned = false;
-    
+
     @Override
-    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
-      assert(parameters.length == 1);
+    public void iterate(AggregationBuffer agg, Object[] parameters)
+        throws HiveException {
+      assert (parameters.length == 1);
       try {
         merge(agg, parameters[0]);
       } catch (NumberFormatException e) {
         if (!warned) {
           warned = true;
-          LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
-          LOG.warn(getClass().getSimpleName() + " ignoring similar exceptions.");
+          LOG.warn(getClass().getSimpleName() + " "
+              + StringUtils.stringifyException(e));
+          LOG.warn(getClass().getSimpleName()
+              + " ignoring similar exceptions.");
         }
       }
     }
@@ -127,17 +130,18 @@
     }
 
     @Override
-    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+    public void merge(AggregationBuffer agg, Object partial)
+        throws HiveException {
       if (partial != null) {
-        SumDoubleAgg myagg = (SumDoubleAgg)agg;
+        SumDoubleAgg myagg = (SumDoubleAgg) agg;
         myagg.empty = false;
-        myagg.sum += PrimitiveObjectInspectorUtils.getDouble(partial, inputOI); 
+        myagg.sum += PrimitiveObjectInspectorUtils.getDouble(partial, inputOI);
       }
     }
 
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
-      SumDoubleAgg myagg = (SumDoubleAgg)agg;
+      SumDoubleAgg myagg = (SumDoubleAgg) agg;
       if (myagg.empty) {
         return null;
       }
@@ -146,20 +150,19 @@
     }
 
   }
-  
 
   public static class GenericUDAFSumLong extends GenericUDAFEvaluator {
 
     PrimitiveObjectInspector inputOI;
     LongWritable result;
-    
+
     @Override
     public ObjectInspector init(Mode m, ObjectInspector[] parameters)
         throws HiveException {
-      assert(parameters.length == 1);
+      assert (parameters.length == 1);
       super.init(m, parameters);
       result = new LongWritable(0);
-      inputOI = (PrimitiveObjectInspector)parameters[0];
+      inputOI = (PrimitiveObjectInspector) parameters[0];
       return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
     }
 
@@ -168,7 +171,7 @@
       boolean empty;
       long sum;
     }
-    
+
     @Override
     public AggregationBuffer getNewAggregationBuffer() throws HiveException {
       SumLongAgg result = new SumLongAgg();
@@ -178,22 +181,24 @@
 
     @Override
     public void reset(AggregationBuffer agg) throws HiveException {
-      SumLongAgg myagg = (SumLongAgg)agg;
+      SumLongAgg myagg = (SumLongAgg) agg;
       myagg.empty = true;
-      myagg.sum = 0;      
+      myagg.sum = 0;
     }
 
     boolean warned = false;
-    
+
     @Override
-    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
-      assert(parameters.length == 1);
+    public void iterate(AggregationBuffer agg, Object[] parameters)
+        throws HiveException {
+      assert (parameters.length == 1);
       try {
         merge(agg, parameters[0]);
       } catch (NumberFormatException e) {
         if (!warned) {
           warned = true;
-          LOG.warn(getClass().getSimpleName() + " " + StringUtils.stringifyException(e));
+          LOG.warn(getClass().getSimpleName() + " "
+              + StringUtils.stringifyException(e));
         }
       }
     }
@@ -204,17 +209,18 @@
     }
 
     @Override
-    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+    public void merge(AggregationBuffer agg, Object partial)
+        throws HiveException {
       if (partial != null) {
-        SumLongAgg myagg = (SumLongAgg)agg;
-        myagg.sum += PrimitiveObjectInspectorUtils.getLong(partial, inputOI); 
+        SumLongAgg myagg = (SumLongAgg) agg;
+        myagg.sum += PrimitiveObjectInspectorUtils.getLong(partial, inputOI);
         myagg.empty = false;
       }
     }
 
     @Override
     public Object terminate(AggregationBuffer agg) throws HiveException {
-      SumLongAgg myagg = (SumLongAgg)agg;
+      SumLongAgg myagg = (SumLongAgg) agg;
       if (myagg.empty) {
         return null;
       }
@@ -223,6 +229,5 @@
     }
 
   }
-  
 
 }
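
As a usage sketch, the resolver/evaluator pair above can be driven by hand
through a map-side PARTIAL1 pass. This harness is hypothetical (not part of
this commit) and assumes TypeInfoFactory.longTypeInfo from
org.apache.hadoop.hive.serde2.typeinfo:

    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hadoop.io.LongWritable;

    public class SumLongHarness {
      public static void main(String[] args) throws Exception {
        // The resolver picks GenericUDAFSumLong for a BIGINT argument.
        GenericUDAFEvaluator eval = new GenericUDAFSum()
            .getEvaluator(new TypeInfo[] { TypeInfoFactory.longTypeInfo });

        ObjectInspector longOI =
            PrimitiveObjectInspectorFactory.writableLongObjectInspector;
        eval.init(Mode.PARTIAL1, new ObjectInspector[] { longOI });

        AggregationBuffer agg = eval.getNewAggregationBuffer();
        eval.iterate(agg, new Object[] { new LongWritable(40) });
        eval.iterate(agg, new Object[] { new LongWritable(2) });

        // In PARTIAL1 mode, terminatePartial() yields the partial sum that
        // would be shipped to the reducers.
        System.out.println(eval.terminatePartial(agg)); // 42
      }
    }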