Posted to commits@hive.apache.org by zs...@apache.org on 2010/02/09 08:55:50 UTC

svn commit: r907950 [10/15] - in /hadoop/hive/trunk: ./ checkstyle/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/java/org/apache/hadoop/hive/contrib/fil...

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java Tue Feb  9 07:55:30 2010
@@ -37,25 +37,29 @@
 @UDFType(deterministic = true)
 public abstract class GenericUDAFEvaluator {
 
-  static public enum Mode {
+  /**
+   * Mode.
+   *
+   */
+  public static enum Mode {
     /**
      * PARTIAL1: from original data to partial aggregation data: iterate() and
-     * terminatePartial() will be called
+     * terminatePartial() will be called.
      */
     PARTIAL1,
-    /**
+        /**
      * PARTIAL2: from partial aggregation data to partial aggregation data:
-     * merge() and terminatePartial() will be called
+     * merge() and terminatePartial() will be called.
      */
     PARTIAL2,
-    /**
+        /**
      * FINAL: from partial aggregation to full aggregation: merge() and
-     * terminate() will be called
+     * terminate() will be called.
      */
     FINAL,
-    /**
+        /**
      * COMPLETE: from original data directly to full aggregation: iterate() and
-     * terminate() will be called
+     * terminate() will be called.
      */
     COMPLETE
   };
@@ -63,7 +67,7 @@
   Mode mode;
 
   /**
-   * The constructor
+   * The constructor.
    */
   public GenericUDAFEvaluator() {
   }
@@ -89,8 +93,7 @@
    *         execution time. 2. We call GenericUDAFResolver.getEvaluator at
    *         compilation time,
    */
-  public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-      throws HiveException {
+  public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
     // This function should be overriden in every sub class
     // And the sub class should call super.init(m, parameters) to get mode set.
     mode = m;
@@ -114,8 +117,7 @@
   /**
    * Get a new aggregation object.
    */
-  public abstract AggregationBuffer getNewAggregationBuffer()
-      throws HiveException;
+  public abstract AggregationBuffer getNewAggregationBuffer() throws HiveException;
 
   /**
    * Reset the aggregation. This is useful if we want to reuse the same
@@ -132,8 +134,7 @@
    * @param parameters
    *          The row, can be inspected by the OIs passed in init().
    */
-  public void aggregate(AggregationBuffer agg, Object[] parameters)
-      throws HiveException {
+  public void aggregate(AggregationBuffer agg, Object[] parameters) throws HiveException {
     if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
       iterate(agg, parameters);
     } else {
@@ -163,16 +164,14 @@
    * @param parameters
    *          The objects of parameters.
    */
-  public abstract void iterate(AggregationBuffer agg, Object[] parameters)
-      throws HiveException;
+  public abstract void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException;
 
   /**
    * Get partial aggregation result.
    * 
    * @return partial aggregation result.
    */
-  public abstract Object terminatePartial(AggregationBuffer agg)
-      throws HiveException;
+  public abstract Object terminatePartial(AggregationBuffer agg) throws HiveException;
 
   /**
    * Merge with partial aggregation result. NOTE: null might be passed in case
@@ -181,8 +180,7 @@
    * @param partial
    *          The partial aggregation result.
    */
-  public abstract void merge(AggregationBuffer agg, Object partial)
-      throws HiveException;
+  public abstract void merge(AggregationBuffer agg, Object partial) throws HiveException;
 
   /**
    * Get final aggregation result.

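The Mode javadoc above describes the map/reduce lifecycle of a UDAF: PARTIAL1 and COMPLETE consume original rows through iterate(), while PARTIAL2 and FINAL consume partial aggregations through merge(). The standalone sketch below (plain Java, not Hive source; every name other than the Mode constants is hypothetical) simply restates that dispatch table, the same decision aggregate() and evaluate() make in the class above.

    public class ModeDispatchSketch {

      enum Mode { PARTIAL1, PARTIAL2, FINAL, COMPLETE }

      // Which evaluator callbacks run for each mode, per the javadoc above.
      static String callbacksFor(Mode mode) {
        switch (mode) {
        case PARTIAL1: return "iterate() + terminatePartial()";
        case PARTIAL2: return "merge() + terminatePartial()";
        case FINAL:    return "merge() + terminate()";
        case COMPLETE: return "iterate() + terminate()";
        default:       throw new IllegalArgumentException("unknown mode: " + mode);
        }
      }

      public static void main(String[] args) {
        for (Mode m : Mode.values()) {
          System.out.println(m + " -> " + callbacksFor(m));
        }
      }
    }
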
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFResolver.java Tue Feb  9 07:55:30 2010
@@ -47,6 +47,5 @@
    *          The types of the parameters. We need the type information to know
    *          which evaluator class to use.
    */
-  GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-      throws SemanticException;
+  GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException;
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java Tue Feb  9 07:55:30 2010
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -30,7 +30,8 @@
  * overriding the terminate() method of the evaluator.
  * 
  */
-@Description(name = "std,stddev,stddev_pop", value = "_FUNC_(x) - Returns the standard deviation of a set of numbers")
+@Description(name = "std,stddev,stddev_pop",
+    value = "_FUNC_(x) - Returns the standard deviation of a set of numbers")
 public class GenericUDAFStd extends GenericUDAFVariance {
 
   @Override
@@ -44,7 +45,7 @@
     if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
     switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
     case BYTE:
@@ -59,7 +60,7 @@
     default:
       throw new UDFArgumentTypeException(0,
           "Only numeric or string type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
   }
 
@@ -79,11 +80,11 @@
         return null;
       } else {
         if (myagg.count > 1) {
-          result.set(Math.sqrt(myagg.variance / (myagg.count)));
+          getResult().set(Math.sqrt(myagg.variance / (myagg.count)));
         } else { // for one element the variance is always 0
-          result.set(0);
+          getResult().set(0);
         }
-        return result;
+        return getResult();
       }
     }
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStdSample.java Tue Feb  9 07:55:30 2010
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -30,13 +30,12 @@
  * overriding the terminate() method of the evaluator.
  * 
  */
-@Description(name = "stddev_samp", value = "_FUNC_(x) - Returns the sample standard deviation of a set of "
-    + "numbers")
+@Description(name = "stddev_samp",
+    value = "_FUNC_(x) - Returns the sample standard deviation of a set of numbers")
 public class GenericUDAFStdSample extends GenericUDAFVariance {
 
   @Override
-  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-      throws SemanticException {
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
     if (parameters.length != 1) {
       throw new UDFArgumentTypeException(parameters.length - 1,
           "Exactly one argument is expected.");
@@ -45,7 +44,7 @@
     if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
     switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
     case BYTE:
@@ -60,14 +59,14 @@
     default:
       throw new UDFArgumentTypeException(0,
           "Only numeric or string type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
   }
 
   /**
    * Compute the sample standard deviation by extending
    * GenericUDAFVarianceEvaluator and overriding the terminate() method of the
-   * evaluator
+   * evaluator.
    */
   public static class GenericUDAFStdSampleEvaluator extends
       GenericUDAFVarianceEvaluator {
@@ -80,11 +79,11 @@
         return null;
       } else {
         if (myagg.count > 1) {
-          result.set(Math.sqrt(myagg.variance / (myagg.count - 1)));
+          getResult().set(Math.sqrt(myagg.variance / (myagg.count - 1)));
         } else { // for one element the variance is always 0
-          result.set(0);
+          getResult().set(0);
         }
-        return result;
+        return getResult();
       }
     }
   }

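The two standard-deviation evaluators in this commit differ only in terminate(): std/stddev_pop divides the accumulated squared deviations by count, while stddev_samp divides by count - 1. A small standalone sketch (plain Java, not Hive source) of that divisor difference, assuming myagg.variance holds the running sum of squared deviations from the mean:

    public class StdDivisorSketch {
      public static void main(String[] args) {
        double[] xs = {2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0};
        long count = xs.length;
        double sum = 0.0;
        for (double x : xs) {
          sum += x;
        }
        double mean = sum / count;
        double variance = 0.0;  // sum of squared deviations, like myagg.variance (assumed)
        for (double x : xs) {
          variance += (x - mean) * (x - mean);
        }
        // GenericUDAFStd.terminate():        sqrt(variance / count)
        System.out.println("stddev_pop  = " + Math.sqrt(variance / count));        // 2.0
        // GenericUDAFStdSample.terminate():  sqrt(variance / (count - 1))
        System.out.println("stddev_samp = " + Math.sqrt(variance / (count - 1)));  // ~2.138
      }
    }
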
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java Tue Feb  9 07:55:30 2010
@@ -19,8 +19,8 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -33,14 +33,17 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.util.StringUtils;
 
+/**
+ * GenericUDAFSum.
+ *
+ */
 @Description(name = "sum", value = "_FUNC_(x) - Returns the sum of a set of numbers")
 public class GenericUDAFSum implements GenericUDAFResolver {
 
   static final Log LOG = LogFactory.getLog(GenericUDAFSum.class.getName());
 
   @Override
-  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-      throws SemanticException {
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
     if (parameters.length != 1) {
       throw new UDFArgumentTypeException(parameters.length - 1,
           "Exactly one argument is expected.");
@@ -49,7 +52,7 @@
     if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
     switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
     case BYTE:
@@ -65,18 +68,20 @@
     default:
       throw new UDFArgumentTypeException(0,
           "Only numeric or string type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
   }
 
+  /**
+   * GenericUDAFSumDouble.
+   *
+   */
   public static class GenericUDAFSumDouble extends GenericUDAFEvaluator {
-
-    PrimitiveObjectInspector inputOI;
-    DoubleWritable result;
+    private PrimitiveObjectInspector inputOI;
+    private DoubleWritable result;
 
     @Override
-    public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-        throws HiveException {
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
       assert (parameters.length == 1);
       super.init(m, parameters);
       result = new DoubleWritable(0);
@@ -84,7 +89,7 @@
       return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
     }
 
-    /** class for storing double sum value */
+    /** class for storing double sum value. */
     static class SumDoubleAgg implements AggregationBuffer {
       boolean empty;
       double sum;
@@ -107,8 +112,7 @@
     boolean warned = false;
 
     @Override
-    public void iterate(AggregationBuffer agg, Object[] parameters)
-        throws HiveException {
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
       assert (parameters.length == 1);
       try {
         merge(agg, parameters[0]);
@@ -119,7 +123,7 @@
               + StringUtils.stringifyException(e));
           LOG
               .warn(getClass().getSimpleName()
-                  + " ignoring similar exceptions.");
+              + " ignoring similar exceptions.");
         }
       }
     }
@@ -130,8 +134,7 @@
     }
 
     @Override
-    public void merge(AggregationBuffer agg, Object partial)
-        throws HiveException {
+    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
       if (partial != null) {
         SumDoubleAgg myagg = (SumDoubleAgg) agg;
         myagg.empty = false;
@@ -151,14 +154,16 @@
 
   }
 
+  /**
+   * GenericUDAFSumLong.
+   *
+   */
   public static class GenericUDAFSumLong extends GenericUDAFEvaluator {
-
-    PrimitiveObjectInspector inputOI;
-    LongWritable result;
+    private PrimitiveObjectInspector inputOI;
+    private LongWritable result;
 
     @Override
-    public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-        throws HiveException {
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
       assert (parameters.length == 1);
       super.init(m, parameters);
       result = new LongWritable(0);
@@ -166,7 +171,7 @@
       return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
     }
 
-    /** class for storing double sum value */
+    /** class for storing double sum value. */
     static class SumLongAgg implements AggregationBuffer {
       boolean empty;
       long sum;
@@ -186,11 +191,10 @@
       myagg.sum = 0;
     }
 
-    boolean warned = false;
+    private boolean warned = false;
 
     @Override
-    public void iterate(AggregationBuffer agg, Object[] parameters)
-        throws HiveException {
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
       assert (parameters.length == 1);
       try {
         merge(agg, parameters[0]);
@@ -209,8 +213,7 @@
     }
 
     @Override
-    public void merge(AggregationBuffer agg, Object partial)
-        throws HiveException {
+    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
       if (partial != null) {
         SumLongAgg myagg = (SumLongAgg) agg;
         myagg.sum += PrimitiveObjectInspectorUtils.getLong(partial, inputOI);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java Tue Feb  9 07:55:30 2010
@@ -21,8 +21,8 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -45,14 +45,14 @@
  * GenericUDAFStd GenericUDAFStdSample
  * 
  */
-@Description(name = "variance,var_pop", value = "_FUNC_(x) - Returns the variance of a set of numbers")
+@Description(name = "variance,var_pop",
+    value = "_FUNC_(x) - Returns the variance of a set of numbers")
 public class GenericUDAFVariance implements GenericUDAFResolver {
 
   static final Log LOG = LogFactory.getLog(GenericUDAFVariance.class.getName());
 
   @Override
-  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-      throws SemanticException {
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
     if (parameters.length != 1) {
       throw new UDFArgumentTypeException(parameters.length - 1,
           "Exactly one argument is expected.");
@@ -61,7 +61,7 @@
     if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
     switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
     case BYTE:
@@ -76,7 +76,7 @@
     default:
       throw new UDFArgumentTypeException(0,
           "Only numeric or string type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
   }
 
@@ -96,26 +96,25 @@
   public static class GenericUDAFVarianceEvaluator extends GenericUDAFEvaluator {
 
     // For PARTIAL1 and COMPLETE
-    PrimitiveObjectInspector inputOI;
+    private PrimitiveObjectInspector inputOI;
 
     // For PARTIAL2 and FINAL
-    StructObjectInspector soi;
-    StructField countField;
-    StructField sumField;
-    StructField varianceField;
-    LongObjectInspector countFieldOI;
-    DoubleObjectInspector sumFieldOI;
-    DoubleObjectInspector varianceFieldOI;
+    private StructObjectInspector soi;
+    private StructField countField;
+    private StructField sumField;
+    private StructField varianceField;
+    private LongObjectInspector countFieldOI;
+    private DoubleObjectInspector sumFieldOI;
+    private DoubleObjectInspector varianceFieldOI;
 
     // For PARTIAL1 and PARTIAL2
-    Object[] partialResult;
+    private Object[] partialResult;
 
     // For FINAL and COMPLETE
-    DoubleWritable result;
+    private DoubleWritable result;
 
     @Override
-    public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-        throws HiveException {
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
       assert (parameters.length == 1);
       super.init(m, parameters);
 
@@ -161,7 +160,7 @@
             foi);
 
       } else {
-        result = new DoubleWritable(0);
+        setResult(new DoubleWritable(0));
         return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
       }
     }
@@ -187,7 +186,7 @@
       myagg.variance = 0;
     }
 
-    boolean warned = false;
+    private boolean warned = false;
 
     @Override
     public void iterate(AggregationBuffer agg, Object[] parameters)
@@ -200,7 +199,7 @@
           double v = PrimitiveObjectInspectorUtils.getDouble(p, inputOI);
 
           if (myagg.count != 0) { // if count==0 => the variance is going to be
-                                  // 0
+            // 0
             // after 1 iteration
             double alpha = (myagg.sum + v) / (myagg.count + 1) - myagg.sum
                 / myagg.count;
@@ -234,8 +233,7 @@
     }
 
     @Override
-    public void merge(AggregationBuffer agg, Object partial)
-        throws HiveException {
+    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
       if (partial != null) {
         StdAgg myagg = (StdAgg) agg;
 
@@ -280,13 +278,21 @@
         return null;
       } else {
         if (myagg.count > 1) {
-          result.set(myagg.variance / (myagg.count));
+          getResult().set(myagg.variance / (myagg.count));
         } else { // for one element the variance is always 0
-          result.set(0);
+          getResult().set(0);
         }
-        return result;
+        return getResult();
       }
     }
+
+    public void setResult(DoubleWritable result) {
+      this.result = result;
+    }
+
+    public DoubleWritable getResult() {
+      return result;
+    }
   }
 
 }

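The iterate() hunk above updates a running count, sum and variance accumulator one value at a time (the alpha term is the shift of the mean caused by the incoming value). The sketch below (plain Java, not Hive source) shows a standard single-pass Welford-style update for the same three quantities; the exact expressions in the patch differ in form but track the same state, and terminate() then divides by count or count - 1 exactly as in the subclasses above.

    public class OnlineVarianceSketch {
      private long count;
      private double mean;
      private double m2;   // running sum of squared deviations from the current mean

      void iterate(double v) {
        count++;
        double delta = v - mean;
        mean += delta / count;
        m2 += delta * (v - mean);
      }

      double variancePop()  { return count > 1 ? m2 / count : 0.0; }
      double varianceSamp() { return count > 1 ? m2 / (count - 1) : 0.0; }

      public static void main(String[] args) {
        OnlineVarianceSketch s = new OnlineVarianceSketch();
        for (double v : new double[] {2, 4, 4, 4, 5, 5, 7, 9}) {
          s.iterate(v);
        }
        System.out.println("var_pop  = " + s.variancePop());   // ~4.0
        System.out.println("var_samp = " + s.varianceSamp());  // ~4.571
      }
    }
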
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVarianceSample.java Tue Feb  9 07:55:30 2010
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -30,7 +30,8 @@
  * the terminate() method of the evaluator.
  * 
  */
-@Description(name = "var_samp", value = "_FUNC_(x) - Returns the sample variance of a set of numbers")
+@Description(name = "var_samp",
+    value = "_FUNC_(x) - Returns the sample variance of a set of numbers")
 public class GenericUDAFVarianceSample extends GenericUDAFVariance {
 
   @Override
@@ -44,7 +45,7 @@
     if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
       throw new UDFArgumentTypeException(0,
           "Only primitive type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
     switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
     case BYTE:
@@ -59,13 +60,13 @@
     default:
       throw new UDFArgumentTypeException(0,
           "Only numeric or string type arguments are accepted but "
-              + parameters[0].getTypeName() + " is passed.");
+          + parameters[0].getTypeName() + " is passed.");
     }
   }
 
   /**
    * Compute the sample variance by extending GenericUDAFVarianceEvaluator and
-   * overriding the terminate() method of the evaluator
+   * overriding the terminate() method of the evaluator.
    */
   public static class GenericUDAFVarianceSampleEvaluator extends
       GenericUDAFVarianceEvaluator {
@@ -78,11 +79,11 @@
         return null;
       } else {
         if (myagg.count > 1) {
-          result.set(myagg.variance / (myagg.count - 1));
+          getResult().set(myagg.variance / (myagg.count - 1));
         } else { // for one element the variance is always 0
-          result.set(0);
+          getResult().set(0);
         }
-        return result;
+        return getResult();
       }
     }
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java Tue Feb  9 07:55:30 2010
@@ -43,11 +43,11 @@
    * GenericUDF use DeferedObject to pass arguments.
    */
   public static interface DeferredObject {
-    public Object get() throws HiveException;
+    Object get() throws HiveException;
   };
 
   /**
-   * The constructor
+   * The constructor.
    */
   public GenericUDF() {
   }

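DeferredObject above is how a GenericUDF receives its arguments: each argument is a handle whose get() computes the value only when called, so conditional functions (IF, CASE, COALESCE later in this commit) can skip branches they never need. A standalone sketch (plain Java, not Hive source; all names are hypothetical) of a COALESCE-like function over deferred arguments:

    import java.util.function.Supplier;

    public class DeferredArgSketch {

      interface Deferred {
        Object get();
      }

      static Deferred lazy(Supplier<Object> s) {
        return s::get;
      }

      // COALESCE-like: evaluate arguments left to right, stop at the first
      // non-null value, never call get() on the rest.
      static Object coalesce(Deferred... args) {
        for (Deferred d : args) {
          Object v = d.get();
          if (v != null) {
            return v;
          }
        }
        return null;
      }

      public static void main(String[] args) {
        Object out = coalesce(
            lazy(() -> null),
            lazy(() -> "first non-null"),
            lazy(() -> { throw new IllegalStateException("never evaluated"); }));
        System.out.println(out);  // first non-null
      }
    }
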
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java Tue Feb  9 07:55:30 2010
@@ -20,9 +20,9 @@
 
 import java.util.ArrayList;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -31,15 +31,18 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
-@Description(name = "array", value = "_FUNC_(n0, n1...) - Creates an array with the given elements ")
+/**
+ * GenericUDFArray.
+ *
+ */
+@Description(name = "array",
+    value = "_FUNC_(n0, n1...) - Creates an array with the given elements ")
 public class GenericUDFArray extends GenericUDF {
-
-  Converter[] converters;
-  ArrayList<Object> ret = new ArrayList<Object>();
+  private Converter[] converters;
+  private ArrayList<Object> ret = new ArrayList<Object>();
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
 
     GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java Tue Feb  9 07:55:30 2010
@@ -22,8 +22,6 @@
 import java.lang.reflect.Method;
 import java.util.ArrayList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -45,9 +43,6 @@
  * 
  */
 public class GenericUDFBridge extends GenericUDF implements Serializable {
-
-  private static Log LOG = LogFactory.getLog(GenericUDFBridge.class.getName());
-
   /**
    * The name of the UDF.
    */
@@ -121,13 +116,12 @@
    */
   transient UDF udf;
   /**
-   * The non-deferred real arguments for method invocation
+   * The non-deferred real arguments for method invocation.
    */
   transient Object[] realArguments;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
 
     udf = (UDF) ReflectionUtils.newInstance(udfClass, null);
 
@@ -149,7 +143,7 @@
     // Get the return ObjectInspector.
     ObjectInspector returnOI = ObjectInspectorFactory
         .getReflectionObjectInspector(udfMethod.getGenericReturnType(),
-            ObjectInspectorOptions.JAVA);
+        ObjectInspectorOptions.JAVA);
 
     return returnOI;
   }

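GenericUDFBridge above adapts a plain, legacy-style UDF class to the GenericUDF interface: it instantiates the class reflectively, resolves a matching evaluate() method, derives the return ObjectInspector from that method's generic return type, and later invokes it with the materialised ("non-deferred") arguments. A standalone sketch (plain Java, not Hive source; the UpperUdf class is hypothetical) of that reflective call pattern:

    import java.lang.reflect.Method;

    public class UdfBridgeSketch {

      // A legacy-style UDF: just a class with a plain evaluate() method.
      public static class UpperUdf {
        public String evaluate(String s) {
          return s == null ? null : s.toUpperCase();
        }
      }

      public static void main(String[] args) throws Exception {
        Object udf = UpperUdf.class.getDeclaredConstructor().newInstance();
        Method evaluate = UpperUdf.class.getMethod("evaluate", String.class);

        Object[] realArguments = {"hive"};                     // already-materialised values
        Object result = evaluate.invoke(udf, realArguments);

        System.out.println(result);                            // HIVE
        System.out.println(evaluate.getGenericReturnType());   // class java.lang.String
      }
    }
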
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java Tue Feb  9 07:55:30 2010
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -34,16 +32,12 @@
  * and f should have the same TypeInfo, or an exception will be thrown.
  */
 public class GenericUDFCase extends GenericUDF {
-
-  private static Log LOG = LogFactory.getLog(GenericUDFCase.class.getName());
-
-  ObjectInspector[] argumentOIs;
-  GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
-  GenericUDFUtils.ReturnObjectInspectorResolver caseOIResolver;
+  private ObjectInspector[] argumentOIs;
+  private GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
+  private GenericUDFUtils.ReturnObjectInspectorResolver caseOIResolver;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentTypeException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentTypeException {
 
     argumentOIs = arguments;
     caseOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver();
@@ -55,15 +49,15 @@
       if (!caseOIResolver.update(arguments[i])) {
         throw new UDFArgumentTypeException(i,
             "The expressions after WHEN should have the same type with that after CASE: \""
-                + caseOIResolver.get().getTypeName() + "\" is expected but \""
-                + arguments[i].getTypeName() + "\" is found");
+            + caseOIResolver.get().getTypeName() + "\" is expected but \""
+            + arguments[i].getTypeName() + "\" is found");
       }
       if (!returnOIResolver.update(arguments[i + 1])) {
         throw new UDFArgumentTypeException(i + 1,
             "The expressions after THEN should have the same type: \""
-                + returnOIResolver.get().getTypeName()
-                + "\" is expected but \"" + arguments[i + 1].getTypeName()
-                + "\" is found");
+            + returnOIResolver.get().getTypeName()
+            + "\" is expected but \"" + arguments[i + 1].getTypeName()
+            + "\" is found");
       }
     }
     if (arguments.length % 2 == 0) {
@@ -71,9 +65,9 @@
       if (!returnOIResolver.update(arguments[i + 1])) {
         throw new UDFArgumentTypeException(i + 1,
             "The expression after ELSE should have the same type as those after THEN: \""
-                + returnOIResolver.get().getTypeName()
-                + "\" is expected but \"" + arguments[i + 1].getTypeName()
-                + "\" is found");
+            + returnOIResolver.get().getTypeName()
+            + "\" is expected but \"" + arguments[i + 1].getTypeName()
+            + "\" is found");
       }
     }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java Tue Feb  9 07:55:30 2010
@@ -18,10 +18,8 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
@@ -31,19 +29,16 @@
  * NOTES: 1. a, b and c should have the same TypeInfo, or an exception will be
  * thrown.
  */
-@Description(name = "coalesce", value = "_FUNC_(a1, a2, ...) - Returns the first non-null argument", extended = "Example:\n"
+@Description(name = "coalesce",
+    value = "_FUNC_(a1, a2, ...) - Returns the first non-null argument",
+    extended = "Example:\n"
     + "  > SELECT _FUNC_(NULL, 1, NULL) FROM src LIMIT 1;\n" + "  1")
 public class GenericUDFCoalesce extends GenericUDF {
-
-  private static Log LOG = LogFactory
-      .getLog(GenericUDFCoalesce.class.getName());
-
-  ObjectInspector[] argumentOIs;
-  GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
+  private ObjectInspector[] argumentOIs;
+  private GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentTypeException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentTypeException {
 
     argumentOIs = arguments;
 
@@ -52,9 +47,9 @@
       if (!returnOIResolver.update(arguments[i])) {
         throw new UDFArgumentTypeException(i,
             "The expressions after COALESCE should all have the same type: \""
-                + returnOIResolver.get().getTypeName()
-                + "\" is expected but \"" + arguments[i].getTypeName()
-                + "\" is found");
+            + returnOIResolver.get().getTypeName()
+            + "\" is expected but \"" + arguments[i].getTypeName()
+            + "\" is found");
       }
     }
     return returnOIResolver.get();

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java Tue Feb  9 07:55:30 2010
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -38,16 +38,15 @@
  * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF
  */
 @Description(name = "concat_ws", value = "_FUNC_(separator, str1, str2, ...) - "
-    + "returns the concatenation of the strings separated by the separator.", extended = "Example:\n"
+    + "returns the concatenation of the strings separated by the separator.",
+    extended = "Example:\n"
     + "  > SELECT _FUNC_('ce', 'fa', 'book') FROM src LIMIT 1;\n"
     + "  'facebook'")
 public class GenericUDFConcatWS extends GenericUDF {
-
-  ObjectInspector[] argumentOIs;
+  private ObjectInspector[] argumentOIs;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     if (arguments.length < 2) {
       throw new UDFArgumentLengthException(
           "The function CONCAT_WS(separator,str1,str2,str3,...) needs at least two arguments.");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFElt.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFElt.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFElt.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFElt.java Tue Feb  9 07:55:30 2010
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -36,15 +36,15 @@
  * 
  * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF
  */
-@Description(name = "elt", value = "_FUNC_(n, str1, str2, ...) - returns the n-th string", extended = "Example:\n"
+@Description(name = "elt",
+    value = "_FUNC_(n, str1, str2, ...) - returns the n-th string",
+    extended = "Example:\n"
     + "  > SELECT _FUNC_(1, 'face', 'book') FROM src LIMIT 1;\n" + "  'face'")
 public class GenericUDFElt extends GenericUDF {
-
-  ObjectInspectorConverters.Converter[] converters;
+  private ObjectInspectorConverters.Converter[] converters;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     if (arguments.length < 2) {
       throw new UDFArgumentLengthException(
           "The function ELT(N,str1,str2,str3,...) needs at least two arguments.");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFField.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFField.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFField.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFField.java Tue Feb  9 07:55:30 2010
@@ -19,21 +19,26 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
 
-@Description(name = "field", value = "_FUNC_(str, str1, str2, ...) - returns the index of str in the str1,str2,... list or 0 if not found", extended = "All primitive types are supported, arguments are compared using str.equals(x)."
+/**
+ * GenericUDFField.
+ *
+ */
+@Description(name = "field", value = "_FUNC_(str, str1, str2, ...) - "
+    + "returns the index of str in the str1,str2,... list or 0 if not found",
+    extended = "All primitive types are supported, arguments are compared using str.equals(x)."
     + " If str is NULL, the return value is 0.")
 public class GenericUDFField extends GenericUDF {
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     if (arguments.length < 2) {
       throw new UDFArgumentException(
           "The function FIELD(str, str1, str2, ...) needs at least two arguments.");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFHash.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFHash.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFHash.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFHash.java Tue Feb  9 07:55:30 2010
@@ -19,10 +19,8 @@
 package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -34,20 +32,16 @@
  */
 @Description(name = "hash", value = "_FUNC_(a1, a2, ...) - Returns a hash value of the arguments")
 public class GenericUDFHash extends GenericUDF {
-
-  private static Log LOG = LogFactory.getLog(GenericUDFHash.class.getName());
-
-  ObjectInspector[] argumentOIs;
+  private ObjectInspector[] argumentOIs;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentTypeException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentTypeException {
 
     argumentOIs = arguments;
     return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
   }
 
-  IntWritable result = new IntWritable();
+  private IntWritable result = new IntWritable();
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java Tue Feb  9 07:55:30 2010
@@ -34,13 +34,11 @@
  * on the context in which it is used.
  */
 public class GenericUDFIf extends GenericUDF {
-
-  ObjectInspector[] argumentOIs;
-  GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
+  private ObjectInspector[] argumentOIs;
+  private GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     argumentOIs = arguments;
     returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(true);
 
@@ -52,22 +50,22 @@
     boolean conditionTypeIsOk = (arguments[0].getCategory() == ObjectInspector.Category.PRIMITIVE);
     if (conditionTypeIsOk) {
       PrimitiveObjectInspector poi = ((PrimitiveObjectInspector) arguments[0]);
-      conditionTypeIsOk = (poi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN || poi
-          .getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.VOID);
+      conditionTypeIsOk = (poi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN
+          || poi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.VOID);
     }
     if (!conditionTypeIsOk) {
       throw new UDFArgumentTypeException(0,
           "The first argument of function IF should be \""
-              + Constants.BOOLEAN_TYPE_NAME + "\", but \""
-              + arguments[0].getTypeName() + "\" is found");
+          + Constants.BOOLEAN_TYPE_NAME + "\", but \""
+          + arguments[0].getTypeName() + "\" is found");
     }
 
     if (!(returnOIResolver.update(arguments[1]) && returnOIResolver
         .update(arguments[2]))) {
       throw new UDFArgumentTypeException(2,
           "The second and the third arguments of function IF should have the same type, "
-              + "but they are different: \"" + arguments[1].getTypeName()
-              + "\" and \"" + arguments[2].getTypeName() + "\"");
+          + "but they are different: \"" + arguments[1].getTypeName()
+          + "\" and \"" + arguments[2].getTypeName() + "\"");
     }
 
     return returnOIResolver.get();

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIndex.java Tue Feb  9 07:55:30 2010
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -29,8 +29,11 @@
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.io.IntWritable;
 
+/**
+ * GenericUDFIndex.
+ *
+ */
 @Description(name = "index", value = "_FUNC_(a, n) - Returns the n-th element of a ")
 public class GenericUDFIndex extends GenericUDF {
   private MapObjectInspector mapOI;
@@ -38,11 +41,9 @@
   private ListObjectInspector listOI;
   private PrimitiveObjectInspector indexOI;
   private ObjectInspector returnOI;
-  private final IntWritable result = new IntWritable(-1);
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     if (arguments.length != 2) {
       throw new UDFArgumentLengthException(
           "The function INDEX accepts exactly 2 arguments.");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInstr.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInstr.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInstr.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInstr.java Tue Feb  9 07:55:30 2010
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -41,16 +41,16 @@
  * </pre>
  * <p>
  */
-@Description(name = "instr", value = "_FUNC_(str, substr) - Returns the index of the first occurance "
-    + "of substr in str", extended = "Example:\n"
+@Description(name = "instr",
+    value = "_FUNC_(str, substr) - Returns the index of the first occurance of substr in str",
+    extended = "Example:\n"
     + "  > SELECT _FUNC_('Facebook', 'boo') FROM src LIMIT 1;\n" + "  5")
 public class GenericUDFInstr extends GenericUDF {
 
   ObjectInspectorConverters.Converter[] converters;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     if (arguments.length != 2) {
       throw new UDFArgumentLengthException(
           "The function INSTR accepts exactly 2 arguments.");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLocate.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLocate.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLocate.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLocate.java Tue Feb  9 07:55:30 2010
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -42,16 +42,16 @@
  * </pre>
  * <p>
  */
-@Description(name = "locate", value = "_FUNC_(substr, str[, pos]) - Returns the position of the first "
-    + "occurance of substr in str after position pos", extended = "Example:\n"
+@Description(name = "locate",
+    value = "_FUNC_(substr, str[, pos]) - Returns the position of the first "
+    + "occurance of substr in str after position pos",
+    extended = "Example:\n"
     + "  > SELECT _FUNC_('bar', 'foobarbar', 5) FROM src LIMIT 1;\n" + "  7")
 public class GenericUDFLocate extends GenericUDF {
-
-  ObjectInspectorConverters.Converter[] converters;
+  private ObjectInspectorConverters.Converter[] converters;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     if (arguments.length != 2 && arguments.length != 3) {
       throw new UDFArgumentLengthException(
           "The function LOCATE accepts exactly 2 or 3 arguments.");
@@ -82,7 +82,7 @@
     return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
   }
 
-  IntWritable intWritable = new IntWritable(0);
+  private IntWritable intWritable = new IntWritable(0);
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java Tue Feb  9 07:55:30 2010
@@ -20,10 +20,10 @@
 
 import java.util.HashMap;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -32,24 +32,28 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 
-@Description(name = "map", value = "_FUNC_(key0, value0, key1, value1...) - Creates a map with the given key/value pairs ")
+/**
+ * GenericUDFMap.
+ *
+ */
+@Description(name = "map", value = "_FUNC_(key0, value0, key1, value1...) - "
+    + "Creates a map with the given key/value pairs ")
 public class GenericUDFMap extends GenericUDF {
   Converter[] converters;
   HashMap<Object, Object> ret = new HashMap<Object, Object>();
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
 
     if (arguments.length % 2 != 0) {
       throw new UDFArgumentLengthException(
           "Arguments must be in key/value pairs");
     }
 
-    GenericUDFUtils.ReturnObjectInspectorResolver keyOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(
-        true);
-    GenericUDFUtils.ReturnObjectInspectorResolver valueOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(
-        true);
+    GenericUDFUtils.ReturnObjectInspectorResolver keyOIResolver =
+        new GenericUDFUtils.ReturnObjectInspectorResolver(true);
+    GenericUDFUtils.ReturnObjectInspectorResolver valueOIResolver =
+        new GenericUDFUtils.ReturnObjectInspectorResolver(true);
 
     for (int i = 0; i < arguments.length; i++) {
       if (i % 2 == 0) {
@@ -57,7 +61,7 @@
         if (!(arguments[i] instanceof PrimitiveObjectInspector)) {
           throw new UDFArgumentTypeException(1,
               "Primitive Type is expected but " + arguments[i].getTypeName()
-                  + "\" is found");
+              + "\" is found");
         }
         if (!keyOIResolver.update(arguments[i])) {
           throw new UDFArgumentTypeException(i, "Key type \""

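The pairing rule enforced above (arguments must come in key/value pairs, with even positions as keys and odd positions as values) can be sketched independently of the ObjectInspector machinery. This is an illustrative helper with a hypothetical name, not the committed evaluate() body:

    // Illustrative only: pairs up a flat argument list into a map,
    // as in map('k1', v1, 'k2', v2). Even index = key, odd index = value.
    static java.util.HashMap<Object, Object> buildMap(Object... args) {
      if (args.length % 2 != 0) {
        throw new IllegalArgumentException("Arguments must be in key/value pairs");
      }
      java.util.HashMap<Object, Object> ret = new java.util.HashMap<Object, Object>();
      for (int i = 0; i < args.length; i += 2) {
        ret.put(args[i], args[i + 1]);
      }
      return ret;
    }
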
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotNull.java Tue Feb  9 07:55:30 2010
@@ -18,22 +18,25 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.BooleanWritable;
 
-@Description(name = "isnotnull", value = "_FUNC_ a - Returns true if a is not NULL and false otherwise")
+/**
+ * GenericUDFOPNotNull.
+ *
+ */
+@Description(name = "isnotnull",
+    value = "_FUNC_ a - Returns true if a is not NULL and false otherwise")
 public class GenericUDFOPNotNull extends GenericUDF {
-
-  BooleanWritable result = new BooleanWritable();
+  private BooleanWritable result = new BooleanWritable();
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     if (arguments.length != 1) {
       throw new UDFArgumentLengthException(
           "The operator 'IS NOT NULL' only accepts 1 argument.");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNull.java Tue Feb  9 07:55:30 2010
@@ -18,18 +18,21 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.BooleanWritable;
 
+/**
+ * GenericUDFOPNull.
+ *
+ */
 @Description(name = "isnull", value = "_FUNC_ a - Returns true if a is NULL and false otherwise")
 public class GenericUDFOPNull extends GenericUDF {
-
-  BooleanWritable result = new BooleanWritable();
+  private BooleanWritable result = new BooleanWritable();
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments)

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSize.java Tue Feb  9 07:55:30 2010
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -31,6 +31,10 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
 
+/**
+ * GenericUDFSize.
+ *
+ */
 @Description(name = "size", value = "_FUNC_(a) - Returns the size of a")
 public class GenericUDFSize extends GenericUDF {
   private ObjectInspector returnOI;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java Tue Feb  9 07:55:30 2010
@@ -20,9 +20,9 @@
 
 import java.util.ArrayList;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -30,6 +30,10 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
+/**
+ * GenericUDFSplit.
+ *
+ */
 @Description(name = "split", value = "_FUNC_(str, regex) - Splits str around occurances that match "
     + "regex", extended = "Example:\n"
     + "  > SELECT _FUNC_('oneAtwoBthreeC', '[ABC]') FROM src LIMIT 1;\n"
@@ -38,8 +42,7 @@
   private ObjectInspectorConverters.Converter[] converters;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     if (arguments.length != 2) {
       throw new UDFArgumentLengthException(
           "The function SPLIT(s, regexp) takes exactly 2 arguments.");
@@ -52,7 +55,8 @@
     }
 
     return ObjectInspectorFactory
-        .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+        .getStandardListObjectInspector(PrimitiveObjectInspectorFactory
+            .writableStringObjectInspector);
   }
 
   @Override

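As a rough sketch of the documented split() behaviour, independent of the writable Text and converter machinery above, plain java.util.regex splitting looks like the helper below. The -1 split limit (which keeps trailing empty strings) is an assumption made for illustration, not a statement about the UDF's exact edge-case handling:

    // Illustrative only: splitToList("oneAtwoBthreeC", "[ABC]") -> [one, two, three, ...]
    static java.util.List<String> splitToList(String str, String regex) {
      java.util.List<String> result = new java.util.ArrayList<String>();
      for (String piece : str.split(regex, -1)) {  // -1: keep trailing empty strings (assumption)
        result.add(piece);
      }
      return result;
    }
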
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java Tue Feb  9 07:55:30 2010
@@ -25,8 +25,6 @@
 import java.nio.ByteBuffer;
 import java.util.HashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -47,10 +45,7 @@
 /**
  * Util functions for GenericUDF classes.
  */
-public class GenericUDFUtils {
-
-  private static Log LOG = LogFactory.getLog(GenericUDFUtils.class.getName());
-
+public final class GenericUDFUtils {
   /**
    * Checks if b is the first byte of a UTF-8 character.
    * 
@@ -121,7 +116,7 @@
         // but ObjectInspectors are different.
         returnObjectInspector = ObjectInspectorUtils
             .getStandardObjectInspector(returnObjectInspector,
-                ObjectInspectorCopyOption.WRITABLE);
+            ObjectInspectorCopyOption.WRITABLE);
         return true;
       }
 
@@ -220,7 +215,7 @@
       // or String[] etc in the last argument.
       lastParaElementType = TypeInfoUtils
           .getArrayElementType(methodParameterTypes.length == 0 ? null
-              : methodParameterTypes[methodParameterTypes.length - 1]);
+          : methodParameterTypes[methodParameterTypes.length - 1]);
       isVariableLengthArgument = (lastParaElementType != null);
 
       // Create the output OI array
@@ -244,11 +239,11 @@
           if (methodParameterTypes[i] == Object.class) {
             methodParameterOIs[i] = ObjectInspectorUtils
                 .getStandardObjectInspector(parameterOIs[i],
-                    ObjectInspectorCopyOption.JAVA);
+                ObjectInspectorCopyOption.JAVA);
           } else {
             methodParameterOIs[i] = ObjectInspectorFactory
                 .getReflectionObjectInspector(methodParameterTypes[i],
-                    ObjectInspectorOptions.JAVA);
+                ObjectInspectorOptions.JAVA);
           }
         }
 
@@ -259,14 +254,14 @@
           for (int i = methodParameterTypes.length - 1; i < parameterOIs.length; i++) {
             methodParameterOIs[i] = ObjectInspectorUtils
                 .getStandardObjectInspector(parameterOIs[i],
-                    ObjectInspectorCopyOption.JAVA);
+                ObjectInspectorCopyOption.JAVA);
           }
         } else {
           // This method takes something like String[], so it only accepts
           // something like String
           ObjectInspector oi = ObjectInspectorFactory
               .getReflectionObjectInspector(lastParaElementType,
-                  ObjectInspectorOptions.JAVA);
+              ObjectInspectorOptions.JAVA);
           for (int i = methodParameterTypes.length - 1; i < parameterOIs.length; i++) {
             methodParameterOIs[i] = oi;
           }
@@ -289,11 +284,11 @@
           if (methodParameterTypes[i] == Object.class) {
             methodParameterOIs[i] = ObjectInspectorUtils
                 .getStandardObjectInspector(parameterOIs[i],
-                    ObjectInspectorCopyOption.JAVA);
+                ObjectInspectorCopyOption.JAVA);
           } else {
             methodParameterOIs[i] = ObjectInspectorFactory
                 .getReflectionObjectInspector(methodParameterTypes[i],
-                    ObjectInspectorOptions.JAVA);
+                ObjectInspectorOptions.JAVA);
           }
         }
       }
@@ -314,7 +309,7 @@
         convertedParameters = new Object[methodParameterTypes.length];
         convertedParametersInArray = (Object[]) Array.newInstance(
             getClassFromType(lastParaElementType), parameterOIs.length
-                - methodParameterTypes.length + 1);
+            - methodParameterTypes.length + 1);
         convertedParameters[convertedParameters.length - 1] = convertedParametersInArray;
       } else {
         convertedParameters = new Object[parameterOIs.length];
@@ -358,7 +353,7 @@
     int unit = i % 10;
     return (i <= 0) ? "" : (i != 11 && unit == 1) ? i + "st"
         : (i != 12 && unit == 2) ? i + "nd" : (i != 13 && unit == 3) ? i + "rd"
-            : i + "th";
+        : i + "th";
   }
 
   /**
@@ -406,4 +401,7 @@
     return -1; // not found
   }
 
+  private GenericUDFUtils() {
+    // prevent instantiation
+  }
 }

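The change above makes GenericUDFUtils a final class with a private constructor, the usual pattern for a static-only utility holder. A minimal standalone sketch of the same pattern (the class and method names here are hypothetical):

    // Utility-holder pattern: final class, private constructor, static members only.
    public final class TextUtils {
      public static boolean isBlank(String s) {
        return s == null || s.trim().isEmpty();
      }

      private TextUtils() {
        // prevent instantiation
      }
    }
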
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java Tue Feb  9 07:55:30 2010
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.Constants;
@@ -34,16 +32,11 @@
  * thrown.
  */
 public class GenericUDFWhen extends GenericUDF {
-
-  private static Log LOG = LogFactory.getLog(GenericUDFWhen.class.getName());
-
-  ObjectInspector[] argumentOIs;
-  GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
-  GenericUDFUtils.ReturnObjectInspectorResolver caseOIResolver;
+  private ObjectInspector[] argumentOIs;
+  private GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
 
   @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments)
-      throws UDFArgumentTypeException {
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentTypeException {
 
     argumentOIs = arguments;
     returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver();
@@ -57,9 +50,9 @@
       if (!returnOIResolver.update(arguments[i + 1])) {
         throw new UDFArgumentTypeException(i + 1,
             "The expressions after THEN should have the same type: \""
-                + returnOIResolver.get().getTypeName()
-                + "\" is expected but \"" + arguments[i + 1].getTypeName()
-                + "\" is found");
+            + returnOIResolver.get().getTypeName()
+            + "\" is expected but \"" + arguments[i + 1].getTypeName()
+            + "\" is found");
       }
     }
     if (arguments.length % 2 == 1) {
@@ -67,9 +60,9 @@
       if (!returnOIResolver.update(arguments[i + 1])) {
         throw new UDFArgumentTypeException(i + 1,
             "The expression after ELSE should have the same type as those after THEN: \""
-                + returnOIResolver.get().getTypeName()
-                + "\" is expected but \"" + arguments[i + 1].getTypeName()
-                + "\" is found");
+            + returnOIResolver.get().getTypeName()
+            + "\" is expected but \"" + arguments[i + 1].getTypeName()
+            + "\" is found");
       }
     }
 

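The arity checks above encode the CASE WHEN argument layout: even positions are WHEN conditions, the following odd positions are THEN results, and an odd total count means the last argument is the ELSE value. A sketch of that selection logic over already-evaluated values (the committed code works on DeferredObjects and ObjectInspectors instead):

    // Illustrative only: args = {when1, then1, when2, then2, ..., [else]}.
    static Object evaluateCaseWhen(Object[] args) {
      for (int i = 0; i + 1 < args.length; i += 2) {
        if (Boolean.TRUE.equals(args[i])) {   // first true WHEN wins
          return args[i + 1];
        }
      }
      // Odd length => trailing ELSE value; otherwise no match yields null.
      return (args.length % 2 == 1) ? args[args.length - 1] : null;
    }
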
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java Tue Feb  9 07:55:30 2010
@@ -73,7 +73,7 @@
   }
 
   /**
-   * Passes an output row to the collector
+   * Passes an output row to the collector.
    * 
    * @param o
    * @throws HiveException

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java Tue Feb  9 07:55:30 2010
@@ -21,27 +21,30 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
-@Description(name = "explode", value = "_FUNC_(a) - separates the elements of array a into multiple rows ")
+/**
+ * GenericUDTFExplode.
+ *
+ */
+@Description(name = "explode",
+    value = "_FUNC_(a) - separates the elements of array a into multiple rows ")
 public class GenericUDTFExplode extends GenericUDTF {
 
-  ListObjectInspector listOI = null;
+  private ListObjectInspector listOI = null;
 
   @Override
   public void close() throws HiveException {
   }
 
   @Override
-  public StructObjectInspector initialize(ObjectInspector[] args)
-      throws UDFArgumentException {
-
+  public StructObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
     if (args.length != 1) {
       throw new UDFArgumentException("explode() takes only one argument");
     }
@@ -59,7 +62,7 @@
         fieldOIs);
   }
 
-  Object forwardObj[] = new Object[1];
+  private Object[] forwardObj = new Object[1];
 
   @Override
   public void process(Object[] o) throws HiveException {

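explode() emits one single-column row per element of its array argument, which is what the forwardObj field above exists for. A plain-Java sketch of that expansion, leaving out the ListObjectInspector and forward() plumbing:

    // Illustrative only: each element of the input list becomes its own
    // single-column "row", mirroring forwardObj[0] = element; forward(forwardObj).
    static java.util.List<Object[]> explodeList(java.util.List<?> input) {
      java.util.List<Object[]> rows = new java.util.ArrayList<Object[]>();
      for (Object element : input) {
        rows.add(new Object[] {element});
      }
      return rows;
    }
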
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java Tue Feb  9 07:55:30 2010
@@ -23,7 +23,7 @@
 
 /**
  * UDTFCollector collects data from a GenericUDTF and passes the data to a
- * UDTFOperator
+ * UDTFOperator.
  */
 public class UDTFCollector implements Collector {
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Conversion.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Conversion.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Conversion.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Conversion.java Tue Feb  9 07:55:30 2010
@@ -91,7 +91,7 @@
    */
   public static byte[] convertToByteArray(byte n) {
     n = (byte) (n ^ ((byte) 0x80)); // flip MSB because "byte" is signed
-    return new byte[] { n };
+    return new byte[] {n};
   }
 
   /**

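The sign-bit flip in convertToByteArray above is what lets signed Java bytes be compared as if they were unsigned: mapping [-128, 127] onto [0, 255] preserves numeric order under byte-wise comparison. A small self-contained demo (class name hypothetical):

    // Flipping the sign bit maps the signed byte range onto [0, 255]
    // while preserving order, so lexicographic comparison of the encoded
    // form matches numeric order.
    public class SignFlipDemo {
      static int encode(byte n) {
        return (n ^ 0x80) & 0xFF;
      }

      public static void main(String[] args) {
        // -1 < 1 numerically, and the encodings keep that order: 127 < 129.
        System.out.println(encode((byte) -1) + " " + encode((byte) 1));
      }
    }
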
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/WrappedRuntimeException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/WrappedRuntimeException.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/WrappedRuntimeException.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/WrappedRuntimeException.java Tue Feb  9 07:55:30 2010
@@ -110,7 +110,7 @@
   public WrappedRuntimeException(Exception except) {
     super(
         except == null || except.getMessage() == null ? "No message available"
-            : except.getMessage());
+        : except.getMessage());
 
     if (except instanceof WrappedRuntimeException
         && ((WrappedRuntimeException) except)._except != null) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/RecordHeader.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/RecordHeader.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/RecordHeader.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/recman/RecordHeader.java Tue Feb  9 07:55:30 2010
@@ -75,7 +75,7 @@
   // offsets
   private static final short O_CURRENTSIZE = 0; // int currentSize
   private static final short O_AVAILABLESIZE = Magic.SZ_INT; // int
-                                                             // availableSize
+  // availableSize
   static final int SIZE = O_AVAILABLESIZE + Magic.SZ_INT;
 
   // my block and the position within the block

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Tue Feb  9 07:55:30 2010
@@ -66,6 +66,10 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.thrift.protocol.TBinaryProtocol;
 
+/**
+ * QTestUtil.
+ *
+ */
 public class QTestUtil {
 
   private String testWarehouse;
@@ -163,8 +167,7 @@
     this(outDir, logDir, false);
   }
 
-  public QTestUtil(String outDir, String logDir, boolean miniMr)
-      throws Exception {
+  public QTestUtil(String outDir, String logDir, boolean miniMr) throws Exception {
     this.outDir = outDir;
     this.logDir = logDir;
     conf = new HiveConf(Driver.class);
@@ -236,13 +239,13 @@
   public void cleanUp() throws Exception {
     String warehousePath = ((new URI(testWarehouse)).getPath());
     // Drop any tables that remain due to unsuccessful runs
-    for (String s : new String[] { "src", "src1", "src_json", "src_thrift",
+    for (String s : new String[] {"src", "src1", "src_json", "src_thrift",
         "src_sequencefile", "srcpart", "srcbucket", "srcbucket2", "dest1",
         "dest2", "dest3", "dest4", "dest4_sequencefile", "dest_j1", "dest_j2",
-        "dest_g1", "dest_g2", "fetchtask_ioexception" }) {
+        "dest_g1", "dest_g2", "fetchtask_ioexception"}) {
       db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, s);
     }
-    for (String s : new String[] { "dest4.out", "union.out" }) {
+    for (String s : new String[] {"dest4.out", "union.out"}) {
       deleteDirectory(new File(warehousePath, s));
     }
     FunctionRegistry.unregisterTemporaryUDF("test_udaf");
@@ -287,8 +290,8 @@
     Path fpath;
     Path newfpath;
     HashMap<String, String> part_spec = new HashMap<String, String>();
-    for (String ds : new String[] { "2008-04-08", "2008-04-09" }) {
-      for (String hr : new String[] { "11", "12" }) {
+    for (String ds : new String[] {"2008-04-08", "2008-04-09"}) {
+      for (String hr : new String[] {"11", "12"}) {
         part_spec.clear();
         part_spec.put("ds", ds);
         part_spec.put("hr", hr);
@@ -310,7 +313,7 @@
     // db.createTable("srcbucket", cols, null, TextInputFormat.class,
     // IgnoreKeyTextOutputFormat.class, 2, bucketCols);
     srcTables.add("srcbucket");
-    for (String fname : new String[] { "srcbucket0.txt", "srcbucket1.txt" }) {
+    for (String fname : new String[] {"srcbucket0.txt", "srcbucket1.txt"}) {
       fpath = new Path(testFiles, fname);
       newfpath = new Path(tmppath, fname);
       fs.copyFromLocalFile(false, true, fpath, newfpath);
@@ -318,12 +321,13 @@
           + "' INTO TABLE srcbucket");
     }
 
-    runCreateTableCmd("CREATE TABLE srcbucket2(key int, value string) CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE");
+    runCreateTableCmd("CREATE TABLE srcbucket2(key int, value string) "
+        + "CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE");
     // db.createTable("srcbucket", cols, null, TextInputFormat.class,
     // IgnoreKeyTextOutputFormat.class, 2, bucketCols);
     srcTables.add("srcbucket2");
-    for (String fname : new String[] { "srcbucket20.txt", "srcbucket21.txt",
-        "srcbucket22.txt", "srcbucket23.txt" }) {
+    for (String fname : new String[] {"srcbucket20.txt", "srcbucket21.txt",
+        "srcbucket22.txt", "srcbucket23.txt"}) {
       fpath = new Path(testFiles, fname);
       newfpath = new Path(tmppath, fname);
       fs.copyFromLocalFile(false, true, fpath, newfpath);
@@ -331,7 +335,7 @@
           + "' INTO TABLE srcbucket2");
     }
 
-    for (String tname : new String[] { "src", "src1" }) {
+    for (String tname : new String[] {"src", "src1"}) {
       db.createTable(tname, cols, null, TextInputFormat.class,
           IgnoreKeyTextOutputFormat.class);
       srcTables.add(tname);
@@ -599,8 +603,7 @@
     }
   }
 
-  public int checkPlan(String tname, List<Task<? extends Serializable>> tasks)
-      throws Exception {
+  public int checkPlan(String tname, List<Task<? extends Serializable>> tasks) throws Exception {
 
     if (tasks != null) {
       File planDir = new File(outDir, "plan");
@@ -735,11 +738,17 @@
   public int checkCliDriverResults(String tname) throws Exception {
     String[] cmdArray;
 
-    cmdArray = new String[] { "diff", "-a", "-I", "file:", "-I", "/tmp/", "-I",
-        "invalidscheme:", "-I", "lastUpdateTime", "-I", "lastAccessTime", "-I",
-        "owner", "-I", "transient_lastDdlTime",
+    cmdArray = new String[] {
+        "diff", "-a",
+        "-I", "file:",
+        "-I", "/tmp/",
+        "-I", "invalidscheme:",
+        "-I", "lastUpdateTime",
+        "-I", "lastAccessTime",
+        "-I", "owner",
+        "-I", "transient_lastDdlTime",
         (new File(logDir, tname + ".out")).getPath(),
-        (new File(outDir, tname + ".out")).getPath() };
+        (new File(outDir, tname + ".out")).getPath()};
 
     System.out.println(org.apache.commons.lang.StringUtils.join(cmdArray, ' '));
 
@@ -773,8 +782,7 @@
     return pd.parse(qMap.get(tname));
   }
 
-  public List<Task<? extends Serializable>> analyzeAST(ASTNode ast)
-      throws Exception {
+  public List<Task<? extends Serializable>> analyzeAST(ASTNode ast) throws Exception {
 
     // Do semantic analysis and plan generation
     Context ctx = new Context(conf);
@@ -792,7 +800,7 @@
   }
 
   /**
-   * QTRunner: Runnable class for running a a single query file
+   * QTRunner: Runnable class for running a single query file.
    * 
    **/
   public static class QTRunner implements Runnable {

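checkCliDriverResults above shells out to diff, using -I patterns to ignore lines that legitimately differ between runs (temp paths, timestamps, owners). A hedged sketch of driving such a command array from Java, assuming a POSIX diff on the PATH; only one of the filter patterns used above is shown:

    // Illustrative only: diff exits 0 when the files match, 1 when they differ.
    // Output is ignored here; a real harness would drain or redirect it.
    static boolean filesMatch(String expectedPath, String actualPath) throws Exception {
      String[] cmdArray = new String[] {
          "diff", "-a",
          "-I", "transient_lastDdlTime",   // ignore a line that changes every run
          expectedPath, actualPath};
      Process p = new ProcessBuilder(cmdArray).start();
      return p.waitFor() == 0;
    }
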
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestMTQueries.java Tue Feb  9 07:55:30 2010
@@ -23,7 +23,7 @@
 import junit.framework.TestCase;
 
 /**
- * Suite for testing running of queries in multi-threaded mode
+ * Suite for testing query execution in multi-threaded mode.
  */
 public class TestMTQueries extends TestCase {
 
@@ -35,8 +35,8 @@
       + "/clientpositive";
 
   public void testMTQueries1() throws Exception {
-    String[] testNames = new String[] { "join1.q", "join2.q", "groupby1.q",
-        "groupby2.q", "join3.q", "input1.q", "input19.q" };
+    String[] testNames = new String[] {"join1.q", "join2.q", "groupby1.q",
+        "groupby2.q", "join3.q", "input1.q", "input19.q"};
     String[] logDirs = new String[testNames.length];
     String[] resDirs = new String[testNames.length];
     File[] qfiles = new File[testNames.length];