Posted to commits@hive.apache.org by zs...@apache.org on 2010/02/09 08:55:50 UTC

svn commit: r907950 [2/15] - in /hadoop/hive/trunk: ./ checkstyle/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/java/org/apache/hadoop/hive/contrib/file...

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/mr/example/IdentityMapper.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/mr/example/IdentityMapper.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/mr/example/IdentityMapper.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/mr/example/IdentityMapper.java Tue Feb  9 07:55:30 2010
@@ -25,13 +25,17 @@
  * Example Mapper (Identity).
  */
 public final class IdentityMapper {
+
   public static void main(final String[] args) throws Exception {
     new GenericMR().map(System.in, System.out, new Mapper() {
       @Override
-      public void map(final String[] record, final Output output)
-          throws Exception {
+      public void map(final String[] record, final Output output) throws Exception {
         output.collect(record);
       }
     });
   }
+
+  private IdentityMapper() {
+    // prevent instantiation
+  }
 }
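
The pattern applied here recurs throughout this commit: a final class whose
only entry point is a static main() gets a private constructor so it cannot
be instantiated (checkstyle's utility-class check). A minimal, self-contained
sketch of the idiom, with a made-up class name:

    public final class EchoTool {

      private EchoTool() {
        // prevent instantiation; all access is through static main()
      }

      public static void main(final String[] args) {
        for (final String arg : args) {
          System.out.println(arg);
        }
      }
    }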

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/mr/example/WordCountReduce.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/mr/example/WordCountReduce.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/mr/example/WordCountReduce.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/mr/example/WordCountReduce.java Tue Feb  9 07:55:30 2010
@@ -27,6 +27,11 @@
  * Example Reducer (WordCount).
  */
 public final class WordCountReduce {
+
+  private WordCountReduce() {
+    // prevent instantiation
+  }
+
   public static void main(final String[] args) throws Exception {
     new GenericMR().reduce(System.in, System.out, new Reducer() {
       public void reduce(String key, Iterator<String[]> records, Output output)
@@ -38,7 +43,7 @@
           count += Integer.parseInt(records.next()[1]);
         }
 
-        output.collect(new String[] { key, String.valueOf(count) });
+        output.collect(new String[] {key, String.valueOf(count)});
       }
     });
   }
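
For context, the Reducer above receives tab-separated [key, value] records
already grouped by key and emits one {key, sum} row per group. A hedged
sketch of driving it in-process the way the contrib tests further down do
(input lines and class name invented for illustration):

    import java.io.StringReader;
    import java.io.StringWriter;
    import java.util.Iterator;

    import org.apache.hadoop.hive.contrib.mr.GenericMR;
    import org.apache.hadoop.hive.contrib.mr.Output;
    import org.apache.hadoop.hive.contrib.mr.Reducer;

    public final class WordCountDemo {

      private WordCountDemo() {
        // prevent instantiation
      }

      public static void main(final String[] args) throws Exception {
        final StringWriter out = new StringWriter();
        // Three pre-sorted "word\tcount" lines; reduce() runs once per key.
        new GenericMR().reduce(new StringReader("hive\t1\nhive\t2\npig\t1"),
            out, new Reducer() {
              public void reduce(String key, Iterator<String[]> records,
                  Output output) throws Exception {
                int count = 0;
                while (records.hasNext()) {
                  count += Integer.parseInt(records.next()[1]);
                }
                output.collect(new String[] {key, String.valueOf(count)});
              }
            });
        System.out.print(out); // "hive\t3" and "pig\t1"
      }
    }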

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java Tue Feb  9 07:55:30 2010
@@ -211,7 +211,7 @@
     if (outputFormatString == null) {
       throw new SerDeException(
           "Cannot write data into table because \"output.format.string\""
-              + " is not specified in serde properties of the table.");
+          + " is not specified in serde properties of the table.");
     }
 
     // Get all the fields out.

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogDeserializer.java Tue Feb  9 07:55:30 2010
@@ -37,6 +37,10 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
+/**
+ * S3LogDeserializer.
+ *
+ */
 public class S3LogDeserializer implements Deserializer {
 
   public static final Log LOG = LogFactory.getLog(S3LogDeserializer.class
@@ -73,7 +77,7 @@
 
     cachedObjectInspector = ObjectInspectorFactory
         .getReflectionObjectInspector(S3LogStruct.class,
-            ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+        ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
 
     LOG.debug(getClass().getName() + ": initialized");
   }
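
The call being re-indented here is what lets the deserializer expose a plain
Java class as a Hive row: a reflection-based ObjectInspector derives struct
fields from the class's public members. A hedged sketch with a toy struct
standing in for S3LogStruct:

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

    public final class ReflectionOIDemo {

      // Toy stand-in for S3LogStruct: public fields become struct fields.
      public static class Point {
        public Integer x;
        public Integer y;
      }

      private ReflectionOIDemo() {
        // prevent instantiation
      }

      public static void main(final String[] args) {
        StructObjectInspector oi = (StructObjectInspector) ObjectInspectorFactory
            .getReflectionObjectInspector(Point.class,
                ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
        for (StructField f : oi.getAllStructFieldRefs()) {
          System.out.println(f.getFieldName()); // x, y
        }
      }
    }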

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogStruct.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogStruct.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogStruct.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/s3/S3LogStruct.java Tue Feb  9 07:55:30 2010
@@ -1,5 +1,9 @@
 package org.apache.hadoop.hive.contrib.serde2.s3;
 
+/**
+ * S3LogStruct.
+ *
+ */
 public class S3LogStruct {
 
   public String bucketowner;

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java Tue Feb  9 07:55:30 2010
@@ -32,7 +32,7 @@
  * more efficient.
  * 
  */
-public class UDAFExampleAvg extends UDAF {
+public final class UDAFExampleAvg extends UDAF {
 
   /**
    * The internal state of an aggregation for average.
@@ -119,4 +119,8 @@
     }
   }
 
+  private UDAFExampleAvg() {
+    // prevent instantiation
+  }
+
 }
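
Making the UDAF class final with a private constructor is safe because Hive
never instantiates the outer class; it reflectively instantiates the nested
evaluator(s). A hedged skeleton of that old-style UDAF shape (names invented;
only the UDAF and UDAFEvaluator types are from the real API):

    import org.apache.hadoop.hive.ql.exec.UDAF;
    import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;

    public final class UDAFExampleCount extends UDAF {

      private UDAFExampleCount() {
        // prevent instantiation; Hive uses the evaluator below
      }

      /** Counts non-null values; located and driven by reflection. */
      public static class UDAFCountEvaluator implements UDAFEvaluator {
        private long count;

        public void init() {
          count = 0;
        }

        public boolean iterate(Object o) {
          if (o != null) {
            count++;
          }
          return true;
        }

        public Long terminatePartial() {
          return count;
        }

        public boolean merge(Long other) {
          if (other != null) {
            count += other;
          }
          return true;
        }

        public Long terminate() {
          return count;
        }
      }
    }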

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java Tue Feb  9 07:55:30 2010
@@ -19,6 +19,10 @@
 
 import org.apache.hadoop.hive.ql.exec.UDF;
 
+/**
+ * UDFExampleAdd.
+ *
+ */
 public class UDFExampleAdd extends UDF {
 
   public Integer evaluate(Integer... a) {

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java Tue Feb  9 07:55:30 2010
@@ -21,6 +21,10 @@
 
 import org.apache.hadoop.hive.ql.exec.UDF;
 
+/**
+ * UDFExampleArraySum.
+ *
+ */
 public class UDFExampleArraySum extends UDF {
 
   public Double evaluate(List<Double> a) {

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java Tue Feb  9 07:55:30 2010
@@ -19,6 +19,10 @@
 
 import org.apache.hadoop.hive.ql.exec.UDF;
 
+/**
+ * UDFExampleFormat.
+ *
+ */
 public class UDFExampleFormat extends UDF {
 
   public String evaluate(String format, Object... args) {

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java Tue Feb  9 07:55:30 2010
@@ -23,6 +23,10 @@
 
 import org.apache.hadoop.hive.ql.exec.UDF;
 
+/**
+ * UDFExampleMapConcat.
+ *
+ */
 public class UDFExampleMapConcat extends UDF {
 
   public String evaluate(Map<String, String> a) {

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java Tue Feb  9 07:55:30 2010
@@ -21,6 +21,10 @@
 
 import org.apache.hadoop.hive.ql.exec.UDF;
 
+/**
+ * UDFExampleStructPrint.
+ *
+ */
 public class UDFExampleStructPrint extends UDF {
 
   public String evaluate(Object a) {

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java Tue Feb  9 07:55:30 2010
@@ -21,8 +21,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -30,8 +30,12 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
-@Description(name = "explode2", value = "_FUNC_(a) - like explode, but outputs two identical columns (for "
-    + "testing purposes)")
+/**
+ * GenericUDTFExplode2.
+ *
+ */
+@Description(name = "explode2",
+    value = "_FUNC_(a) - like explode, but outputs two identical columns (for testing purposes)")
 public class GenericUDTFExplode2 extends GenericUDTF {
 
   ListObjectInspector listOI = null;

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesInput.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesInput.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesInput.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesInput.java Tue Feb  9 07:55:30 2010
@@ -430,9 +430,9 @@
   public byte[] readRawVector() throws IOException {
     Buffer buffer = new Buffer();
     int length = readVectorHeader();
-    buffer.append(new byte[] { (byte) Type.VECTOR.code,
+    buffer.append(new byte[] {(byte) Type.VECTOR.code,
         (byte) (0xff & (length >> 24)), (byte) (0xff & (length >> 16)),
-        (byte) (0xff & (length >> 8)), (byte) (0xff & length) });
+        (byte) (0xff & (length >> 8)), (byte) (0xff & length)});
     for (int i = 0; i < length; i++) {
       buffer.append(readRaw());
     }
@@ -473,13 +473,13 @@
    * @throws IOException
    */
   public byte[] readRawList() throws IOException {
-    Buffer buffer = new Buffer(new byte[] { (byte) Type.LIST.code });
+    Buffer buffer = new Buffer(new byte[] {(byte) Type.LIST.code});
     byte[] bytes = readRaw();
     while (bytes != null) {
       buffer.append(bytes);
       bytes = readRaw();
     }
-    buffer.append(new byte[] { (byte) Type.MARKER.code });
+    buffer.append(new byte[] {(byte) Type.MARKER.code});
     return buffer.get();
   }
 
@@ -510,9 +510,9 @@
   public byte[] readRawMap() throws IOException {
     Buffer buffer = new Buffer();
     int length = readMapHeader();
-    buffer.append(new byte[] { (byte) Type.MAP.code,
+    buffer.append(new byte[] {(byte) Type.MAP.code,
         (byte) (0xff & (length >> 24)), (byte) (0xff & (length >> 16)),
-        (byte) (0xff & (length >> 8)), (byte) (0xff & length) });
+        (byte) (0xff & (length >> 8)), (byte) (0xff & length)});
     for (int i = 0; i < length; i++) {
       buffer.append(readRaw());
       buffer.append(readRaw());
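
Both hunks build the same five-byte frame by hand: a one-byte type code
followed by the element count as a big-endian 32-bit integer. A
self-contained sketch of that packing and its inverse (the type code 8 is
only a placeholder):

    public final class LengthHeaderDemo {

      private LengthHeaderDemo() {
        // prevent instantiation
      }

      /** Type code, then the length as four big-endian bytes. */
      static byte[] pack(byte typeCode, int length) {
        return new byte[] {typeCode,
            (byte) (0xff & (length >> 24)), (byte) (0xff & (length >> 16)),
            (byte) (0xff & (length >> 8)), (byte) (0xff & length)};
      }

      /** Reassembles the length; & 0xff undoes sign extension. */
      static int unpackLength(byte[] header) {
        return ((header[1] & 0xff) << 24) | ((header[2] & 0xff) << 16)
            | ((header[3] & 0xff) << 8) | (header[4] & 0xff);
      }

      public static void main(final String[] args) {
        byte[] header = pack((byte) 8, 1234567);
        System.out.println(unpackLength(header)); // 1234567
      }
    }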

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordReader.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordReader.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordReader.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordReader.java Tue Feb  9 07:55:30 2010
@@ -49,23 +49,27 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
+/**
+ * TypedBytesRecordReader.
+ *
+ */
 public class TypedBytesRecordReader implements RecordReader {
 
   private DataInputStream din;
   private TypedBytesWritableInput tbIn;
 
-  NonSyncDataOutputBuffer barrStr = new NonSyncDataOutputBuffer();
-  TypedBytesWritableOutput tbOut;
+  private NonSyncDataOutputBuffer barrStr = new NonSyncDataOutputBuffer();
+  private TypedBytesWritableOutput tbOut;
 
-  ArrayList<Writable> row = new ArrayList<Writable>(0);
-  ArrayList<String> rowTypeName = new ArrayList<String>(0);
-  List<String> columnTypes;
-
-  ArrayList<ObjectInspector> srcOIns = new ArrayList<ObjectInspector>();
-  ArrayList<ObjectInspector> dstOIns = new ArrayList<ObjectInspector>();
-  ArrayList<Converter> converters = new ArrayList<Converter>();
+  private ArrayList<Writable> row = new ArrayList<Writable>(0);
+  private ArrayList<String> rowTypeName = new ArrayList<String>(0);
+  private List<String> columnTypes;
+
+  private ArrayList<ObjectInspector> srcOIns = new ArrayList<ObjectInspector>();
+  private ArrayList<ObjectInspector> dstOIns = new ArrayList<ObjectInspector>();
+  private ArrayList<Converter> converters = new ArrayList<Converter>();
 
-  static private Map<Type, String> typedBytesToTypeName = new HashMap<Type, String>();
+  private static Map<Type, String> typedBytesToTypeName = new HashMap<Type, String>();
   static {
     typedBytesToTypeName.put(getType(1), Constants.TINYINT_TYPE_NAME);
     typedBytesToTypeName.put(getType(2), Constants.BOOLEAN_TYPE_NAME);
@@ -77,8 +81,7 @@
     typedBytesToTypeName.put(getType(11), Constants.SMALLINT_TYPE_NAME);
   }
 
-  public void initialize(InputStream in, Configuration conf, Properties tbl)
-      throws IOException {
+  public void initialize(InputStream in, Configuration conf, Properties tbl) throws IOException {
     din = new DataInputStream(in);
     tbIn = new TypedBytesWritableInput(din);
     tbOut = new TypedBytesWritableOutput(barrStr);
@@ -152,7 +155,7 @@
             .getTypeEntryFromTypeName(typeName);
         srcOIns
             .add(PrimitiveObjectInspectorFactory
-                .getPrimitiveWritableObjectInspector(srcTypeEntry.primitiveCategory));
+            .getPrimitiveWritableObjectInspector(srcTypeEntry.primitiveCategory));
         converters.add(ObjectInspectorConverters.getConverter(srcOIns.get(pos),
             dstOIns.get(pos)));
       } else {
@@ -164,38 +167,30 @@
 
       Writable w = row.get(pos);
       switch (type) {
-      case BYTE: {
+      case BYTE:
         tbIn.readByte((ByteWritable) w);
         break;
-      }
-      case BOOL: {
+      case BOOL:
         tbIn.readBoolean((BooleanWritable) w);
         break;
-      }
-      case INT: {
+      case INT:
         tbIn.readInt((IntWritable) w);
         break;
-      }
-      case SHORT: {
+      case SHORT:
         tbIn.readShort((ShortWritable) w);
         break;
-      }
-      case LONG: {
+      case LONG:
         tbIn.readLong((LongWritable) w);
         break;
-      }
-      case FLOAT: {
+      case FLOAT:
         tbIn.readFloat((FloatWritable) w);
         break;
-      }
-      case DOUBLE: {
+      case DOUBLE:
         tbIn.readDouble((DoubleWritable) w);
         break;
-      }
-      case STRING: {
+      case STRING:
         tbIn.readText((Text) w);
         break;
-      }
       default:
         assert false; // should never come here
       }
@@ -237,7 +232,7 @@
     }
   }
 
-  static public Type getType(int code) {
+  public static Type getType(int code) {
     for (Type type : Type.values()) {
       if (type.code == code) {
         return type;
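
getType() scans Type.values() on every call, while the class above builds its
code-to-type-name map once in a static initializer. A hedged sketch of
applying the same build-once idea to the enum lookup itself (enum and codes
invented for illustration):

    import java.util.HashMap;
    import java.util.Map;

    public final class CodeLookupDemo {

      enum Kind {
        BYTE(1), BOOL(2), INT(3);

        final int code;

        Kind(int code) {
          this.code = code;
        }
      }

      // Populated once, like typedBytesToTypeName, instead of scanning
      // values() on every call.
      private static final Map<Integer, Kind> BY_CODE = new HashMap<Integer, Kind>();
      static {
        for (Kind k : Kind.values()) {
          BY_CODE.put(k.code, k);
        }
      }

      private CodeLookupDemo() {
        // prevent instantiation
      }

      public static Kind getKind(int code) {
        return BY_CODE.get(code);
      }

      public static void main(final String[] args) {
        System.out.println(getKind(2)); // BOOL
      }
    }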

Modified: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordWriter.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordWriter.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordWriter.java (original)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/util/typedbytes/TypedBytesRecordWriter.java Tue Feb  9 07:55:30 2010
@@ -26,6 +26,10 @@
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Writable;
 
+/**
+ * TypedBytesRecordWriter.
+ *
+ */
 public class TypedBytesRecordWriter implements RecordWriter {
 
   private OutputStream out;

Modified: hadoop/hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java (original)
+++ hadoop/hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/mr/TestGenericMR.java Tue Feb  9 07:55:30 2010
@@ -24,18 +24,22 @@
 
 import junit.framework.TestCase;
 
+/**
+ * TestGenericMR.
+ *
+ */
 public final class TestGenericMR extends TestCase {
   public void testReduceTooFar() throws Exception {
     try {
       new GenericMR().reduce(new StringReader("a\tb\tc"), new StringWriter(),
           new Reducer() {
-            public void reduce(String key, Iterator<String[]> records,
-                Output output) throws Exception {
-              while (true) {
-                records.next();
-              }
-            }
-          });
+        public void reduce(String key, Iterator<String[]> records,
+            Output output) throws Exception {
+          while (true) {
+            records.next();
+          }
+        }
+      });
     } catch (final NoSuchElementException nsee) {
       // expected
       return;
@@ -70,7 +74,7 @@
       public void map(String[] record, Output output) throws Exception {
         for (final String kvs : record[0].split(",")) {
           final String[] kv = kvs.split("=");
-          output.collect(new String[] { kv[0], kv[1] });
+          output.collect(new String[] {kv[0], kv[1]});
         }
       }
     });
@@ -101,7 +105,7 @@
           count += Integer.parseInt(records.next()[1]);
         }
 
-        output.collect(new String[] { key, String.valueOf(count) });
+        output.collect(new String[] {key, String.valueOf(count)});
       }
     });
 

Modified: hadoop/hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java (original)
+++ hadoop/hive/trunk/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java Tue Feb  9 07:55:30 2010
@@ -29,6 +29,10 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.io.Text;
 
+/**
+ * TestRegexSerDe.
+ *
+ */
 public class TestRegexSerDe extends TestCase {
 
   private SerDe createSerDe(String fieldNames, String fieldTypes,
@@ -53,14 +57,16 @@
       SerDe serDe = createSerDe(
           "host,identity,user,time,request,status,size,referer,agent",
           "string,string,string,string,string,string,string,string,string",
-          "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") ([0-9]*) ([0-9]*) ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\")",
+          "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") " 
+          + "([0-9]*) ([0-9]*) ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\")",
           "%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s");
 
       // Data
       Text t = new Text(
           "127.0.0.1 - - [26/May/2009:00:00:00 +0000] "
               + "\"GET /someurl/?track=Blabla(Main) HTTP/1.1\" 200 5864 - "
-              + "\"Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.65 Safari/525.19\"");
+              + "\"Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) " 
+              + "AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.65 Safari/525.19\"");
 
       // Deserialize
       Object row = serDe.deserialize(t);
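
The test wires the serde entirely through table properties; a hedged sketch
of the same initialization outside the test harness (the "columns" and
"columns.types" property keys are assumptions, inferred from the field-name
and field-type strings the test passes in):

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.contrib.serde2.RegexSerDe;
    import org.apache.hadoop.io.Text;

    public final class RegexSerDeDemo {

      private RegexSerDeDemo() {
        // prevent instantiation
      }

      public static void main(final String[] args) throws Exception {
        Properties tbl = new Properties();
        tbl.setProperty("columns", "host,status");         // assumed key
        tbl.setProperty("columns.types", "string,string"); // assumed key
        tbl.setProperty("input.regex", "([^ ]*) ([0-9]*)");
        // Without output.format.string, serialization throws the
        // SerDeException shown in the RegexSerDe hunk above.
        tbl.setProperty("output.format.string", "%1$s %2$s");

        RegexSerDe serde = new RegexSerDe();
        serde.initialize(new Configuration(), tbl);
        System.out.println(serde.deserialize(new Text("127.0.0.1 200")));
      }
    }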

Modified: hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java (original)
+++ hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java Tue Feb  9 07:55:30 2010
@@ -29,7 +29,7 @@
   }
 
   /**
-   * HWIAuth is used in SortedSets(s) the compartTo method is required
+   * HWIAuth is used in SortedSet(s); the compareTo method is required.
    * 
    * @return chained call to String.compareTo based on user property
    */
@@ -45,7 +45,7 @@
   }
 
   /**
-   * HWIAuth is used in Map(s) the hashCode method is required
+   * HWIAuth is used in Map(s); the hashCode method is required.
    * 
    * @see java.lang.Object#hashCode()
    */
@@ -58,7 +58,7 @@
   }
 
   /**
-   * HWIAuth is used in Map(s) the equals method is required
+   * HWIAuth is used in Map(s); the equals method is required.
    * 
    * @see java.lang.Object#equals(java.lang.Object)
    */

Modified: hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIException.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIException.java (original)
+++ hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIException.java Tue Feb  9 07:55:30 2010
@@ -1,5 +1,9 @@
 package org.apache.hadoop.hive.hwi;
 
+/**
+ * HWIException.
+ *
+ */
 public class HWIException extends Exception {
 
   private static final long serialVersionUID = 1L;
@@ -8,17 +12,17 @@
     super();
   }
 
-  /** Specify an error String with the Exception */
+  /** Specify an error String with the Exception. */
   public HWIException(String arg0) {
     super(arg0);
   }
 
-  /** Wrap an Exception in HWIException */
+  /** Wrap an Exception in HWIException. */
   public HWIException(Throwable arg0) {
     super(arg0);
   }
 
-  /** Specify an error String and wrap an Exception in HWIException */
+  /** Specify an error String and wrap an Exception in HWIException. */
   public HWIException(String arg0, Throwable arg1) {
     super(arg0, arg1);
   }
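
These three overloads mirror java.lang.Throwable's standard chaining
constructors; wrapping keeps the original failure reachable via getCause().
A tiny self-contained illustration of the pattern:

    public final class WrapDemo {

      static class AppException extends Exception {
        private static final long serialVersionUID = 1L;

        AppException(String msg, Throwable cause) {
          super(msg, cause);
        }
      }

      private WrapDemo() {
        // prevent instantiation
      }

      public static void main(final String[] args) {
        try {
          Integer.parseInt("not a number");
        } catch (NumberFormatException e) {
          // The cause (and its stack trace) rides along with the wrapper.
          System.out.println(new AppException("bad input", e).getCause());
        }
      }
    }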

Modified: hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java (original)
+++ hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java Tue Feb  9 07:55:30 2010
@@ -115,7 +115,7 @@
   }
 
   /**
-   * Shut down the running HWI Server
+   * Shut down the running HWI Server.
    * 
    * @throws Exception
    *           Running Thread.stop() can and probably will throw this

Modified: hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java (original)
+++ hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java Tue Feb  9 07:55:30 2010
@@ -51,7 +51,7 @@
     NEW, READY, QUERY_SET, QUERY_RUNNING, DESTROY, KILL_QUERY
   };
 
-  /** The Web Interface sessionName this is used to identify the session */
+  /** The Web Interface sessionName; this is used to identify the session. */
   private final String sessionName;
 
   /**
@@ -63,38 +63,38 @@
 
   private CliSessionState ss;
 
-  /** Standard out from the session will be written to this local file */
+  /** Standard out from the session will be written to this local file. */
   private String resultFile;
 
-  /** Standard error from the session will be written to this local file */
+  /** Standard error from the session will be written to this local file. */
   private String errorFile;
 
   /**
   * The results from the Driver. This is used for storing the most recent
-   * results from the driver in memory
+   * results from the driver in memory.
    */
   private ArrayList<ArrayList<String>> resultBucket;
 
-  /** Limits the resultBucket to be no greater then this size */
+  /** Limits the resultBucket to be no greater than this size. */
   private int resultBucketMaxSize;
 
-  /** List of queries that this item should/has operated on */
+  /** List of queries that this item should/has operated on. */
   private List<String> queries;
 
-  /** status code results of queries */
+  /** Status code results of queries. */
   private List<Integer> queryRet;
 
-  /** Reference to the configuration */
+  /** Reference to the configuration. */
   private HiveConf conf;
 
-  /** User privileges */
+  /** User privileges. */
   private HWIAuth auth;
 
   public Thread runnable;
 
   /**
    * Threading SessionState issues require us to capture a reference to the hive
-   * history file and store it
+   * history file and store it.
    */
   private String historyFile;
 
@@ -150,7 +150,7 @@
      * HiveHistoryFileName will not be accessible outside this thread. We must
      * capture this now.
      */
-    historyFile = ss.get().getHiveHistory().getHistFileName();
+    historyFile = SessionState.get().getHiveHistory().getHistFileName();
     l4j.debug("HWISessionItem itemInit Complete " + getSessionName());
     status = WebSessionItemStatus.READY;
 
@@ -214,7 +214,7 @@
   }
 
   /**
-   * Helper function to get configuration variables
+   * Helper function to get configuration variables.
    * 
    * @param wanted
    *          a ConfVar
@@ -392,7 +392,7 @@
   }
 
   /**
-   * This is a chained call to SessionState.getIsSilent()
+   * This is a chained call to SessionState.getIsSilent().
    */
   public boolean getSSIsSilent() throws HWIException {
     if (ss == null) {
@@ -401,7 +401,7 @@
     return ss.getIsSilent();
   }
 
-  /** to support sorting/Set */
+  /** To support sorting/Set. */
   public int compareTo(HWISessionItem other) {
     if (other == null) {
       return -1;
@@ -427,7 +427,7 @@
   }
 
   /**
-   * Uses the sessionName property to compare to sessions
+   * Uses the sessionName property to compare two sessions.
    * 
    * @return true if sessionNames are equal false otherwise
    */
@@ -456,7 +456,7 @@
   }
 
   /**
-   * The session name is an identifier to recognize the session
+   * The session name is an identifier to recognize the session.
    * 
    * @return the session's name
    */
@@ -476,7 +476,7 @@
   }
 
   /**
-   * Currently unused
+   * Currently unused.
    * 
    * @return a String with the full path to the error file.
    */
@@ -485,7 +485,7 @@
   }
 
   /**
-   * Currently unused
+   * Currently unused.
    * 
    * @param errorFile
    *          the full path to the file for results.
@@ -509,13 +509,13 @@
     this.auth = auth;
   }
 
-  /** returns an unmodifiable list of queries */
+  /** Returns an unmodifiable list of queries. */
   public List<String> getQueries() {
     return java.util.Collections.unmodifiableList(queries);
   }
 
   /**
-   * adds a new query to the execution list
+   * Adds a new query to the execution list.
    * 
    * @param query
    *          query to be added to the list
@@ -526,7 +526,7 @@
   }
 
   /**
-   * removes a query from the execution list
+   * Removes a query from the execution list.
    * 
    * @param item
    *          the 0 based index of the item to be removed
@@ -541,13 +541,13 @@
     queries.clear();
   }
 
-  /** returns the value for resultBucketMaxSize */
+  /** Returns the value for resultBucketMaxSize. */
   public int getResultBucketMaxSize() {
     return resultBucketMaxSize;
   }
 
   /**
-   * sets the value for resultBucketMaxSize
+   * Sets the value for resultBucketMaxSize.
    * 
    * @param size
    *          the new size
@@ -556,13 +556,13 @@
     resultBucketMaxSize = size;
   }
 
-  /** gets the value for resultBucket */
+  /** Gets the value for resultBucket. */
   public ArrayList<ArrayList<String>> getResultBucket() {
     return resultBucket;
   }
 
   /**
-   * The HWISessionItem stores the result of each query in an array
+   * The HWISessionItem stores the result of each query in an array.
    * 
    * @return unmodifiable list of return codes
    */
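
The one behavioral fix in this file is the switch from ss.get() to
SessionState.get(): the former compiled only because Java permits calling a
static method through an instance reference, which hides the fact that the
field ss plays no part in the call. A minimal demonstration of that trap:

    public final class StaticAccessDemo {

      static class Session {
        private static final Session CURRENT = new Session();

        static Session get() {
          return CURRENT;
        }
      }

      private StaticAccessDemo() {
        // prevent instantiation
      }

      public static void main(final String[] args) {
        Session ss = new Session();
        // Both calls hit the same static method; the first only *looks*
        // like it depends on ss, which is why checkstyle flags it.
        System.out.println(ss.get() == Session.get()); // true
      }
    }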

Modified: hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java (original)
+++ hadoop/hive/trunk/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java Tue Feb  9 07:55:30 2010
@@ -17,11 +17,11 @@
  */
 package org.apache.hadoop.hive.hwi;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
-import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -200,7 +200,7 @@
   }
 
   /**
-   * Used to list all users that have at least one session
+   * Used to list all users that have at least one session.
    * 
   * @return keySet of all users that have any sessions
    */
@@ -209,7 +209,7 @@
   }
 
   /**
-   * Used to list all the sessions of a user
+   * Used to list all the sessions of a user.
    * 
    * @param auth
    *          the user being enquired about

Modified: hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWIServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWIServer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWIServer.java (original)
+++ hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWIServer.java Tue Feb  9 07:55:30 2010
@@ -10,6 +10,10 @@
 import org.apache.hadoop.hive.shims.JettyShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
 
+/**
+ * TestHWIServer.
+ *
+ */
 public class TestHWIServer extends TestCase {
 
   public TestHWIServer(String name) {

Modified: hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java (original)
+++ hadoop/hive/trunk/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWISessionManager.java Tue Feb  9 07:55:30 2010
@@ -29,6 +29,10 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.history.HiveHistoryViewer;
 
+/**
+ * TestHWISessionManager.
+ *
+ */
 public class TestHWISessionManager extends TestCase {
 
   private static String tableName = "test_hwi_table";
@@ -63,7 +67,7 @@
     // create a user
     HWIAuth user1 = new HWIAuth();
     user1.setUser("hadoop");
-    user1.setGroups(new String[] { "hadoop" });
+    user1.setGroups(new String[] {"hadoop"});
 
     // create two sessions for user
     HWISessionItem user1_item1 = hsm.createSession(user1, "session1");
@@ -72,7 +76,7 @@
     // create second user
     HWIAuth user2 = new HWIAuth();
     user2.setUser("user2");
-    user2.setGroups(new String[] { "user2" });
+    user2.setGroups(new String[] {"user2"});
 
     // create one session for this user
     HWISessionItem user2_item1 = hsm.createSession(user2, "session1");

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveCallableStatement.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveCallableStatement.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveCallableStatement.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveCallableStatement.java Tue Feb  9 07:55:30 2010
@@ -41,6 +41,10 @@
 import java.util.Calendar;
 import java.util.Map;
 
+/**
+ * HiveCallableStatement.
+ *
+ */
 public class HiveCallableStatement implements java.sql.CallableStatement {
 
   /**
@@ -100,8 +104,7 @@
    * @see java.sql.CallableStatement#getBigDecimal(int, int)
    */
 
-  public BigDecimal getBigDecimal(int parameterIndex, int scale)
-      throws SQLException {
+  public BigDecimal getBigDecimal(int parameterIndex, int scale) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -476,8 +479,7 @@
    * @see java.sql.CallableStatement#getObject(java.lang.String, java.util.Map)
    */
 
-  public Object getObject(String parameterName, Map<String, Class<?>> map)
-      throws SQLException {
+  public Object getObject(String parameterName, Map<String, Class<?>> map) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -665,8 +667,7 @@
    * @see java.sql.CallableStatement#getTimestamp(int, java.util.Calendar)
    */
 
-  public Timestamp getTimestamp(int parameterIndex, Calendar cal)
-      throws SQLException {
+  public Timestamp getTimestamp(int parameterIndex, Calendar cal) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -678,8 +679,7 @@
    * java.util.Calendar)
    */
 
-  public Timestamp getTimestamp(String parameterName, Calendar cal)
-      throws SQLException {
+  public Timestamp getTimestamp(String parameterName, Calendar cal) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -712,8 +712,7 @@
    * @see java.sql.CallableStatement#registerOutParameter(int, int)
    */
 
-  public void registerOutParameter(int parameterIndex, int sqlType)
-      throws SQLException {
+  public void registerOutParameter(int parameterIndex, int sqlType) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -724,8 +723,7 @@
    * @see java.sql.CallableStatement#registerOutParameter(java.lang.String, int)
    */
 
-  public void registerOutParameter(String parameterName, int sqlType)
-      throws SQLException {
+  public void registerOutParameter(String parameterName, int sqlType) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveConnection.java Tue Feb  9 07:55:30 2010
@@ -47,6 +47,10 @@
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
 
+/**
+ * HiveConnection.
+ *
+ */
 public class HiveConnection implements java.sql.Connection {
   JdbcSessionState session;
 
@@ -58,7 +62,7 @@
   private static final String URI_PREFIX = "jdbc:hive://";
 
   /**
-   * TODO: - parse uri (use java.net.URI?)
+   * TODO: - parse uri (use java.net.URI?).
    */
   public HiveConnection(String uri, Properties info) throws SQLException {
     session = new JdbcSessionState(new HiveConf(SessionState.class));

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDataSource.java Tue Feb  9 07:55:30 2010
@@ -24,6 +24,10 @@
 
 import javax.sql.DataSource;
 
+/**
+ * HiveDataSource.
+ *
+ */
 public class HiveDataSource implements DataSource {
 
   /**

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDatabaseMetaData.java Tue Feb  9 07:55:30 2010
@@ -28,6 +28,10 @@
 import java.util.jar.Attributes;
 import java.util.jar.Manifest;
 
+/**
+ * HiveDatabaseMetaData.
+ *
+ */
 public class HiveDatabaseMetaData implements java.sql.DatabaseMetaData {
 
   /**

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java Tue Feb  9 07:55:30 2010
@@ -19,16 +19,20 @@
 package org.apache.hadoop.hive.jdbc;
 
 import java.sql.Connection;
+import java.sql.Driver;
 import java.sql.DriverPropertyInfo;
 import java.sql.SQLException;
 import java.util.Properties;
 import java.util.regex.Pattern;
 
-public class HiveDriver implements java.sql.Driver {
+/**
+ * HiveDriver.
+ *
+ */
+public class HiveDriver implements Driver {
   static {
     try {
-      java.sql.DriverManager
-          .registerDriver(new org.apache.hadoop.hive.jdbc.HiveDriver());
+      java.sql.DriverManager.registerDriver(new HiveDriver());
     } catch (SQLException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
@@ -51,27 +55,27 @@
   private static final boolean JDBC_COMPLIANT = false;
 
   /**
-   * The required prefix for the connection url
+   * The required prefix for the connection URL.
    */
   private static final String URL_PREFIX = "jdbc:hive://";
 
   /**
-   * If host is provided, without a port
+   * If host is provided, without a port.
    */
   private static final String DEFAULT_PORT = "10000";
 
   /**
-   * Property key for the database name
+   * Property key for the database name.
    */
   private static final String DBNAME_PROPERTY_KEY = "DBNAME";
 
   /**
-   * Property key for the Hive Server host
+   * Property key for the Hive Server host.
    */
   private static final String HOST_PROPERTY_KEY = "HOST";
 
   /**
-   * Property key for the Hive Server port
+   * Property key for the Hive Server port.
    */
   private static final String PORT_PROPERTY_KEY = "PORT";
 
@@ -122,8 +126,7 @@
     return MINOR_VERSION;
   }
 
-  public DriverPropertyInfo[] getPropertyInfo(String url, Properties info)
-      throws SQLException {
+  public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
     if (info == null) {
       info = new Properties();
     }
@@ -173,8 +176,7 @@
    * @return
    * @throws java.sql.SQLException
    */
-  private Properties parseURL(String url, Properties defaults)
-      throws java.sql.SQLException {
+  private Properties parseURL(String url, Properties defaults) throws SQLException {
     Properties urlProps = (defaults != null) ? new Properties(defaults)
         : new Properties();
 

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java Tue Feb  9 07:55:30 2010
@@ -29,6 +29,7 @@
 import java.sql.Date;
 import java.sql.NClob;
 import java.sql.ParameterMetaData;
+import java.sql.PreparedStatement;
 import java.sql.Ref;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
@@ -42,7 +43,11 @@
 
 import org.apache.hadoop.hive.service.HiveInterface;
 
-public class HivePreparedStatement implements java.sql.PreparedStatement {
+/**
+ * HivePreparedStatement.
+ *
+ */
+public class HivePreparedStatement implements PreparedStatement {
   String sql;
   JdbcSessionState session;
   HiveInterface client;
@@ -155,8 +160,7 @@
    * @see java.sql.PreparedStatement#setAsciiStream(int, java.io.InputStream)
    */
 
-  public void setAsciiStream(int parameterIndex, InputStream x)
-      throws SQLException {
+  public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -168,8 +172,7 @@
    * int)
    */
 
-  public void setAsciiStream(int parameterIndex, InputStream x, int length)
-      throws SQLException {
+  public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -181,8 +184,7 @@
    * long)
    */
 
-  public void setAsciiStream(int parameterIndex, InputStream x, long length)
-      throws SQLException {
+  public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -193,8 +195,7 @@
    * @see java.sql.PreparedStatement#setBigDecimal(int, java.math.BigDecimal)
    */
 
-  public void setBigDecimal(int parameterIndex, BigDecimal x)
-      throws SQLException {
+  public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -205,8 +206,7 @@
    * @see java.sql.PreparedStatement#setBinaryStream(int, java.io.InputStream)
    */
 
-  public void setBinaryStream(int parameterIndex, InputStream x)
-      throws SQLException {
+  public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -218,8 +218,7 @@
    * int)
    */
 
-  public void setBinaryStream(int parameterIndex, InputStream x, int length)
-      throws SQLException {
+  public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -231,8 +230,7 @@
    * long)
    */
 
-  public void setBinaryStream(int parameterIndex, InputStream x, long length)
-      throws SQLException {
+  public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -254,8 +252,7 @@
    * @see java.sql.PreparedStatement#setBlob(int, java.io.InputStream)
    */
 
-  public void setBlob(int parameterIndex, InputStream inputStream)
-      throws SQLException {
+  public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -266,8 +263,7 @@
    * @see java.sql.PreparedStatement#setBlob(int, java.io.InputStream, long)
    */
 
-  public void setBlob(int parameterIndex, InputStream inputStream, long length)
-      throws SQLException {
+  public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -311,8 +307,7 @@
    * @see java.sql.PreparedStatement#setCharacterStream(int, java.io.Reader)
    */
 
-  public void setCharacterStream(int parameterIndex, Reader reader)
-      throws SQLException {
+  public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -371,8 +366,7 @@
    * @see java.sql.PreparedStatement#setClob(int, java.io.Reader, long)
    */
 
-  public void setClob(int parameterIndex, Reader reader, long length)
-      throws SQLException {
+  public void setClob(int parameterIndex, Reader reader, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -395,8 +389,7 @@
    * java.util.Calendar)
    */
 
-  public void setDate(int parameterIndex, Date x, Calendar cal)
-      throws SQLException {
+  public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -451,8 +444,7 @@
    * @see java.sql.PreparedStatement#setNCharacterStream(int, java.io.Reader)
    */
 
-  public void setNCharacterStream(int parameterIndex, Reader value)
-      throws SQLException {
+  public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -498,8 +490,7 @@
    * @see java.sql.PreparedStatement#setNClob(int, java.io.Reader, long)
    */
 
-  public void setNClob(int parameterIndex, Reader reader, long length)
-      throws SQLException {
+  public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -532,8 +523,7 @@
    * @see java.sql.PreparedStatement#setNull(int, int, java.lang.String)
    */
 
-  public void setNull(int paramIndex, int sqlType, String typeName)
-      throws SQLException {
+  public void setNull(int paramIndex, int sqlType, String typeName) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -567,8 +557,8 @@
    * @see java.sql.PreparedStatement#setObject(int, java.lang.Object, int, int)
    */
 
-  public void setObject(int parameterIndex, Object x, int targetSqlType,
-      int scale) throws SQLException {
+  public void setObject(int parameterIndex, Object x, int targetSqlType, int scale)
+      throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -601,8 +591,7 @@
    * @see java.sql.PreparedStatement#setSQLXML(int, java.sql.SQLXML)
    */
 
-  public void setSQLXML(int parameterIndex, SQLXML xmlObject)
-      throws SQLException {
+  public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -647,8 +636,7 @@
    * java.util.Calendar)
    */
 
-  public void setTime(int parameterIndex, Time x, Calendar cal)
-      throws SQLException {
+  public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -839,8 +827,7 @@
    * @see java.sql.Statement#executeUpdate(java.lang.String, int)
    */
 
-  public int executeUpdate(String sql, int autoGeneratedKeys)
-      throws SQLException {
+  public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -862,8 +849,7 @@
    * @see java.sql.Statement#executeUpdate(java.lang.String, java.lang.String[])
    */
 
-  public int executeUpdate(String sql, String[] columnNames)
-      throws SQLException {
+  public int executeUpdate(String sql, String[] columnNames) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSet.java Tue Feb  9 07:55:30 2010
@@ -50,6 +50,10 @@
 import org.apache.hadoop.hive.service.HiveInterface;
 import org.apache.hadoop.io.BytesWritable;
 
+/**
+ * HiveResultSet.
+ *
+ */
 public class HiveResultSet implements java.sql.ResultSet {
   HiveInterface client;
   ArrayList<?> row;
@@ -79,7 +83,7 @@
   }
 
   /**
-   * Instantiate the dynamic serde used to deserialize the result row
+   * Instantiate the dynamic serde used to deserialize the result row.
    */
   public void initDynamicSerde() throws SQLException {
     try {
@@ -111,7 +115,7 @@
       Properties dsp = new Properties();
       dsp.setProperty(Constants.SERIALIZATION_FORMAT,
           org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class
-              .getName());
+          .getName());
       dsp.setProperty(
           org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME,
           "result");
@@ -295,8 +299,7 @@
    * @see java.sql.ResultSet#getBigDecimal(int, int)
    */
 
-  public BigDecimal getBigDecimal(int columnIndex, int scale)
-      throws SQLException {
+  public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -307,8 +310,7 @@
    * @see java.sql.ResultSet#getBigDecimal(java.lang.String, int)
    */
 
-  public BigDecimal getBigDecimal(String columnName, int scale)
-      throws SQLException {
+  public BigDecimal getBigDecimal(String columnName, int scale) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -832,8 +834,7 @@
    * @see java.sql.ResultSet#getObject(java.lang.String, java.util.Map)
    */
 
-  public Object getObject(String colName, Map<String, Class<?>> map)
-      throws SQLException {
+  public Object getObject(String colName, Map<String, Class<?>> map) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1055,8 +1056,7 @@
    * @see java.sql.ResultSet#getTimestamp(int, java.util.Calendar)
    */
 
-  public Timestamp getTimestamp(int columnIndex, Calendar cal)
-      throws SQLException {
+  public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1067,8 +1067,7 @@
    * @see java.sql.ResultSet#getTimestamp(java.lang.String, java.util.Calendar)
    */
 
-  public Timestamp getTimestamp(String columnName, Calendar cal)
-      throws SQLException {
+  public Timestamp getTimestamp(String columnName, Calendar cal) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1250,12 +1249,12 @@
       return false;
     }
 
-    String row_str = "";
+    String rowStr = "";
     try {
-      row_str = (String) client.fetchOne();
+      rowStr = (String) client.fetchOne();
       rowsFetched++;
-      if (!row_str.equals("")) {
-        Object o = ds.deserialize(new BytesWritable(row_str.getBytes()));
+      if (!rowStr.equals("")) {
+        Object o = ds.deserialize(new BytesWritable(rowStr.getBytes()));
         row = (ArrayList<?>) o;
       }
     } catch (Exception ex) {
@@ -1263,7 +1262,7 @@
       throw new SQLException("Error retrieving next row");
     }
     // NOTE: fetchOne doesn't throw new SQLException("Method not supported").
-    return !row_str.equals("");
+    return !rowStr.equals("");
   }
 
   /*
@@ -1382,8 +1381,7 @@
    * @see java.sql.ResultSet#updateAsciiStream(int, java.io.InputStream)
    */
 
-  public void updateAsciiStream(int columnIndex, InputStream x)
-      throws SQLException {
+  public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1395,8 +1393,7 @@
    * java.io.InputStream)
    */
 
-  public void updateAsciiStream(String columnLabel, InputStream x)
-      throws SQLException {
+  public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1457,8 +1454,7 @@
    * @see java.sql.ResultSet#updateBigDecimal(int, java.math.BigDecimal)
    */
 
-  public void updateBigDecimal(int columnIndex, BigDecimal x)
-      throws SQLException {
+  public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1470,8 +1466,7 @@
    * java.math.BigDecimal)
    */
 
-  public void updateBigDecimal(String columnName, BigDecimal x)
-      throws SQLException {
+  public void updateBigDecimal(String columnName, BigDecimal x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1482,8 +1477,7 @@
    * @see java.sql.ResultSet#updateBinaryStream(int, java.io.InputStream)
    */
 
-  public void updateBinaryStream(int columnIndex, InputStream x)
-      throws SQLException {
+  public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1495,8 +1489,7 @@
    * java.io.InputStream)
    */
 
-  public void updateBinaryStream(String columnLabel, InputStream x)
-      throws SQLException {
+  public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1579,8 +1572,7 @@
    * @see java.sql.ResultSet#updateBlob(int, java.io.InputStream)
    */
 
-  public void updateBlob(int columnIndex, InputStream inputStream)
-      throws SQLException {
+  public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1591,8 +1583,7 @@
    * @see java.sql.ResultSet#updateBlob(java.lang.String, java.io.InputStream)
    */
 
-  public void updateBlob(String columnLabel, InputStream inputStream)
-      throws SQLException {
+  public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1694,8 +1685,7 @@
    * @see java.sql.ResultSet#updateCharacterStream(int, java.io.Reader)
    */
 
-  public void updateCharacterStream(int columnIndex, Reader x)
-      throws SQLException {
+  public void updateCharacterStream(int columnIndex, Reader x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1707,8 +1697,7 @@
    * java.io.Reader)
    */
 
-  public void updateCharacterStream(String columnLabel, Reader reader)
-      throws SQLException {
+  public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1813,8 +1802,7 @@
    * @see java.sql.ResultSet#updateClob(int, java.io.Reader, long)
    */
 
-  public void updateClob(int columnIndex, Reader reader, long length)
-      throws SQLException {
+  public void updateClob(int columnIndex, Reader reader, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1825,8 +1813,7 @@
    * @see java.sql.ResultSet#updateClob(java.lang.String, java.io.Reader, long)
    */
 
-  public void updateClob(String columnLabel, Reader reader, long length)
-      throws SQLException {
+  public void updateClob(String columnLabel, Reader reader, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1947,8 +1934,7 @@
    * @see java.sql.ResultSet#updateNCharacterStream(int, java.io.Reader)
    */
 
-  public void updateNCharacterStream(int columnIndex, Reader x)
-      throws SQLException {
+  public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1960,8 +1946,7 @@
    * java.io.Reader)
    */
 
-  public void updateNCharacterStream(String columnLabel, Reader reader)
-      throws SQLException {
+  public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -1972,8 +1957,7 @@
    * @see java.sql.ResultSet#updateNCharacterStream(int, java.io.Reader, long)
    */
 
-  public void updateNCharacterStream(int columnIndex, Reader x, long length)
-      throws SQLException {
+  public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -2030,8 +2014,7 @@
    * @see java.sql.ResultSet#updateNClob(java.lang.String, java.io.Reader)
    */
 
-  public void updateNClob(String columnLabel, Reader reader)
-      throws SQLException {
+  public void updateNClob(String columnLabel, Reader reader) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -2042,8 +2025,7 @@
    * @see java.sql.ResultSet#updateNClob(int, java.io.Reader, long)
    */
 
-  public void updateNClob(int columnIndex, Reader reader, long length)
-      throws SQLException {
+  public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -2054,8 +2036,7 @@
    * @see java.sql.ResultSet#updateNClob(java.lang.String, java.io.Reader, long)
    */
 
-  public void updateNClob(String columnLabel, Reader reader, long length)
-      throws SQLException {
+  public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -2077,8 +2058,7 @@
    * @see java.sql.ResultSet#updateNString(java.lang.String, java.lang.String)
    */
 
-  public void updateNString(String columnLabel, String string)
-      throws SQLException {
+  public void updateNString(String columnLabel, String string) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -2133,8 +2113,7 @@
    * @see java.sql.ResultSet#updateObject(int, java.lang.Object, int)
    */
 
-  public void updateObject(int columnIndex, Object x, int scale)
-      throws SQLException {
+  public void updateObject(int columnIndex, Object x, int scale) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -2146,8 +2125,7 @@
    * int)
    */
 
-  public void updateObject(String columnName, Object x, int scale)
-      throws SQLException {
+  public void updateObject(String columnName, Object x, int scale) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -2213,8 +2191,7 @@
    * @see java.sql.ResultSet#updateSQLXML(int, java.sql.SQLXML)
    */
 
-  public void updateSQLXML(int columnIndex, SQLXML xmlObject)
-      throws SQLException {
+  public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -2225,8 +2202,7 @@
    * @see java.sql.ResultSet#updateSQLXML(java.lang.String, java.sql.SQLXML)
    */
 
-  public void updateSQLXML(String columnLabel, SQLXML xmlObject)
-      throws SQLException {
+  public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -2315,8 +2291,7 @@
    * java.sql.Timestamp)
    */
 
-  public void updateTimestamp(String columnName, Timestamp x)
-      throws SQLException {
+  public void updateTimestamp(String columnName, Timestamp x) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
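
For reference, the renamed fetch loop above is what runs beneath each ResultSet.next() call: one client.fetchOne() round trip, then deserialization of the returned row string, with an empty string signalling the end of the results. A minimal client sketch follows, assuming a Hive server at localhost:10000 and an existing table named src (both illustrative, not part of this commit):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveResultSetSketch {
  public static void main(String[] args) throws Exception {
    // Driver class name as used elsewhere in this commit.
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default");
    Statement stmt = con.createStatement();
    ResultSet res = stmt.executeQuery("select key, value from src limit 5");
    // Each next() triggers one fetchOne(); false means the empty-string sentinel arrived.
    while (res.next()) {
      System.out.println(res.getString(1) + "\t" + res.getString(2));
    }
    con.close();
  }
}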

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java Tue Feb  9 07:55:30 2010
@@ -25,6 +25,10 @@
 
 import org.apache.hadoop.hive.serde.Constants;
 
+/**
+ * HiveResultSetMetaData.
+ *
+ */
 public class HiveResultSetMetaData implements java.sql.ResultSetMetaData {
   List<String> columnNames;
   List<String> columnTypes;
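
The two lists above back the metadata queries that TestJdbcDriver exercises further down. A short consumer sketch, assuming a ResultSet res already obtained from a Hive query (illustrative only):

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

public final class MetaDataSketch {
  static void describe(ResultSet res) throws SQLException {
    ResultSetMetaData meta = res.getMetaData();
    // JDBC column indexes are 1-based.
    for (int i = 1; i <= meta.getColumnCount(); i++) {
      System.out.println(meta.getColumnTypeName(i)
          + " (display size " + meta.getColumnDisplaySize(i) + ")");
    }
  }
}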

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveStatement.java Tue Feb  9 07:55:30 2010
@@ -26,6 +26,10 @@
 import org.apache.hadoop.hive.service.HiveInterface;
 import org.apache.hadoop.hive.service.HiveServerException;
 
+/**
+ * HiveStatement.
+ *
+ */
 public class HiveStatement implements java.sql.Statement {
   JdbcSessionState session;
   HiveInterface client;
@@ -34,22 +38,22 @@
    * <code>
    * statement.execute(String sql);
    * statement.getResultSet();
-   * </code>
+   * </code>.
    */
   ResultSet resultSet = null;
 
   /**
-   * The maximum number of rows this statement should return (0 => all rows)
+   * The maximum number of rows this statement should return (0 => all rows).
    */
   int maxRows = 0;
 
   /**
-   * Add SQLWarnings to the warningChain if needed
+   * Add SQLWarnings to the warningChain if needed.
    */
   SQLWarning warningChain = null;
 
   /**
-   * Keep state so we can fail certain calls made after close();
+   * Keep state so we can fail certain calls made after close().
    */
   boolean isClosed = false;
 
@@ -219,8 +223,7 @@
    * @see java.sql.Statement#executeUpdate(java.lang.String, int)
    */
 
-  public int executeUpdate(String sql, int autoGeneratedKeys)
-      throws SQLException {
+  public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
@@ -242,8 +245,7 @@
    * @see java.sql.Statement#executeUpdate(java.lang.String, java.lang.String[])
    */
 
-  public int executeUpdate(String sql, String[] columnNames)
-      throws SQLException {
+  public int executeUpdate(String sql, String[] columnNames) throws SQLException {
     // TODO Auto-generated method stub
     throw new SQLException("Method not supported");
   }
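
The javadoc fixes above pin down the maxRows contract: 0 means return all rows. A short usage sketch, assuming an open Connection con to a Hive server and a table named src (both illustrative):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public final class MaxRowsSketch {
  static void demo(Connection con) throws SQLException {
    Statement stmt = con.createStatement();
    stmt.setMaxRows(0); // 0 => no cap, per the comment fixed above
    ResultSet all = stmt.executeQuery("select * from src");
    while (all.next()) {
      // drains every row
    }
    stmt.setMaxRows(100); // subsequent queries return at most 100 rows
    ResultSet capped = stmt.executeQuery("select * from src");
    while (capped.next()) {
      // at most 100 iterations here
    }
  }
}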

Modified: hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcSessionState.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcSessionState.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcSessionState.java (original)
+++ hadoop/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcSessionState.java Tue Feb  9 07:55:30 2010
@@ -21,6 +21,10 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
+/**
+ * JdbcSessionState.
+ *
+ */
 public class JdbcSessionState extends SessionState {
 
   public JdbcSessionState() {

Modified: hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hadoop/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Tue Feb  9 07:55:30 2010
@@ -33,6 +33,10 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 
+/**
+ * TestJdbcDriver.
+ *
+ */
 public class TestJdbcDriver extends TestCase {
   private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
   private static String tableName = "testHiveDriverTable";
@@ -136,17 +140,16 @@
 
   public final void testSelectAllPartioned() throws Exception {
     doTestSelectAll(partitionedTableName, -1); // tests not setting maxRows
-                                               // (return all)
+    // (return all)
     doTestSelectAll(partitionedTableName, 0); // tests setting maxRows to 0
-                                              // (return all)
+    // (return all)
   }
 
   public final void testSelectAllMaxRows() throws Exception {
     doTestSelectAll(tableName, 100);
   }
 
-  private final void doTestSelectAll(String tableName, int maxRows)
-      throws Exception {
+  private void doTestSelectAll(String tableName, int maxRows) throws Exception {
     Statement stmt = con.createStatement();
     if (maxRows >= 0) {
       stmt.setMaxRows(maxRows);
@@ -241,7 +244,8 @@
     // code. This should be refactored.
     doTestErrorCase(
         "create table " + tableName + " (key int, value string)",
-        "Query returned non-zero code: 9, cause: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask",
+        "Query returned non-zero code: 9, cause: FAILED: Execution Error, "
+        + "return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask",
         "08S01", 9);
   }
 
@@ -324,7 +328,7 @@
     res = stmt.executeQuery("create table " + tableName
         + " (a string, b boolean, c bigint, d int, f double)");
     res = stmt.executeQuery(
-      "select a,b,c,d,f as e,f*2 from " + tableName + " limit 1");
+        "select a,b,c,d,f as e,f*2 from " + tableName + " limit 1");
 
     ResultSetMetaData meta = res.getMetaData();
     assertEquals("Unexpected column count", 6, meta.getColumnCount());
@@ -340,30 +344,18 @@
     assertEquals("Unexpected column type", Types.INTEGER, meta.getColumnType(4));
     assertEquals("Unexpected column type", Types.DOUBLE, meta.getColumnType(5));
     assertEquals("Unexpected column type", Types.DOUBLE, meta.getColumnType(6));
-    assertEquals("Unexpected column type name", "string", meta
-        .getColumnTypeName(1));
-    assertEquals("Unexpected column type name", "boolean", meta
-        .getColumnTypeName(2));
-    assertEquals("Unexpected column type name", "bigint", meta
-        .getColumnTypeName(3));
-    assertEquals("Unexpected column type name", "int", meta
-        .getColumnTypeName(4));
-    assertEquals("Unexpected column type name", "double", meta
-        .getColumnTypeName(5));
-    assertEquals("Unexpected column type name", "double", meta
-        .getColumnTypeName(6));
-    assertEquals("Unexpected column display size", 32, meta
-        .getColumnDisplaySize(1));
-    assertEquals("Unexpected column display size", 8, meta
-        .getColumnDisplaySize(2));
-    assertEquals("Unexpected column display size", 32, meta
-        .getColumnDisplaySize(3));
-    assertEquals("Unexpected column display size", 16, meta
-        .getColumnDisplaySize(4));
-    assertEquals("Unexpected column display size", 16, meta
-        .getColumnDisplaySize(5));
-    assertEquals("Unexpected column display size", 16, meta
-        .getColumnDisplaySize(6));
+    assertEquals("Unexpected column type name", "string", meta.getColumnTypeName(1));
+    assertEquals("Unexpected column type name", "boolean", meta.getColumnTypeName(2));
+    assertEquals("Unexpected column type name", "bigint", meta.getColumnTypeName(3));
+    assertEquals("Unexpected column type name", "int", meta.getColumnTypeName(4));
+    assertEquals("Unexpected column type name", "double", meta.getColumnTypeName(5));
+    assertEquals("Unexpected column type name", "double", meta.getColumnTypeName(6));
+    assertEquals("Unexpected column display size", 32, meta.getColumnDisplaySize(1));
+    assertEquals("Unexpected column display size", 8, meta.getColumnDisplaySize(2));
+    assertEquals("Unexpected column display size", 32, meta.getColumnDisplaySize(3));
+    assertEquals("Unexpected column display size", 16, meta.getColumnDisplaySize(4));
+    assertEquals("Unexpected column display size", 16, meta.getColumnDisplaySize(5));
+    assertEquals("Unexpected column display size", 16, meta.getColumnDisplaySize(6));
 
     for (int i = 1; i <= 6; i++) {
       assertFalse(meta.isAutoIncrement(i));
@@ -380,10 +372,10 @@
 
   // [url] [host] [port] [db]
   private static final String[][] URL_PROPERTIES = new String[][] {
-      { "jdbc:hive://", "", "", "default" },
-      { "jdbc:hive://localhost:10001/default", "localhost", "10001", "default" },
-      { "jdbc:hive://localhost/notdefault", "localhost", "10000", "notdefault" },
-      { "jdbc:hive://foo:1243", "foo", "1243", "default" } };
+      {"jdbc:hive://", "", "", "default"},
+      {"jdbc:hive://localhost:10001/default", "localhost", "10001", "default"},
+      {"jdbc:hive://localhost/notdefault", "localhost", "10000", "notdefault"},
+      {"jdbc:hive://foo:1243", "foo", "1243", "default"}};
 
   public void testDriverProperties() throws SQLException {
     HiveDriver driver = new HiveDriver();
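
The URL_PROPERTIES table above encodes the driver's defaulting rules: a missing port falls back to 10000 (when a host is present) and a missing path falls back to the "default" database. The following parser reproduces those four rows; it mirrors the test's expectations only and is not the driver's own implementation:

import java.net.URI;

public final class HiveUrlSketch {
  // Returns {host, port, db}, matching the URL_PROPERTIES rows above.
  static String[] parse(String url) {
    // Strip the "jdbc:" prefix so java.net.URI can parse the remainder.
    URI uri = URI.create(url.substring("jdbc:".length()));
    String host = uri.getHost() == null ? "" : uri.getHost();
    String port = uri.getPort() != -1 ? String.valueOf(uri.getPort())
        : (host.isEmpty() ? "" : "10000"); // assumed default Hive server port
    String path = uri.getPath();
    String db = (path == null || path.length() <= 1) ? "default" : path.substring(1);
    return new String[] {host, port, db};
  }
}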

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java Tue Feb  9 07:55:30 2010
@@ -19,12 +19,10 @@
 package org.apache.hadoop.hive.ql;
 
 import java.io.DataInput;
-import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
@@ -48,11 +46,11 @@
   private Path resFile;
   private Path resDir;
   private FileSystem resFs;
-  static final private Log LOG = LogFactory.getLog("hive.ql.Context");
+  private static final Log LOG = LogFactory.getLog("hive.ql.Context");
   private Path[] resDirPaths;
   private int resDirFilesNum;
   boolean initialized;
-  
+
   // Path without a file system
   // hive.exec.scratchdir: default: "/tmp/"+System.getProperty("user.name")+"/hive"
   // Used for creating temporary path on external file systems
@@ -65,12 +63,12 @@
   // also based on hive.exec.scratchdir which by default is
   // "/tmp/"+System.getProperty("user.name")+"/hive"
   private Path MRScratchDir;
-  
+
   // allScratchDirs contains all scratch directories including
   // localScratchDir and MRScratchDir.
   // The external scratch dirs will be also based on hive.exec.scratchdir.
-  private final Map<String,Path> externalScratchDirs = new HashMap<String,Path>();
-  
+  private final Map<String, Path> externalScratchDirs = new HashMap<String, Path>();
+
   private HiveConf conf;
   protected int pathid = 10000;
   protected boolean explain = false;
@@ -94,7 +92,7 @@
   }
 
   /**
-   * Set the context on whether the current query is an explain query
+   * Set the context on whether the current query is an explain query.
    * 
    * @param value
    *          true if the query is an explain query, false if not
@@ -104,7 +102,7 @@
   }
 
   /**
-   * Find out whether the current query is an explain query
+   * Find out whether the current query is an explain query.
    * 
    * @return true if the query is an explain query, false if not
    */
@@ -115,16 +113,16 @@
   /**
   * Make a tmp directory for MR intermediate data. If URI/Scheme are not
    * supplied - those implied by the default filesystem will be used (which will
-   * typically correspond to hdfs instance on hadoop cluster)
+   * typically correspond to hdfs instance on hadoop cluster).
    * 
    * @param mkdir  if true, will make the directory. Will throw IOException if that fails.
    */
   private static Path makeMRScratchDir(HiveConf conf, String executionId, boolean mkdir)
       throws IOException {
-    
+
     Path dir = FileUtils.makeQualified(
         new Path(conf.getVar(HiveConf.ConfVars.SCRATCHDIR), executionId), conf);
-    
+
     if (mkdir) {
       FileSystem fs = dir.getFileSystem(conf);
       if (!fs.mkdirs(dir)) {
@@ -136,14 +134,14 @@
 
   /**
    * Make a tmp directory on specified URI Currently will use the same path as
-   * implied by SCRATCHDIR config variable
+   * implied by SCRATCHDIR config variable.
    */
   private static Path makeExternalScratchDir(HiveConf conf, String executionId,
       boolean mkdir, URI extURI) throws IOException {
-    
+
     Path dir = new Path(extURI.getScheme(), extURI.getAuthority(),
         conf.getVar(HiveConf.ConfVars.SCRATCHDIR) + Path.SEPARATOR + executionId);
-    
+
     if (mkdir) {
       FileSystem fs = dir.getFileSystem(conf);
       if (!fs.mkdirs(dir)) {
@@ -152,7 +150,7 @@
     }
     return dir;
   }
-  
+
   /**
    * Make a tmp directory for local file system.
    * 
@@ -160,12 +158,12 @@
    */
   private static Path makeLocalScratchDir(HiveConf conf, String executionId, boolean mkdir)
       throws IOException {
-    
+
     FileSystem fs = FileSystem.getLocal(conf);
     Path dir = fs.makeQualified(new Path(System.getProperty("java.io.tmpdir")
         + Path.SEPARATOR + System.getProperty("user.name") + Path.SEPARATOR
         + executionId));
-    
+
     if (mkdir) {
       if (!fs.mkdirs(dir)) {
         throw new IOException("Cannot make directory: " + dir);
@@ -176,7 +174,7 @@
 
   /**
   * Get a tmp directory on specified URI. Will check if this has already been
-   * made (either via MR or Local FileSystem or some other external URI
+   * made (either via MR or Local FileSystem or some other external URI).
    */
   private String getExternalScratchDir(URI extURI) {
     try {
@@ -193,7 +191,7 @@
   }
 
   /**
-   * Create a map-reduce scratch directory on demand and return it
+   * Create a map-reduce scratch directory on demand and return it.
    */
   private String getMRScratchDir() {
     try {
@@ -210,7 +208,7 @@
   }
 
   /**
-   * Create a local scratch directory on demand and return it
+   * Create a local scratch directory on demand and return it.
    */
   private String getLocalScratchDir() {
     try {
@@ -234,22 +232,22 @@
           + StringUtils.stringifyException(e));
     }
   }
-  
+
   /**
-   * Remove any created scratch directories
+   * Remove any created scratch directories.
    */
   private void removeScratchDir() {
-    
-    for (Map.Entry<String,Path> p : externalScratchDirs.entrySet()) {
+
+    for (Map.Entry<String, Path> p : externalScratchDirs.entrySet()) {
       removeDir(p.getValue());
     }
     externalScratchDirs.clear();
-    
+
     if (MRScratchDir != null) {
       removeDir(MRScratchDir);
       MRScratchDir = null;
     }
-    
+
     if (localScratchDir != null) {
       removeDir(localScratchDir);
       localScratchDir = null;
@@ -257,15 +255,15 @@
   }
 
   /**
-   * Return the next available path in the current scratch dir
+   * Return the next available path in the current scratch dir.
    */
   private String nextPath(String base) {
     return base + Path.SEPARATOR + Integer.toString(pathid++);
   }
 
   /**
-   * check if path is tmp path. the assumption is that all uri's relative to
-   * scratchdir are temporary
+   * Check if path is tmp path. The assumption is that all URIs relative to
+   * scratchdir are temporary.
    * 
    * @return true if a uri is a temporary uri for map-reduce intermediate data,
    *         false otherwise
@@ -275,7 +273,7 @@
   }
 
   /**
-   * Get a path to store map-reduce intermediate data in
+   * Get a path to store map-reduce intermediate data in.
    * 
    * @return next available path for map-red intermediate data
    */
@@ -284,7 +282,7 @@
   }
 
   /**
-   * Get a tmp path on local host to store intermediate data
+   * Get a tmp path on local host to store intermediate data.
    * 
    * @return next available tmp path on local fs
    */
@@ -293,7 +291,7 @@
   }
 
   /**
-   * Get a path to store tmp data destined for external URI
+   * Get a path to store tmp data destined for external URI.
    * 
    * @param extURI
    *          external URI to which the tmp data has to be eventually moved
@@ -419,7 +417,7 @@
   }
 
   /**
-   * Little abbreviation for StringUtils
+   * Little abbreviation for StringUtils.
    */
   private static boolean strEquals(String str1, String str2) {
     return org.apache.commons.lang.StringUtils.equals(str1, str2);
@@ -448,7 +446,7 @@
   public TokenRewriteStream getTokenRewriteStream() {
     return tokenRewriteStream;
   }
-  
+
   /**
    * Generate a unique executionId.  An executionId, together with user name and
    * the configuration, will determine the temporary locations of all intermediate
@@ -463,5 +461,5 @@
         + Math.abs(rand.nextLong());
     return executionId;
   }
-  
+
 }
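
Taken together, the hunks above show Context deriving every temporary location from hive.exec.scratchdir plus a per-query executionId. A sketch of the resulting path shapes follows; the executionId prefix and date format are assumptions, since only the tail of makeExecutionId() is visible in this diff:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Random;

public final class ScratchPathSketch {
  public static void main(String[] args) {
    // Assumed shape: fixed prefix + timestamp + random suffix (only the
    // Math.abs(rand.nextLong()) tail appears in the hunk above).
    String executionId = "hive_"
        + new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss_SSS").format(new Date())
        + "_" + Math.abs(new Random().nextLong());

    // MR scratch dir: rooted at hive.exec.scratchdir (default shown above).
    String scratchDir = "/tmp/" + System.getProperty("user.name") + "/hive";
    System.out.println(scratchDir + "/" + executionId);

    // Local scratch dir: rooted at java.io.tmpdir instead.
    System.out.println(System.getProperty("java.io.tmpdir") + "/"
        + System.getProperty("user.name") + "/" + executionId);
  }
}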

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Tue Feb  9 07:55:30 2010
@@ -684,12 +684,12 @@
       }
 
       bos.reset();
-      Utilities.streamStatus ss;
+      Utilities.StreamStatus ss;
       try {
         ss = Utilities.readColumn(resStream, bos);
         if (bos.getCount() > 0) {
           row = new String(bos.getData(), 0, bos.getCount(), "UTF-8");
-        } else if (ss == Utilities.streamStatus.TERMINATED) {
+        } else if (ss == Utilities.StreamStatus.TERMINATED) {
           row = new String();
         }
 
@@ -704,7 +704,7 @@
         return false;
       }
 
-      if (ss == Utilities.streamStatus.EOF) {
+      if (ss == Utilities.StreamStatus.EOF) {
         resStream = ctx.getStream();
       }
     }
@@ -717,10 +717,10 @@
     } catch (Exception e) {
       console.printError("FAILED: Unknown exception : " + e.getMessage(), "\n"
           + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      return (13);
+      return 13;
     }
 
-    return (0);
+    return 0;
   }
 
   public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan()
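
The streamStatus -> StreamStatus rename brings the nested type in line with Java's capitalized type-name convention. Utilities.java itself is outside this excerpt, so the following definition is an assumed sketch showing only the two constants Driver.java references: TERMINATED marks a row boundary (possibly with zero bytes read, which getResults() turns into an empty row), and EOF tells the driver to advance to the next stream via ctx.getStream().

public final class Utilities {
  // Assumed shape of the nested enum this hunk renames.
  public enum StreamStatus {
    TERMINATED, // current row ended; an empty buffer means an empty row
    EOF         // the underlying result stream is exhausted
  }
}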

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/DriverContext.java Tue Feb  9 07:55:30 2010
@@ -24,12 +24,16 @@
 
 import org.apache.hadoop.hive.ql.exec.Task;
 
+/**
+ * DriverContext.
+ *
+ */
 public class DriverContext {
 
   Queue<Task<? extends Serializable>> runnable = new LinkedList<Task<? extends Serializable>>();
-  
+
   // how many jobs have been started
-  int curJobNo; 
+  int curJobNo;
 
   public DriverContext(Queue<Task<? extends Serializable>> runnable) {
     this.runnable = runnable;
@@ -40,7 +44,7 @@
   }
 
   /**
-   * Checks if a task can be launched
+   * Checks if a task can be launched.
    * 
    * @param tsk
    *          the task to be checked