Posted to commits@hive.apache.org by zs...@apache.org on 2010/01/21 11:38:15 UTC

svn commit: r901644 [37/37] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ ql/src/java/org/apache/hadoop/hive/ql/history/ ql/src/jav...

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/PerformTestRCFileAndSeqFile.java Thu Jan 21 10:37:58 2010
@@ -20,7 +20,7 @@
 
 public class PerformTestRCFileAndSeqFile extends TestCase {
 
-  private Configuration conf = new Configuration();
+  private final Configuration conf = new Configuration();
 
   private Path testRCFile;
   private Path testSeqFile;
@@ -35,23 +35,24 @@
 
   public PerformTestRCFileAndSeqFile(boolean local, String file)
       throws IOException {
-    if (local)
+    if (local) {
       fs = FileSystem.getLocal(conf);
-    else
+    } else {
       fs = FileSystem.get(conf);
+    }
     conf.setInt(RCFile.Writer.COLUMNS_BUFFER_SIZE_CONF_STR, 1 * 1024 * 1024);
     if (file == null) {
       Path dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
-      this.testRCFile = new Path(dir, "test_rcfile");
-      this.testSeqFile = new Path(dir, "test_seqfile");
+      testRCFile = new Path(dir, "test_rcfile");
+      testSeqFile = new Path(dir, "test_seqfile");
     } else {
-      this.testRCFile = new Path(file + "-rcfile");
-      this.testSeqFile = new Path(file + "-seqfile");
+      testRCFile = new Path(file + "-rcfile");
+      testSeqFile = new Path(file + "-seqfile");
     }
     fs.delete(testRCFile, true);
-    fs.delete(this.testSeqFile, true);
-    System.out.println("RCFile:" + this.testRCFile.toString());
-    System.out.println("SequenceFile:" + this.testSeqFile.toString());
+    fs.delete(testSeqFile, true);
+    System.out.println("RCFile:" + testRCFile.toString());
+    System.out.println("SequenceFile:" + testSeqFile.toString());
   }
 
   private void writeSeqenceFileTest(FileSystem fs, int rowCount, Path file,
@@ -115,10 +116,11 @@
   private void nextRandomRow(byte[][] row, BytesRefArrayWritable bytes) {
     bytes.resetValid(row.length);
     for (int i = 0; i < row.length; i++) {
-      int len = Math.abs(randColLenGenerator.nextInt(this.columnMaxSize));
+      int len = Math.abs(randColLenGenerator.nextInt(columnMaxSize));
       row[i] = new byte[len];
-      for (int j = 0; j < len; j++)
-        row[i][j] = getRandomChar(this.randomCharGenerator);
+      for (int j = 0; j < len; j++) {
+        row[i][j] = getRandomChar(randomCharGenerator);
+      }
       bytes.get(i).set(row[i], 0, len);
     }
   }
@@ -130,14 +132,12 @@
     do {
       b = (byte) random.nextInt(CHAR_END);
     } while ((b < 65));
-    if (b > 90)
+    if (b > 90) {
       b += 7;
+    }
     return b;
   }
 
-  private static String usage = "Usage: PerformTestRCFileAndSeqFile "
-      + "[-count N]" + " [file]";
-
   public static void main(String[] args) throws Exception {
     int count = 1000;
     String file = null;
@@ -191,9 +191,9 @@
 
     // sequence file write
     start = System.currentTimeMillis();
-    writeSeqenceFileTest(fs, rowCount, this.testSeqFile, columnNum, codec);
+    writeSeqenceFileTest(fs, rowCount, testSeqFile, columnNum, codec);
     cost = System.currentTimeMillis() - start;
-    fileLen = fs.getFileStatus(this.testSeqFile).getLen();
+    fileLen = fs.getFileStatus(testSeqFile).getLen();
     System.out.println("Write SequenceFile with " + columnNum
         + " random string columns and " + rowCount + " rows cost " + cost
         + " milliseconds. And the file's on disk size is " + fileLen);
@@ -206,13 +206,14 @@
     System.out.println("Read only one column of a RCFile with " + columnNum
         + " random string columns and " + rowCount + " rows cost " + cost
         + " milliseconds.");
-    if (rowCount != readRows)
+    if (rowCount != readRows) {
       throw new IllegalStateException("Compare read and write row count error.");
+    }
     assertEquals("", rowCount, readRows);
 
     if (isLocalFileSystem() && !checkCorrect) {
       // make some noisy to avoid disk caches data.
-      performSequenceFileRead(fs, rowCount, this.testSeqFile);
+      performSequenceFileRead(fs, rowCount, testSeqFile);
     }
 
     start = System.currentTimeMillis();
@@ -222,13 +223,14 @@
     System.out.println("Read only first and last columns of a RCFile with "
         + columnNum + " random string columns and " + rowCount + " rows cost "
         + cost + " milliseconds.");
-    if (rowCount != readRows)
+    if (rowCount != readRows) {
       throw new IllegalStateException("Compare read and write row count error.");
+    }
     assertEquals("", rowCount, readRows);
 
     if (isLocalFileSystem() && !checkCorrect) {
       // make some noisy to avoid disk caches data.
-      performSequenceFileRead(fs, rowCount, this.testSeqFile);
+      performSequenceFileRead(fs, rowCount, testSeqFile);
     }
 
     start = System.currentTimeMillis();
@@ -237,13 +239,14 @@
     System.out.println("Read all columns of a RCFile with " + columnNum
         + " random string columns and " + rowCount + " rows cost " + cost
         + " milliseconds.");
-    if (rowCount != readRows)
+    if (rowCount != readRows) {
       throw new IllegalStateException("Compare read and write row count error.");
+    }
     assertEquals("", rowCount, readRows);
 
     // sequence file read
     start = System.currentTimeMillis();
-    performSequenceFileRead(fs, rowCount, this.testSeqFile);
+    performSequenceFileRead(fs, rowCount, testSeqFile);
     cost = System.currentTimeMillis() - start;
     System.out.println("Read SequenceFile with " + columnNum
         + "  random string columns and " + rowCount + " rows cost " + cost
@@ -259,8 +262,9 @@
     SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
     ByteWritable key = new ByteWritable();
     BytesRefArrayWritable val = new BytesRefArrayWritable();
-    for (int i = 0; i < count; i++)
+    for (int i = 0; i < count; i++) {
       reader.next(key, val);
+    }
   }
 
   public int performRCFileReadFirstColumnTest(FileSystem fs, Path file,
@@ -269,7 +273,7 @@
     byte[][] checkBytes = null;
     BytesRefArrayWritable checkRow = new BytesRefArrayWritable(allColumnsNumber);
     if (chechCorrect) {
-      this.resetRandomGenerators();
+      resetRandomGenerators();
       checkBytes = new byte[allColumnsNumber][];
     }
 
@@ -286,11 +290,12 @@
       reader.getCurrentRow(cols);
       boolean ok = true;
       if (chechCorrect) {
-        this.nextRandomRow(checkBytes, checkRow);
+        nextRandomRow(checkBytes, checkRow);
         ok = ok && (checkRow.get(0).equals(cols.get(0)));
       }
-      if (!ok)
+      if (!ok) {
         throw new IllegalStateException("Compare read and write error.");
+      }
       actualReadCount++;
     }
     return actualReadCount;
@@ -302,7 +307,7 @@
     byte[][] checkBytes = null;
     BytesRefArrayWritable checkRow = new BytesRefArrayWritable(allColumnsNumber);
     if (chechCorrect) {
-      this.resetRandomGenerators();
+      resetRandomGenerators();
       checkBytes = new byte[allColumnsNumber][];
     }
 
@@ -320,14 +325,15 @@
       reader.getCurrentRow(cols);
       boolean ok = true;
       if (chechCorrect) {
-        this.nextRandomRow(checkBytes, checkRow);
+        nextRandomRow(checkBytes, checkRow);
         ok = ok && (checkRow.get(0).equals(cols.get(0)));
         ok = ok
             && checkRow.get(allColumnsNumber - 1).equals(
                 cols.get(allColumnsNumber - 1));
       }
-      if (!ok)
+      if (!ok) {
         throw new IllegalStateException("Compare read and write error.");
+      }
       actualReadCount++;
     }
     return actualReadCount;
@@ -339,7 +345,7 @@
     byte[][] checkBytes = null;
     BytesRefArrayWritable checkRow = new BytesRefArrayWritable(allColumnsNumber);
     if (chechCorrect) {
-      this.resetRandomGenerators();
+      resetRandomGenerators();
       checkBytes = new byte[allColumnsNumber][];
     }
 
@@ -354,11 +360,12 @@
       reader.getCurrentRow(cols);
       boolean ok = true;
       if (chechCorrect) {
-        this.nextRandomRow(checkBytes, checkRow);
+        nextRandomRow(checkBytes, checkRow);
         ok = ok && checkRow.equals(cols);
       }
-      if (!ok)
+      if (!ok) {
         throw new IllegalStateException("Compare read and write error.");
+      }
       actualReadCount++;
     }
     return actualReadCount;

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/RecordTestObj.java Thu Jan 21 10:37:58 2010
@@ -6,44 +6,60 @@
   private static org.apache.hadoop.record.meta.RecordTypeInfo _rio_rtiFilter;
   private static int[] _rio_rtiFilterFields;
   static {
-    _rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo("RecordTestObj");
-    _rio_recTypeInfo.addField("s", org.apache.hadoop.record.meta.TypeID.StringTypeID);
-    _rio_recTypeInfo.addField("num", org.apache.hadoop.record.meta.TypeID.LongTypeID);
+    _rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo(
+        "RecordTestObj");
+    _rio_recTypeInfo.addField("s",
+        org.apache.hadoop.record.meta.TypeID.StringTypeID);
+    _rio_recTypeInfo.addField("num",
+        org.apache.hadoop.record.meta.TypeID.LongTypeID);
   }
-  
+
   private String s;
   private long num;
-  public RecordTestObj() { }
-  public RecordTestObj(
-    final String s,
-    final long num) {
+
+  public RecordTestObj() {
+  }
+
+  public RecordTestObj(final String s, final long num) {
     this.s = s;
     this.num = num;
   }
+
   public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
     return _rio_recTypeInfo;
   }
-  public static void setTypeFilter(org.apache.hadoop.record.meta.RecordTypeInfo rti) {
-    if (null == rti) return;
+
+  public static void setTypeFilter(
+      org.apache.hadoop.record.meta.RecordTypeInfo rti) {
+    if (null == rti) {
+      return;
+    }
     _rio_rtiFilter = rti;
     _rio_rtiFilterFields = null;
   }
-  private static void setupRtiFields()
-  {
-    if (null == _rio_rtiFilter) return;
+
+  private static void setupRtiFields() {
+    if (null == _rio_rtiFilter) {
+      return;
+    }
     // we may already have done this
-    if (null != _rio_rtiFilterFields) return;
+    if (null != _rio_rtiFilterFields) {
+      return;
+    }
     int _rio_i, _rio_j;
-    _rio_rtiFilterFields = new int [_rio_rtiFilter.getFieldTypeInfos().size()];
-    for (_rio_i=0; _rio_i<_rio_rtiFilterFields.length; _rio_i++) {
+    _rio_rtiFilterFields = new int[_rio_rtiFilter.getFieldTypeInfos().size()];
+    for (_rio_i = 0; _rio_i < _rio_rtiFilterFields.length; _rio_i++) {
       _rio_rtiFilterFields[_rio_i] = 0;
     }
-    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter.getFieldTypeInfos().iterator();
-    _rio_i=0;
+    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter
+        .getFieldTypeInfos().iterator();
+    _rio_i = 0;
     while (_rio_itFilter.hasNext()) {
-      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter.next();
-      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo.getFieldTypeInfos().iterator();
-      _rio_j=1;
+      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter
+          .next();
+      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo
+          .getFieldTypeInfos().iterator();
+      _rio_j = 1;
       while (_rio_it.hasNext()) {
         org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
         if (_rio_tInfo.equals(_rio_tInfoFilter)) {
@@ -55,34 +71,44 @@
       _rio_i++;
     }
   }
+
   public String getS() {
     return s;
   }
+
   public void setS(final String s) {
-    this.s=s;
+    this.s = s;
   }
+
   public long getNum() {
     return num;
   }
+
   public void setNum(final long num) {
-    this.num=num;
-  }
-  public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a, final String _rio_tag)
-  throws java.io.IOException {
-    _rio_a.startRecord(this,_rio_tag);
-    _rio_a.writeString(s,"s");
-    _rio_a.writeLong(num,"num");
-    _rio_a.endRecord(this,_rio_tag);
+    this.num = num;
   }
-  private void deserializeWithoutFilter(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
-  throws java.io.IOException {
+
+  @Override
+  public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a,
+      final String _rio_tag) throws java.io.IOException {
+    _rio_a.startRecord(this, _rio_tag);
+    _rio_a.writeString(s, "s");
+    _rio_a.writeLong(num, "num");
+    _rio_a.endRecord(this, _rio_tag);
+  }
+
+  private void deserializeWithoutFilter(
+      final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
+      throws java.io.IOException {
     _rio_a.startRecord(_rio_tag);
-    s=_rio_a.readString("s");
-    num=_rio_a.readLong("num");
+    s = _rio_a.readString("s");
+    num = _rio_a.readLong("num");
     _rio_a.endRecord(_rio_tag);
   }
-  public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
-  throws java.io.IOException {
+
+  @Override
+  public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a,
+      final String _rio_tag) throws java.io.IOException {
     if (null == _rio_rtiFilter) {
       deserializeWithoutFilter(_rio_a, _rio_tag);
       return;
@@ -90,32 +116,40 @@
     // if we're here, we need to read based on version info
     _rio_a.startRecord(_rio_tag);
     setupRtiFields();
-    for (int _rio_i=0; _rio_i<_rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) {
+    for (int _rio_i = 0; _rio_i < _rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) {
       if (1 == _rio_rtiFilterFields[_rio_i]) {
-        s=_rio_a.readString("s");
-      }
-      else if (2 == _rio_rtiFilterFields[_rio_i]) {
-        num=_rio_a.readLong("num");
-      }
-      else {
-        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>)(_rio_rtiFilter.getFieldTypeInfos());
-        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i).getFieldID(), typeInfos.get(_rio_i).getTypeID());
+        s = _rio_a.readString("s");
+      } else if (2 == _rio_rtiFilterFields[_rio_i]) {
+        num = _rio_a.readLong("num");
+      } else {
+        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
+            .getFieldTypeInfos());
+        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
+            .getFieldID(), typeInfos.get(_rio_i).getTypeID());
       }
     }
     _rio_a.endRecord(_rio_tag);
   }
-  public int compareTo (final Object _rio_peer_) throws ClassCastException {
+
+  @Override
+  public int compareTo(final Object _rio_peer_) throws ClassCastException {
     if (!(_rio_peer_ instanceof RecordTestObj)) {
       throw new ClassCastException("Comparing different types of records.");
     }
     RecordTestObj _rio_peer = (RecordTestObj) _rio_peer_;
     int _rio_ret = 0;
     _rio_ret = s.compareTo(_rio_peer.s);
-    if (_rio_ret != 0) return _rio_ret;
-    _rio_ret = (num == _rio_peer.num)? 0 :((num<_rio_peer.num)?-1:1);
-    if (_rio_ret != 0) return _rio_ret;
+    if (_rio_ret != 0) {
+      return _rio_ret;
+    }
+    _rio_ret = (num == _rio_peer.num) ? 0 : ((num < _rio_peer.num) ? -1 : 1);
+    if (_rio_ret != 0) {
+      return _rio_ret;
+    }
     return _rio_ret;
   }
+
+  @Override
   public boolean equals(final Object _rio_peer_) {
     if (!(_rio_peer_ instanceof RecordTestObj)) {
       return false;
@@ -126,53 +160,68 @@
     RecordTestObj _rio_peer = (RecordTestObj) _rio_peer_;
     boolean _rio_ret = false;
     _rio_ret = s.equals(_rio_peer.s);
-    if (!_rio_ret) return _rio_ret;
-    _rio_ret = (num==_rio_peer.num);
-    if (!_rio_ret) return _rio_ret;
+    if (!_rio_ret) {
+      return _rio_ret;
+    }
+    _rio_ret = (num == _rio_peer.num);
+    if (!_rio_ret) {
+      return _rio_ret;
+    }
     return _rio_ret;
   }
+
+  @Override
   public Object clone() throws CloneNotSupportedException {
     RecordTestObj _rio_other = new RecordTestObj();
-    _rio_other.s = this.s;
-    _rio_other.num = this.num;
+    _rio_other.s = s;
+    _rio_other.num = num;
     return _rio_other;
   }
+
+  @Override
   public int hashCode() {
     int _rio_result = 17;
     int _rio_ret;
     _rio_ret = s.hashCode();
-    _rio_result = 37*_rio_result + _rio_ret;
-    _rio_ret = (int) (num^(num>>>32));
-    _rio_result = 37*_rio_result + _rio_ret;
+    _rio_result = 37 * _rio_result + _rio_ret;
+    _rio_ret = (int) (num ^ (num >>> 32));
+    _rio_result = 37 * _rio_result + _rio_ret;
     return _rio_result;
   }
+
   public static String signature() {
     return "LRecordTestObj(sl)";
   }
-  public static class Comparator extends org.apache.hadoop.record.RecordComparator {
+
+  public static class Comparator extends
+      org.apache.hadoop.record.RecordComparator {
     public Comparator() {
       super(RecordTestObj.class);
     }
+
     static public int slurpRaw(byte[] b, int s, int l) {
       try {
         int os = s;
         {
           int i = org.apache.hadoop.record.Utils.readVInt(b, s);
           int z = org.apache.hadoop.record.Utils.getVIntSize(i);
-          s+=(z+i); l-= (z+i);
+          s += (z + i);
+          l -= (z + i);
         }
         {
           long i = org.apache.hadoop.record.Utils.readVLong(b, s);
           int z = org.apache.hadoop.record.Utils.getVIntSize(i);
-          s+=z; l-=z;
+          s += z;
+          l -= z;
         }
         return (os - s);
-      } catch(java.io.IOException e) {
+      } catch (java.io.IOException e) {
         throw new RuntimeException(e);
       }
     }
-    static public int compareRaw(byte[] b1, int s1, int l1,
-                                   byte[] b2, int s2, int l2) {
+
+    static public int compareRaw(byte[] b1, int s1, int l1, byte[] b2, int s2,
+        int l2) {
       try {
         int os1 = s1;
         {
@@ -180,33 +229,48 @@
           int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
           int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
           int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
-          s1+=z1; s2+=z2; l1-=z1; l2-=z2;
-          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);
-          if (r1 != 0) { return (r1<0)?-1:0; }
-          s1+=i1; s2+=i2; l1-=i1; l1-=i2;
+          s1 += z1;
+          s2 += z2;
+          l1 -= z1;
+          l2 -= z2;
+          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
+              s2, i2);
+          if (r1 != 0) {
+            return (r1 < 0) ? -1 : 0;
+          }
+          s1 += i1;
+          s2 += i2;
+          l1 -= i1;
+          l1 -= i2;
         }
         {
           long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
           long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
           if (i1 != i2) {
-            return ((i1-i2) < 0) ? -1 : 0;
+            return ((i1 - i2) < 0) ? -1 : 0;
           }
           int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
           int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
-          s1+=z1; s2+=z2; l1-=z1; l2-=z2;
+          s1 += z1;
+          s2 += z2;
+          l1 -= z1;
+          l2 -= z2;
         }
         return (os1 - s1);
-      } catch(java.io.IOException e) {
+      } catch (java.io.IOException e) {
         throw new RuntimeException(e);
       }
     }
-    public int compare(byte[] b1, int s1, int l1,
-                         byte[] b2, int s2, int l2) {
-      int ret = compareRaw(b1,s1,l1,b2,s2,l2);
-      return (ret == -1)? -1 : ((ret==0)? 1 : 0);}
+
+    @Override
+    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+      int ret = compareRaw(b1, s1, l1, b2, s2, l2);
+      return (ret == -1) ? -1 : ((ret == 0) ? 1 : 0);
+    }
   }
-  
+
   static {
-    org.apache.hadoop.record.RecordComparator.define(RecordTestObj.class, new Comparator());
+    org.apache.hadoop.record.RecordComparator.define(RecordTestObj.class,
+        new Comparator());
   }
 }

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestFlatFileInputFormat.java Thu Jan 21 10:37:58 2010
@@ -18,33 +18,33 @@
 
 package org.apache.hadoop.hive.ql.io;
 
-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
+import java.io.Serializable;
 
-import org.apache.commons.logging.*;
+import junit.framework.TestCase;
 
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.record.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.*;
-import org.apache.hadoop.io.serializer.*;
-import org.apache.hadoop.conf.*;
-import org.apache.hadoop.util.ReflectionUtils;
-
-import org.apache.thrift.*;
-import org.apache.thrift.transport.*;
-import org.apache.thrift.protocol.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.serializer.JavaSerialization;
+import org.apache.hadoop.io.serializer.Serializer;
+import org.apache.hadoop.io.serializer.WritableSerialization;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
 
 //import org.apache.hadoop.contrib.serialization.thrift.*;
 
-public class TestFlatFileInputFormat extends TestCase  {
+public class TestFlatFileInputFormat extends TestCase {
 
   public void testFlatFileInputJava() throws Exception {
     Configuration conf;
-    JobConf job ;
+    JobConf job;
     FileSystem fs;
-    Path dir ;
+    Path dir;
     Path file;
     Reporter reporter;
     FSDataOutputStream ds;
@@ -56,18 +56,20 @@
       conf = new Configuration();
       job = new JobConf(conf);
       fs = FileSystem.getLocal(conf);
-      dir = new Path(System.getProperty("test.data.dir",".") + "/mapred");
+      dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
       file = new Path(dir, "test.txt");
       reporter = Reporter.NULL;
       fs.delete(dir, true);
 
       job.setClass(FlatFileInputFormat.SerializationImplKey,
-                   org.apache.hadoop.io.serializer.JavaSerialization.class,
-                   org.apache.hadoop.io.serializer.Serialization.class);
-      
-      job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
-                   JavaTestObjFlatFileInputFormat.class, java.io.Serializable.class);
-      
+          org.apache.hadoop.io.serializer.JavaSerialization.class,
+          org.apache.hadoop.io.serializer.Serialization.class);
+
+      job
+          .setClass(
+              FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
+              JavaTestObjFlatFileInputFormat.class, java.io.Serializable.class);
+
       //
       // Write some data out to a flat file
       //
@@ -78,38 +80,40 @@
       // construct some data and write it
       serializer.open(ds);
       for (int i = 0; i < 10; i++) {
-        serializer.serialize(new JavaTestObjFlatFileInputFormat("Hello World! " + String.valueOf(i), i));
+        serializer.serialize(new JavaTestObjFlatFileInputFormat("Hello World! "
+            + String.valueOf(i), i));
       }
       serializer.close();
 
       //
       // Construct the reader
       //
-      FileInputFormat<Void, FlatFileInputFormat.RowContainer<Serializable>> format =
-        new FlatFileInputFormat<Serializable>();
+      FileInputFormat<Void, FlatFileInputFormat.RowContainer<Serializable>> format = new FlatFileInputFormat<Serializable>();
       InputSplit[] splits = format.getSplits(job, 1);
 
       // construct the record reader
-      RecordReader<Void, FlatFileInputFormat.RowContainer<Serializable>> reader =
-        format.getRecordReader(splits[0], job, reporter);
+      RecordReader<Void, FlatFileInputFormat.RowContainer<Serializable>> reader = format
+          .getRecordReader(splits[0], job, reporter);
 
       // create key/value
       Void key = reader.createKey();
-      FlatFileInputFormat.RowContainer<Serializable> value = reader.createValue();
-      
+      FlatFileInputFormat.RowContainer<Serializable> value = reader
+          .createValue();
+
       //
       // read back the data using the FlatFileRecordReader
       //
       int count = 0;
       while (reader.next(key, value)) {
         assertTrue(key == null);
-        assertTrue(((JavaTestObjFlatFileInputFormat)value.row).s.equals("Hello World! " +String.valueOf(count)));
-        assertTrue(((JavaTestObjFlatFileInputFormat)value.row).num == count);
+        assertTrue(((JavaTestObjFlatFileInputFormat) value.row).s
+            .equals("Hello World! " + String.valueOf(count)));
+        assertTrue(((JavaTestObjFlatFileInputFormat) value.row).num == count);
         count++;
       }
       reader.close();
 
-    } catch(Exception e) {
+    } catch (Exception e) {
       System.err.println("caught: " + e);
       e.printStackTrace();
     } finally {
@@ -119,9 +123,9 @@
 
   public void testFlatFileInputRecord() throws Exception {
     Configuration conf;
-    JobConf job ;
+    JobConf job;
     FileSystem fs;
-    Path dir ;
+    Path dir;
     Path file;
     Reporter reporter;
     FSDataOutputStream ds;
@@ -133,149 +137,124 @@
       conf = new Configuration();
       job = new JobConf(conf);
       fs = FileSystem.getLocal(conf);
-      dir = new Path(System.getProperty("test.data.dir",".") + "/mapred");
+      dir = new Path(System.getProperty("test.data.dir", ".") + "/mapred");
       file = new Path(dir, "test.txt");
       reporter = Reporter.NULL;
       fs.delete(dir, true);
 
       job.setClass(FlatFileInputFormat.SerializationImplKey,
-                   org.apache.hadoop.io.serializer.WritableSerialization.class,
-                   org.apache.hadoop.io.serializer.Serialization.class);
-      
-      job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
-                   RecordTestObj.class, Writable.class);
-      
-      //
-      // Write some data out to a flat file
-      //
-      FileInputFormat.setInputPaths(job, dir);
-      ds = fs.create(file);
-      Serializer serializer = new WritableSerialization().getSerializer(Writable.class);
-
-      // construct some data and write it
-      serializer.open(ds);
-      for (int i = 0; i < 10; i++) {
-        serializer.serialize(new RecordTestObj("Hello World! " + String.valueOf(i), i));
-      }
-      serializer.close();
-
-      //
-      // Construct the reader
-      //
-      FileInputFormat<Void, FlatFileInputFormat.RowContainer<Writable>> format =
-        new FlatFileInputFormat<Writable>();
-      InputSplit[] splits = format.getSplits(job, 1);
-
-      // construct the record reader
-      RecordReader<Void, FlatFileInputFormat.RowContainer<Writable>> reader =
-        format.getRecordReader(splits[0], job, reporter);
-
-      // create key/value
-      Void key = reader.createKey();
-      FlatFileInputFormat.RowContainer<Writable> value = reader.createValue();
-      
-      //
-      // read back the data using the FlatFileRecordReader
-      //
-      int count = 0;
-      while (reader.next(key, value)) {
-        assertTrue(key == null);
-        assertTrue(((RecordTestObj)value.row).getS().equals("Hello World! " +String.valueOf(count)));
-        assertTrue(((RecordTestObj)value.row).getNum() == count);
-        count++;
-      }
-      reader.close();
+          org.apache.hadoop.io.serializer.WritableSerialization.class,
+          org.apache.hadoop.io.serializer.Serialization.class);
 
-    } catch(Exception e) {
-      System.err.println("caught: " + e);
-      e.printStackTrace();
-    } finally {
-    }
+      job
+          .setClass(
+              FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
+              RecordTestObj.class, Writable.class);
 
-  }
-  /*
-  public void testFlatFileInputThrift() throws Exception {
-    Configuration conf;
-    JobConf job ;
-    FileSystem fs;
-    Path dir ;
-    Path file;
-    Reporter reporter;
-    FSDataOutputStream ds;
-
-    try {
-      //
-      // create job and filesystem and reporter and such.
-      //
-      conf = new Configuration();
-      job = new JobConf(conf);
-      fs = FileSystem.getLocal(conf);
-      dir = new Path(System.getProperty("test.data.dir",".") + "/mapred");
-      file = new Path(dir, "test.txt");
-      reporter = Reporter.NULL;
-      fs.delete(dir, true);
-
-      job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationImplKey,
-                   org.apache.hadoop.contrib.serialization.thrift.ThriftSerialization.class,
-                   org.apache.hadoop.io.serializer.Serialization.class);
-      
-      job.setClass(FlatFileInputFormat.SerializationContextFromConf.SerializationSubclassKey,
-                   FlatFileThriftTestObj.class, TBase.class);
-      
       //
       // Write some data out to a flat file
       //
       FileInputFormat.setInputPaths(job, dir);
       ds = fs.create(file);
-      Serializer serializer = new ThriftSerialization().getSerializer(TBase.class);
+      Serializer serializer = new WritableSerialization()
+          .getSerializer(Writable.class);
 
       // construct some data and write it
       serializer.open(ds);
       for (int i = 0; i < 10; i++) {
-        serializer.serialize(new FlatFileThriftTestObj("Hello World! " + String.valueOf(i), i));
+        serializer.serialize(new RecordTestObj("Hello World! "
+            + String.valueOf(i), i));
       }
       serializer.close();
 
       //
       // Construct the reader
       //
-      FileInputFormat<Void, FlatFileInputFormat.RowContainer<TBase>> format =
-        new FlatFileInputFormat<TBase>();
+      FileInputFormat<Void, FlatFileInputFormat.RowContainer<Writable>> format = new FlatFileInputFormat<Writable>();
       InputSplit[] splits = format.getSplits(job, 1);
 
       // construct the record reader
-      RecordReader<Void, FlatFileInputFormat.RowContainer<TBase>> reader =
-        format.getRecordReader(splits[0], job, reporter);
+      RecordReader<Void, FlatFileInputFormat.RowContainer<Writable>> reader = format
+          .getRecordReader(splits[0], job, reporter);
 
       // create key/value
       Void key = reader.createKey();
-      FlatFileInputFormat.RowContainer<TBase> value = reader.createValue();
-      
+      FlatFileInputFormat.RowContainer<Writable> value = reader.createValue();
+
       //
       // read back the data using the FlatFileRecordReader
       //
       int count = 0;
       while (reader.next(key, value)) {
         assertTrue(key == null);
-        assertTrue(((FlatFileThriftTestObj)value.row).s.equals("Hello World! " +String.valueOf(count)));
-        assertTrue(((FlatFileThriftTestObj)value.row).num == count);
+        assertTrue(((RecordTestObj) value.row).getS().equals(
+            "Hello World! " + String.valueOf(count)));
+        assertTrue(((RecordTestObj) value.row).getNum() == count);
         count++;
       }
       reader.close();
 
-    } catch(Exception e) {
+    } catch (Exception e) {
       System.err.println("caught: " + e);
       e.printStackTrace();
     } finally {
     }
 
   }
-  */
 
+  /*
+   * public void testFlatFileInputThrift() throws Exception { Configuration
+   * conf; JobConf job ; FileSystem fs; Path dir ; Path file; Reporter reporter;
+   * FSDataOutputStream ds;
+   * 
+   * try { // // create job and filesystem and reporter and such. // conf = new
+   * Configuration(); job = new JobConf(conf); fs = FileSystem.getLocal(conf);
+   * dir = new Path(System.getProperty("test.data.dir",".") + "/mapred"); file =
+   * new Path(dir, "test.txt"); reporter = Reporter.NULL; fs.delete(dir, true);
+   * 
+   * job.setClass(FlatFileInputFormat.SerializationContextFromConf.
+   * SerializationImplKey,
+   * org.apache.hadoop.contrib.serialization.thrift.ThriftSerialization.class,
+   * org.apache.hadoop.io.serializer.Serialization.class);
+   * 
+   * job.setClass(FlatFileInputFormat.SerializationContextFromConf.
+   * SerializationSubclassKey, FlatFileThriftTestObj.class, TBase.class);
+   * 
+   * // // Write some data out to a flat file //
+   * FileInputFormat.setInputPaths(job, dir); ds = fs.create(file); Serializer
+   * serializer = new ThriftSerialization().getSerializer(TBase.class);
+   * 
+   * // construct some data and write it serializer.open(ds); for (int i = 0; i
+   * < 10; i++) { serializer.serialize(new FlatFileThriftTestObj("Hello World! "
+   * + String.valueOf(i), i)); } serializer.close();
+   * 
+   * // // Construct the reader // FileInputFormat<Void,
+   * FlatFileInputFormat.RowContainer<TBase>> format = new
+   * FlatFileInputFormat<TBase>(); InputSplit[] splits = format.getSplits(job,
+   * 1);
+   * 
+   * // construct the record reader RecordReader<Void,
+   * FlatFileInputFormat.RowContainer<TBase>> reader =
+   * format.getRecordReader(splits[0], job, reporter);
+   * 
+   * // create key/value Void key = reader.createKey();
+   * FlatFileInputFormat.RowContainer<TBase> value = reader.createValue();
+   * 
+   * // // read back the data using the FlatFileRecordReader // int count = 0;
+   * while (reader.next(key, value)) { assertTrue(key == null);
+   * assertTrue(((FlatFileThriftTestObj)value.row).s.equals("Hello World! "
+   * +String.valueOf(count))); assertTrue(((FlatFileThriftTestObj)value.row).num
+   * == count); count++; } reader.close();
+   * 
+   * } catch(Exception e) { System.err.println("caught: " + e);
+   * e.printStackTrace(); } finally { }
+   * 
+   * }
+   */
 
   public static void main(String[] args) throws Exception {
     new TestFlatFileInputFormat().testFlatFileInputJava();
     new TestFlatFileInputFormat().testFlatFileInputRecord();
-    //    new TestFlatFileInputFormat().testFlatFileInputThrift();
+    // new TestFlatFileInputFormat().testFlatFileInputThrift();
   }
 }

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java Thu Jan 21 10:37:58 2010
@@ -31,8 +31,8 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.serde.Constants;
-import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
+import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
@@ -97,7 +97,8 @@
       bytesArray = new byte[][] { "123".getBytes("UTF-8"),
           "456".getBytes("UTF-8"), "789".getBytes("UTF-8"),
           "1000".getBytes("UTF-8"), "5.3".getBytes("UTF-8"),
-          "hive and hadoop".getBytes("UTF-8"), new byte[0], "NULL".getBytes("UTF-8") };
+          "hive and hadoop".getBytes("UTF-8"), new byte[0],
+          "NULL".getBytes("UTF-8") };
       s = new BytesRefArrayWritable(bytesArray.length);
       s.set(0, new BytesRefWritable("123".getBytes("UTF-8")));
       s.set(1, new BytesRefWritable("456".getBytes("UTF-8")));
@@ -180,12 +181,14 @@
       assertEquals("Field size should be 8", 8, fieldRefs.size());
       for (int j = 0; j < fieldRefs.size(); j++) {
         Object fieldData = oi.getStructFieldData(row, fieldRefs.get(j));
-        Object standardWritableData = ObjectInspectorUtils.copyToStandardObject(fieldData, 
-            fieldRefs.get(j).getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
-        if (i == 0)
+        Object standardWritableData = ObjectInspectorUtils
+            .copyToStandardObject(fieldData, fieldRefs.get(j)
+                .getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
+        if (i == 0) {
           assertEquals("Field " + i, standardWritableData, expectedRecord_1[j]);
-        else
+        } else {
           assertEquals("Field " + i, standardWritableData, expectedRecord_2[j]);
+        }
       }
     }
 
@@ -307,12 +310,15 @@
       assertEquals("Field size should be 8", 8, fieldRefs.size());
       for (int i = 0; i < fieldRefs.size(); i++) {
         Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
-        Object standardWritableData = ObjectInspectorUtils.copyToStandardObject(fieldData, 
-            fieldRefs.get(i).getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
+        Object standardWritableData = ObjectInspectorUtils
+            .copyToStandardObject(fieldData, fieldRefs.get(i)
+                .getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
         assertEquals("Field " + i, standardWritableData, expectedFieldsData[i]);
       }
       // Serialize
-      assertEquals("Class of the serialized object should be BytesRefArrayWritable", BytesRefArrayWritable.class, serDe.getSerializedClass());
+      assertEquals(
+          "Class of the serialized object should be BytesRefArrayWritable",
+          BytesRefArrayWritable.class, serDe.getSerializedClass());
       BytesRefArrayWritable serializedText = (BytesRefArrayWritable) serDe
           .serialize(row, oi);
       assertEquals("Serialized data", s, serializedText);
@@ -337,7 +343,7 @@
 
     LongWritable rowID = new LongWritable();
     BytesRefArrayWritable cols = new BytesRefArrayWritable();
-    
+
     while (reader.next(rowID)) {
       reader.getCurrentRow(cols);
       cols.resetValid(8);
@@ -350,12 +356,16 @@
 
       for (int i : readCols) {
         Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
-        Object standardWritableData = ObjectInspectorUtils.copyToStandardObject(fieldData, 
-            fieldRefs.get(i).getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
-        assertEquals("Field " + i, standardWritableData, expectedPartitalFieldsData[i]);
+        Object standardWritableData = ObjectInspectorUtils
+            .copyToStandardObject(fieldData, fieldRefs.get(i)
+                .getFieldObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
+        assertEquals("Field " + i, standardWritableData,
+            expectedPartitalFieldsData[i]);
       }
 
-      assertEquals("Class of the serialized object should be BytesRefArrayWritable", BytesRefArrayWritable.class, serDe.getSerializedClass());
+      assertEquals(
+          "Class of the serialized object should be BytesRefArrayWritable",
+          BytesRefArrayWritable.class, serDe.getSerializedClass());
       BytesRefArrayWritable serializedBytes = (BytesRefArrayWritable) serDe
           .serialize(row, oi);
       assertEquals("Serialized data", patialS, serializedBytes);

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Thu Jan 21 10:37:58 2010
@@ -31,15 +31,14 @@
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer;
 import org.apache.hadoop.hive.serde2.thrift.test.Complex;
-import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.util.StringUtils;
-
 import org.apache.thrift.protocol.TBinaryProtocol;
 
 public class TestHive extends TestCase {
@@ -47,26 +46,32 @@
   private HiveConf hiveConf;
   private FileSystem fs;
 
+  @Override
   protected void setUp() throws Exception {
     super.setUp();
     hiveConf = new HiveConf(this.getClass());
     fs = FileSystem.get(hiveConf);
     try {
-      this.hm = Hive.get(hiveConf);
+      hm = Hive.get(hiveConf);
     } catch (Exception e) {
       System.err.println(StringUtils.stringifyException(e));
-      System.err.println("Unable to initialize Hive Metastore using configruation: \n " + hiveConf);
+      System.err
+          .println("Unable to initialize Hive Metastore using configruation: \n "
+              + hiveConf);
       throw e;
     }
   }
 
+  @Override
   protected void tearDown() throws Exception {
     try {
       super.tearDown();
       Hive.closeCurrent();
     } catch (Exception e) {
       System.err.println(StringUtils.stringifyException(e));
-      System.err.println("Unable to close Hive Metastore using configruation: \n " + hiveConf);
+      System.err
+          .println("Unable to close Hive Metastore using configruation: \n "
+              + hiveConf);
       throw e;
     }
   }
@@ -76,23 +81,27 @@
       // create a simple table and test create, drop, get
       String tableName = "table_for_testtable";
       try {
-        this.hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
       } catch (HiveException e1) {
         e1.printStackTrace();
         assertTrue("Unable to drop table", false);
       }
       Table tbl = new Table(tableName);
-      List<FieldSchema>  fields = tbl.getCols();
+      List<FieldSchema> fields = tbl.getCols();
 
-      fields.add(new FieldSchema("col1", Constants.INT_TYPE_NAME, "int -- first column"));
-      fields.add(new FieldSchema("col2", Constants.STRING_TYPE_NAME, "string -- second column"));
-      fields.add(new FieldSchema("col3", Constants.DOUBLE_TYPE_NAME, "double -- thrift column"));
+      fields.add(new FieldSchema("col1", Constants.INT_TYPE_NAME,
+          "int -- first column"));
+      fields.add(new FieldSchema("col2", Constants.STRING_TYPE_NAME,
+          "string -- second column"));
+      fields.add(new FieldSchema("col3", Constants.DOUBLE_TYPE_NAME,
+          "double -- thrift column"));
       tbl.setFields(fields);
 
       tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
       tbl.setInputFormatClass(SequenceFileInputFormat.class);
 
-      tbl.setProperty("comment", "this is a test table created as part junit tests");
+      tbl.setProperty("comment",
+          "this is a test table created as part junit tests");
 
       List<String> bucketCols = tbl.getBucketCols();
       bucketCols.add("col1");
@@ -103,16 +112,20 @@
         assertTrue("Unable to set bucket column for table: " + tableName, false);
       }
 
-      List<FieldSchema>  partCols = new ArrayList<FieldSchema>();
-      partCols.add(new FieldSchema("ds", Constants.STRING_TYPE_NAME, 
-          "partition column, date but in string format as date type is not yet supported in QL"));
+      List<FieldSchema> partCols = new ArrayList<FieldSchema>();
+      partCols
+          .add(new FieldSchema(
+              "ds",
+              Constants.STRING_TYPE_NAME,
+              "partition column, date but in string format as date type is not yet supported in QL"));
       tbl.setPartCols(partCols);
 
       tbl.setNumBuckets((short) 512);
       tbl.setOwner("pchakka");
       tbl.setRetention(10);
 
-      // set output format parameters (these are not supported by QL but only for demo purposes)
+      // set output format parameters (these are not supported by QL but only
+      // for demo purposes)
       tbl.setSerdeParam(Constants.FIELD_DELIM, "1");
       tbl.setSerdeParam(Constants.LINE_DELIM, "\n");
       tbl.setSerdeParam(Constants.MAPKEY_DELIM, "3");
@@ -135,23 +148,34 @@
       try {
         ft = hm.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
         ft.checkValidity();
-        assertEquals("Table names didn't match for table: " + tableName, tbl.getName(), ft.getName());
-        assertEquals("Table owners didn't match for table: " + tableName, tbl.getOwner(), ft.getOwner());
-        assertEquals("Table retention didn't match for table: " + tableName, tbl.getRetention(), ft.getRetention());
-        assertEquals("Data location is not set correctly", wh.getDefaultTablePath(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName).toString(), ft.getDataLocation().toString());
-        // now that URI is set correctly, set the original table's uri and then compare the two tables
+        assertEquals("Table names didn't match for table: " + tableName, tbl
+            .getName(), ft.getName());
+        assertEquals("Table owners didn't match for table: " + tableName, tbl
+            .getOwner(), ft.getOwner());
+        assertEquals("Table retention didn't match for table: " + tableName,
+            tbl.getRetention(), ft.getRetention());
+        assertEquals("Data location is not set correctly", wh
+            .getDefaultTablePath(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+                tableName).toString(), ft.getDataLocation().toString());
+        // now that URI is set correctly, set the original table's uri and then
+        // compare the two tables
         tbl.setDataLocation(ft.getDataLocation());
-        assertTrue("Tables doesn't match: " + tableName, ft.getTTable().equals(tbl.getTTable()));
-        assertEquals("Serde is not set correctly", tbl.getDeserializer().getClass().getName(), ft.getDeserializer().getClass().getName());
-        assertEquals("SerializationLib is not set correctly", tbl.getSerializationLib(), LazySimpleSerDe.class.getName());
+        assertTrue("Tables doesn't match: " + tableName, ft.getTTable().equals(
+            tbl.getTTable()));
+        assertEquals("Serde is not set correctly", tbl.getDeserializer()
+            .getClass().getName(), ft.getDeserializer().getClass().getName());
+        assertEquals("SerializationLib is not set correctly", tbl
+            .getSerializationLib(), LazySimpleSerDe.class.getName());
       } catch (HiveException e) {
         e.printStackTrace();
         assertTrue("Unable to fetch table correctly: " + tableName, false);
       }
 
       try {
-        hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, true, false);
-        Table ft2 = hm.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, false);
+        hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName, true,
+            false);
+        Table ft2 = hm.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+            tableName, false);
         assertNull("Unable to drop table ", ft2);
       } catch (HiveException e) {
         assertTrue("Unable to drop table: " + tableName, false);
@@ -165,13 +189,14 @@
 
   /**
    * Tests create and fetch of a thrift based table
-   * @throws Throwable 
+   * 
+   * @throws Throwable
    */
   public void testThriftTable() throws Throwable {
     String tableName = "table_for_test_thrifttable";
     try {
       try {
-        this.hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+        hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
       } catch (HiveException e1) {
         System.err.println(StringUtils.stringifyException(e1));
         assertTrue("Unable to drop table", false);
@@ -181,7 +206,8 @@
       tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
       tbl.setSerializationLib(ThriftDeserializer.class.getName());
       tbl.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
-      tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
+      tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class
+          .getName());
       try {
         hm.createTable(tbl);
       } catch (HiveException e) {
@@ -195,15 +221,24 @@
         ft = hm.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
         assertNotNull("Unable to fetch table", ft);
         ft.checkValidity();
-        assertEquals("Table names didn't match for table: " + tableName, tbl.getName(), ft.getName());
-        assertEquals("Table owners didn't match for table: " + tableName, tbl.getOwner(), ft.getOwner());
-        assertEquals("Table retention didn't match for table: " + tableName, tbl.getRetention(), ft.getRetention());
-        assertEquals("Data location is not set correctly", wh.getDefaultTablePath(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName).toString(), ft.getDataLocation().toString());
-        // now that URI is set correctly, set the original table's uri and then compare the two tables
+        assertEquals("Table names didn't match for table: " + tableName, tbl
+            .getName(), ft.getName());
+        assertEquals("Table owners didn't match for table: " + tableName, tbl
+            .getOwner(), ft.getOwner());
+        assertEquals("Table retention didn't match for table: " + tableName,
+            tbl.getRetention(), ft.getRetention());
+        assertEquals("Data location is not set correctly", wh
+            .getDefaultTablePath(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+                tableName).toString(), ft.getDataLocation().toString());
+        // now that URI is set correctly, set the original table's uri and then
+        // compare the two tables
         tbl.setDataLocation(ft.getDataLocation());
-        assertTrue("Tables  doesn't match: " + tableName, ft.getTTable().equals(tbl.getTTable()));
-        assertEquals("SerializationLib is not set correctly", tbl.getSerializationLib(), ThriftDeserializer.class.getName());
-        assertEquals("Serde is not set correctly", tbl.getDeserializer().getClass().getName(), ft.getDeserializer().getClass().getName());
+        assertTrue("Tables  doesn't match: " + tableName, ft.getTTable()
+            .equals(tbl.getTTable()));
+        assertEquals("SerializationLib is not set correctly", tbl
+            .getSerializationLib(), ThriftDeserializer.class.getName());
+        assertEquals("Serde is not set correctly", tbl.getDeserializer()
+            .getClass().getName(), ft.getDeserializer().getClass().getName());
       } catch (HiveException e) {
         System.err.println(StringUtils.stringifyException(e));
         assertTrue("Unable to fetch table correctly: " + tableName, false);
@@ -216,14 +251,16 @@
     }
   }
 
-  private static Table createTestTable(String dbName, String tableName) throws HiveException {
+  private static Table createTestTable(String dbName, String tableName)
+      throws HiveException {
     Table tbl = new Table(tableName);
     tbl.getTTable().setDbName(dbName);
     tbl.setInputFormatClass(SequenceFileInputFormat.class.getName());
     tbl.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
     tbl.setSerializationLib(ThriftDeserializer.class.getName());
     tbl.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
-    tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
+    tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class
+        .getName());
     return tbl;
   }
 
@@ -250,17 +287,17 @@
       fts = hm.getTablesForDb(dbName, ".*1");
       assertEquals(1, fts.size());
       assertEquals(ts.get(0), fts.get(0));
-      
-      //also test getting a table from a specific db
+
+      // also test getting a table from a specific db
       Table table1 = hm.getTable(dbName, table1Name);
       assertNotNull(table1);
       assertEquals(table1Name, table1.getName());
-      
+
       assertTrue(fs.exists(table1.getPath()));
-      //and test dropping this specific table
+      // and test dropping this specific table
       hm.dropTable(dbName, table1Name);
       assertFalse(fs.exists(table1.getPath()));
-      
+
       hm.dropDatabase(dbName);
     } catch (Throwable e) {
       System.err.println(StringUtils.stringifyException(e));
@@ -286,7 +323,8 @@
       part_cols.add("ds");
       part_cols.add("hr");
       try {
-        hm.createTable(tableName, cols, part_cols, TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class);
+        hm.createTable(tableName, cols, part_cols, TextInputFormat.class,
+            HiveIgnoreKeyTextOutputFormat.class);
       } catch (HiveException e) {
         System.err.println(StringUtils.stringifyException(e));
         assertTrue("Unable to create table: " + tableName, false);

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java Thu Jan 21 10:37:58 2010
@@ -16,8 +16,6 @@
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.mapred.TextOutputFormat;
-
 import org.apache.thrift.TException;
 
 public class TestHiveMetaStoreChecker extends TestCase {
@@ -26,11 +24,11 @@
   private FileSystem fs;
   private HiveMetaStoreChecker checker = null;
 
-  private String dbName = "dbname";
-  private String tableName = "tablename";
+  private final String dbName = "dbname";
+  private final String tableName = "tablename";
 
-  private String partDateName = "partdate";
-  private String partCityName = "partcity";
+  private final String partDateName = "partdate";
+  private final String partCityName = "partcity";
 
   private List<FieldSchema> partCols;
   private List<Map<String, String>> parts;
@@ -42,10 +40,8 @@
     checker = new HiveMetaStoreChecker(hive);
 
     partCols = new ArrayList<FieldSchema>();
-    partCols.add(new FieldSchema(partDateName, Constants.STRING_TYPE_NAME, 
-        ""));
-    partCols.add(new FieldSchema(partCityName, Constants.STRING_TYPE_NAME, 
-        ""));
+    partCols.add(new FieldSchema(partDateName, Constants.STRING_TYPE_NAME, ""));
+    partCols.add(new FieldSchema(partCityName, Constants.STRING_TYPE_NAME, ""));
 
     parts = new ArrayList<Map<String, String>>();
     Map<String, String> part1 = new HashMap<String, String>();
@@ -140,13 +136,13 @@
     assertTrue(result.getTablesNotOnFs().isEmpty());
     assertTrue(result.getPartitionsNotOnFs().isEmpty());
     assertTrue(result.getPartitionsNotInMs().isEmpty());
-    
-    //create a new external table
+
+    // create a new external table
     hive.dropTable(dbName, tableName);
     table.setProperty("EXTERNAL", "TRUE");
     hive.createTable(table);
-    
-    //should return all ok
+
+    // should return all ok
     result = new CheckResult();
     checker.checkMetastore(dbName, null, null, result);
     assertTrue(result.getTablesNotInMs().isEmpty());
@@ -188,7 +184,7 @@
     fs = partToRemovePath.getFileSystem(hive.getConf());
     fs.delete(partToRemovePath, true);
 
-    result = new CheckResult();    
+    result = new CheckResult();
     checker.checkMetastore(dbName, tableName, null, result);
     // missing one partition on fs
     assertTrue(result.getTablesNotInMs().isEmpty());
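
Aside from whitespace, the hunk above documents the checker's calling convention: a CheckResult is filled in place by checkMetastore, and the test asserts on its four accessors. Below is a minimal sketch of driving the checker directly; the database and table names are illustrative, and only the accessors that appear in the test are touched. (The test also passes null for the table name in places; this sketch checks one named table.)

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.metadata.CheckResult;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreChecker;

    public class CheckerSketch {
      public static void main(String[] args) throws Exception {
        Hive hive = Hive.get(new HiveConf(CheckerSketch.class));
        HiveMetaStoreChecker checker = new HiveMetaStoreChecker(hive);

        // The checker fills this result object in place.
        CheckResult result = new CheckResult();
        checker.checkMetastore("default", "some_table", null, result);

        // The four buckets inspected by the assertions in the test.
        System.out.println("tables missing in metastore:  " + result.getTablesNotInMs());
        System.out.println("tables missing on filesystem: " + result.getTablesNotOnFs());
        System.out.println("partitions missing on fs:     " + result.getPartitionsNotOnFs());
        System.out.println("partitions missing in ms:     " + result.getPartitionsNotInMs());
      }
    }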

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestPartition.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestPartition.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestPartition.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestPartition.java Thu Jan 21 10:37:58 2010
@@ -27,25 +27,26 @@
   public void testPartition() throws HiveException, URISyntaxException {
     StorageDescriptor sd = new StorageDescriptor();
     sd.setLocation("partlocation");
-    
+
     Partition tp = new Partition();
     tp.setTableName(TABLENAME);
     tp.setSd(sd);
-    
+
     List<String> values = new ArrayList<String>();
     values.add(PARTITION_VALUE);
     tp.setValues(values);
-    
+
     List<FieldSchema> partCols = new ArrayList<FieldSchema>();
     partCols.add(new FieldSchema(PARTITION_COL, "string", ""));
-    
+
     Table tbl = new Table(TABLENAME);
     tbl.setDataLocation(new URI("tmplocation"));
     tbl.setPartCols(partCols);
-    
-    Map<String, String> spec = new org.apache.hadoop.hive.ql.metadata.Partition(tbl, tp).getSpec();
+
+    Map<String, String> spec = new org.apache.hadoop.hive.ql.metadata.Partition(
+        tbl, tp).getSpec();
     assertFalse(spec.isEmpty());
     assertEquals(spec.get(PARTITION_COL), PARTITION_VALUE);
   }
-  
+
 }
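
The test above builds a thrift-level Partition plus a ql-level Table and asks the ql Partition wrapper for its spec, which pairs each partition column with its value. The same construction as a standalone sketch; the table name, column name, value, and locations are placeholders standing in for the test's constants.

    import java.net.URI;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.metastore.api.Partition;
    import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
    import org.apache.hadoop.hive.ql.metadata.Table;

    public class PartitionSpecSketch {
      public static void main(String[] args) throws Exception {
        // Thrift-level partition pointing at an illustrative location.
        StorageDescriptor sd = new StorageDescriptor();
        sd.setLocation("partlocation");

        Partition tp = new Partition();
        tp.setTableName("demo_table");
        tp.setSd(sd);

        List<String> values = new ArrayList<String>();
        values.add("value1");
        tp.setValues(values);

        // ql-level table declaring the single partition column.
        List<FieldSchema> partCols = new ArrayList<FieldSchema>();
        partCols.add(new FieldSchema("ds", "string", ""));

        Table tbl = new Table("demo_table");
        tbl.setDataLocation(new URI("tmplocation"));
        tbl.setPartCols(partCols);

        // getSpec() maps partition column to value: prints {ds=value1}.
        Map<String, String> spec =
            new org.apache.hadoop.hive.ql.metadata.Partition(tbl, tp).getSpec();
        System.out.println(spec);
      }
    }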

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/tool/TestLineageInfo.java Thu Jan 21 10:37:58 2010
@@ -20,10 +20,10 @@
 
 import java.util.TreeSet;
 
-import org.apache.hadoop.hive.ql.tools.LineageInfo;
-
 import junit.framework.TestCase;
 
+import org.apache.hadoop.hive.ql.tools.LineageInfo;
+
 public class TestLineageInfo extends TestCase {
 
   /**

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java Thu Jan 21 10:37:58 2010
@@ -28,8 +28,6 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
-
-
 public class UDAFTestMax extends UDAF {
 
   static public class MaxShortEvaluator implements UDAFEvaluator {
@@ -266,5 +264,4 @@
     }
   }
 
-
 }
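
The UDAFTestMax hunk only drops blank lines, but the class it touches follows the old-style UDAF pattern: a UDAF container holding public static evaluator classes that implement UDAFEvaluator. As a reminder of that contract, here is a hedged sketch of one evaluator; the iterate/terminatePartial/merge/terminate names follow the reflective convention that style expects, and the IntWritable variant shown is only an assumption about what the test's evaluators look like.

    import org.apache.hadoop.hive.ql.exec.UDAF;
    import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
    import org.apache.hadoop.io.IntWritable;

    public class UDAFMaxIntSketch extends UDAF {

      public static class MaxIntEvaluator implements UDAFEvaluator {
        private int max;
        private boolean empty;

        public MaxIntEvaluator() {
          init();
        }

        // Reset the aggregation state.
        public void init() {
          max = 0;
          empty = true;
        }

        // Consume one row.
        public boolean iterate(IntWritable o) {
          if (o != null) {
            max = empty ? o.get() : Math.max(max, o.get());
            empty = false;
          }
          return true;
        }

        // Partial result handed to the merge phase.
        public IntWritable terminatePartial() {
          return empty ? null : new IntWritable(max);
        }

        // Fold in a partial result from another task.
        public boolean merge(IntWritable o) {
          return iterate(o);
        }

        // Final result.
        public IntWritable terminate() {
          return empty ? null : new IntWritable(max);
        }
      }
    }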

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java Thu Jan 21 10:37:58 2010
@@ -26,8 +26,9 @@
  * A UDF for testing, which evaluates the length of a string.
  */
 public class UDFTestLength extends UDF {
-  
+
   IntWritable result = new IntWritable();
+
   public IntWritable evaluate(Text s) {
     if (s == null) {
       return null;
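
The UDFTestLength hunk separates the reused IntWritable field from evaluate(). The point of that field is to avoid allocating a new Writable on every row. A sketch of the full pattern, assuming the UDF simply returns the character count; the body of the real test UDF is not shown in this hunk.

    import org.apache.hadoop.hive.ql.exec.UDF;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;

    public class UDFLengthSketch extends UDF {

      // A single IntWritable is reused across calls, so evaluating a row
      // does not allocate a new result object each time.
      private final IntWritable result = new IntWritable();

      public IntWritable evaluate(Text s) {
        if (s == null) {
          return null;
        }
        // Character count of the value; the real test UDF may differ.
        result.set(s.toString().length());
        return result;
      }
    }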

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java Thu Jan 21 10:37:58 2010
@@ -19,15 +19,13 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
 
 /**
- * A UDF for testing, which evaluates the length of a string.
- * This UDF uses Java Primitive classes for parameters.
+ * A UDF for testing, which evaluates the length of a string. This UDF uses Java
+ * Primitive classes for parameters.
  */
 public class UDFTestLength2 extends UDF {
-  
+
   public Integer evaluate(String s) {
     if (s == null) {
       return null;
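
UDFTestLength2, by contrast, exercises the Java-object calling convention (String in, Integer out) rather than Writables, as its updated javadoc says. A minimal sketch of that variant, with the same caveat that the method body is assumed:

    import org.apache.hadoop.hive.ql.exec.UDF;

    public class UDFLength2Sketch extends UDF {

      // Java-object variant: Hive boxes and unboxes for us, at the cost of
      // a new Integer per row compared with the reusable-Writable version.
      public Integer evaluate(String s) {
        if (s == null) {
          return null;
        }
        return s.length();
      }
    }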

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java Thu Jan 21 10:37:58 2010
@@ -25,7 +25,6 @@
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -33,7 +32,7 @@
 import org.apache.hadoop.io.Text;
 
 /**
- * Mimics oracle's function translate(str1, str2, str3) 
+ * Mimics oracle's function translate(str1, str2, str3)
  */
 public class GenericUDFTestTranslate extends GenericUDF {
   ObjectInspector[] argumentOIs;
@@ -43,43 +42,48 @@
    */
   static String getOrdinal(int i) {
     int unit = i % 10;
-    return (i <= 0) ?  ""
-        : (i != 11 && unit == 1) ?  i + "st"
-        : (i != 12 && unit == 2) ?  i + "nd"
-        : (i != 13 && unit == 3) ?  i + "rd"
-        : i + "th";
+    return (i <= 0) ? "" : (i != 11 && unit == 1) ? i + "st"
+        : (i != 12 && unit == 2) ? i + "nd" : (i != 13 && unit == 3) ? i + "rd"
+            : i + "th";
   }
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments)
       throws UDFArgumentException {
-    if(arguments.length != 3) {
+    if (arguments.length != 3) {
       throw new UDFArgumentLengthException(
-          "The function TRANSLATE(expr,from_string,to_string) accepts exactly 3 arguments, but " 
-          + arguments.length + " arguments is found.");
+          "The function TRANSLATE(expr,from_string,to_string) accepts exactly 3 arguments, but "
+              + arguments.length + " arguments is found.");
     }
 
-    for(int i = 0; i < 3; i++) {
-      if(arguments[i].getTypeName() != Constants.STRING_TYPE_NAME
+    for (int i = 0; i < 3; i++) {
+      if (arguments[i].getTypeName() != Constants.STRING_TYPE_NAME
           && arguments[i].getTypeName() != Constants.VOID_TYPE_NAME) {
-        throw new UDFArgumentTypeException(i,
-            "The " + getOrdinal(i + 1) + " argument of function TRANSLATE is expected to \"" + Constants.STRING_TYPE_NAME
-            + "\", but \"" + arguments[i].getTypeName() + "\" is found");
+        throw new UDFArgumentTypeException(i, "The " + getOrdinal(i + 1)
+            + " argument of function TRANSLATE is expected to \""
+            + Constants.STRING_TYPE_NAME + "\", but \""
+            + arguments[i].getTypeName() + "\" is found");
       }
     }
-    
-    this.argumentOIs = arguments;
+
+    argumentOIs = arguments;
     return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
   }
 
-  private Text resultText = new Text();
+  private final Text resultText = new Text();
+
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    if(arguments[0].get() == null || arguments[1].get() == null || arguments[2].get() == null)
+    if (arguments[0].get() == null || arguments[1].get() == null
+        || arguments[2].get() == null) {
       return null;
-    String exprString = ((StringObjectInspector)argumentOIs[0]).getPrimitiveJavaObject(arguments[0].get());
-    String fromString = ((StringObjectInspector)argumentOIs[1]).getPrimitiveJavaObject(arguments[1].get());
-    String toString = ((StringObjectInspector)argumentOIs[2]).getPrimitiveJavaObject(arguments[2].get());
+    }
+    String exprString = ((StringObjectInspector) argumentOIs[0])
+        .getPrimitiveJavaObject(arguments[0].get());
+    String fromString = ((StringObjectInspector) argumentOIs[1])
+        .getPrimitiveJavaObject(arguments[1].get());
+    String toString = ((StringObjectInspector) argumentOIs[2])
+        .getPrimitiveJavaObject(arguments[2].get());
 
     char[] expr = exprString.toCharArray();
     char[] from = fromString.toCharArray();
@@ -88,21 +92,23 @@
     System.arraycopy(expr, 0, result, 0, expr.length);
     Set<Character> seen = new HashSet<Character>();
 
-    for(int i = 0; i < from.length; i++) {
-      if(seen.contains(from[i])) 
+    for (int i = 0; i < from.length; i++) {
+      if (seen.contains(from[i])) {
         continue;
+      }
       seen.add(from[i]);
-      for(int j = 0; j < expr.length; j++) {
-        if(expr[j] == from[i]) {
+      for (int j = 0; j < expr.length; j++) {
+        if (expr[j] == from[i]) {
           result[j] = (i < to.length) ? to[i] : 0;
         }
       }
     }
 
     int pos = 0;
-    for(int i = 0; i < result.length; i++) {
-      if(result[i] != 0)
-        result[pos ++] = result[i];
+    for (int i = 0; i < result.length; i++) {
+      if (result[i] != 0) {
+        result[pos++] = result[i];
+      }
     }
     resultText.set(new String(result, 0, pos));
     return resultText;
@@ -110,7 +116,8 @@
 
   @Override
   public String getDisplayString(String[] children) {
-    assert(children.length == 3);
-    return "translate(" + children[0] + "," + children[1] + "," + children[2] + ")";
+    assert (children.length == 3);
+    return "translate(" + children[0] + "," + children[1] + "," + children[2]
+        + ")";
   }
 }
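
For readers of the reformatted evaluate() above, the translate semantics are easier to see without the ObjectInspector plumbing: each character of from maps to the character at the same index of to, characters of from past the end of to are deleted, and duplicate characters in from keep their first mapping. The following is a standalone restatement of the same algorithm on plain Strings, with a small made-up example in main.

    import java.util.HashSet;
    import java.util.Set;

    public class TranslateSketch {

      // Same algorithm as evaluate() above, on plain Strings.
      static String translate(String exprString, String fromString, String toString) {
        char[] expr = exprString.toCharArray();
        char[] from = fromString.toCharArray();
        char[] to = toString.toCharArray();
        char[] result = new char[expr.length];
        System.arraycopy(expr, 0, result, 0, expr.length);

        Set<Character> seen = new HashSet<Character>();
        for (int i = 0; i < from.length; i++) {
          if (seen.contains(from[i])) {
            continue;
          }
          seen.add(from[i]);
          for (int j = 0; j < expr.length; j++) {
            if (expr[j] == from[i]) {
              // Map to the matching char of 'to', or mark for deletion.
              result[j] = (i < to.length) ? to[i] : 0;
            }
          }
        }

        // Compact the buffer, dropping the deleted positions.
        int pos = 0;
        for (int i = 0; i < result.length; i++) {
          if (result[i] != 0) {
            result[pos++] = result[i];
          }
        }
        return new String(result, 0, pos);
      }

      public static void main(String[] args) {
        // 'l' -> '0'; 'o' has no counterpart in "0" and is dropped.
        System.out.println(translate("hello world", "lo", "0")); // he00 wr0d
      }
    }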

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java Thu Jan 21 10:37:58 2010
@@ -18,17 +18,18 @@
 
 package org.apache.hadoop.hive.scripts;
 
-import java.io.*;
-
-import java.util.HashMap;
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 public class extracturl {
 
-  protected static final Pattern pattern = Pattern.compile("<a href=\"http://([\\w\\d]+\\.html)\">link</a>", Pattern.CASE_INSENSITIVE);
-  static InputStreamReader converter = new InputStreamReader (System.in);
-  static BufferedReader   in = new BufferedReader (converter);
+  protected static final Pattern pattern = Pattern.compile(
+      "<a href=\"http://([\\w\\d]+\\.html)\">link</a>",
+      Pattern.CASE_INSENSITIVE);
+  static InputStreamReader converter = new InputStreamReader(System.in);
+  static BufferedReader in = new BufferedReader(converter);
 
   public static void main(String[] args) {
     String input;
@@ -36,15 +37,14 @@
       while ((input = in.readLine()) != null) {
         Matcher m = pattern.matcher(input);
 
-        while(m.find()) {
+        while (m.find()) {
           String url = input.substring(m.start(1), m.end(1));
           System.out.println(url + "\t" + "1");
-        } 
+        }
       }
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       e.printStackTrace();
       System.exit(1);
     }
-  } 
+  }
 }
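
extracturl is a small streaming program, presumably used as a map script by the query tests: it scans stdin for anchors matching the hard-coded pattern and prints each captured page name with a count of 1. The snippet below applies the same pattern to a made-up input string, so the expected output shape is visible without piping anything through stdin.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ExtractUrlSketch {
      public static void main(String[] args) {
        // Same pattern as extracturl above; group 1 captures the page name.
        Pattern pattern = Pattern.compile(
            "<a href=\"http://([\\w\\d]+\\.html)\">link</a>",
            Pattern.CASE_INSENSITIVE);

        String input = "<A HREF=\"http://page1.html\">link</A> and "
            + "<a href=\"http://page2.html\">link</a>";

        Matcher m = pattern.matcher(input);
        while (m.find()) {
          // Prints e.g. "page1.html<TAB>1", one line per match.
          System.out.println(m.group(1) + "\t" + "1");
        }
      }
    }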

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/TestSerDe.java Thu Jan 21 10:37:58 2010
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.serde2;
 
-import java.io.UnsupportedEncodingException;
 import java.nio.charset.CharacterCodingException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -37,9 +36,6 @@
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.SerDe;
-import org.apache.hadoop.hive.serde2.SerDeException;
 
 public class TestSerDe implements SerDe {
 
@@ -49,7 +45,6 @@
     return shortName();
   }
 
-
   public static String shortName() {
     return "test_meta";
   }
@@ -60,20 +55,22 @@
     try {
       SerDeUtils.registerSerDe(shortName(), Class.forName(className));
       // For backward compatibility: this class replaces the following class.
-      SerDeUtils.registerSerDe("org.apache.hadoop.hive.serde.TestSerDe", Class.forName(className));
-    } catch(Exception e) {
+      SerDeUtils.registerSerDe("org.apache.hadoop.hive.serde.TestSerDe", Class
+          .forName(className));
+    } catch (Exception e) {
       throw new RuntimeException(e);
     }
   }
-  
+
   final public static String DefaultSeparator = "\002";
 
   private String separator;
   // constant for now, will make it configurable later.
-  private String nullString = "\\N"; 
+  private final String nullString = "\\N";
   private List<String> columnNames;
   private ObjectInspector cachedObjectInspector;
 
+  @Override
   public String toString() {
     return "TestSerDe[" + separator + "," + columnNames + "]";
   }
@@ -82,15 +79,16 @@
     separator = DefaultSeparator;
   }
 
-  public void initialize(Configuration job, Properties tbl) throws SerDeException {
+  public void initialize(Configuration job, Properties tbl)
+      throws SerDeException {
     separator = DefaultSeparator;
     String alt_sep = tbl.getProperty("testserde.default.serialization.format");
-    if(alt_sep != null && alt_sep.length() > 0) {
+    if (alt_sep != null && alt_sep.length() > 0) {
       try {
-        byte b [] = new byte[1];
+        byte b[] = new byte[1];
         b[0] = Byte.valueOf(alt_sep).byteValue();
         separator = new String(b);
-      } catch(NumberFormatException e) {
+      } catch (NumberFormatException e) {
         separator = alt_sep;
       }
     }
@@ -98,25 +96,29 @@
     String columnProperty = tbl.getProperty("columns");
     if (columnProperty == null || columnProperty.length() == 0) {
       // Hack for tables with no columns
-      // Treat it as a table with a single column called "col" 
-      cachedObjectInspector = ObjectInspectorFactory.getReflectionObjectInspector(
-          ColumnSet.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+      // Treat it as a table with a single column called "col"
+      cachedObjectInspector = ObjectInspectorFactory
+          .getReflectionObjectInspector(ColumnSet.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     } else {
       columnNames = Arrays.asList(columnProperty.split(","));
-      cachedObjectInspector = MetadataListStructObjectInspector.getInstance(columnNames);
+      cachedObjectInspector = MetadataListStructObjectInspector
+          .getInstance(columnNames);
     }
-    LOG.info(getClass().getName() + ": initialized with columnNames: " + columnNames );
+    LOG.info(getClass().getName() + ": initialized with columnNames: "
+        + columnNames);
   }
 
-  public static Object deserialize(ColumnSet c, String row, String sep, String nullString) throws Exception {
+  public static Object deserialize(ColumnSet c, String row, String sep,
+      String nullString) throws Exception {
     if (c.col == null) {
       c.col = new ArrayList<String>();
     } else {
       c.col.clear();
     }
-    String [] l1 = row.split(sep, -1);
+    String[] l1 = row.split(sep, -1);
 
-    for(String s: l1) {
+    for (String s : l1) {
       if (s.equals(nullString)) {
         c.col.add(null);
       } else {
@@ -125,12 +127,13 @@
     }
     return (c);
   }
-  
+
   ColumnSet deserializeCache = new ColumnSet();
+
   public Object deserialize(Writable field) throws SerDeException {
     String row = null;
     if (field instanceof BytesWritable) {
-      BytesWritable b = (BytesWritable)field;
+      BytesWritable b = (BytesWritable) field;
       try {
         row = Text.decode(b.get(), 0, b.getSize());
       } catch (CharacterCodingException e) {
@@ -142,17 +145,17 @@
     try {
       deserialize(deserializeCache, row, separator, nullString);
       if (columnNames != null) {
-        assert(columnNames.size() == deserializeCache.col.size());
+        assert (columnNames.size() == deserializeCache.col.size());
       }
       return deserializeCache;
     } catch (ClassCastException e) {
-      throw new SerDeException( this.getClass().getName() + " expects Text or BytesWritable", e);
+      throw new SerDeException(this.getClass().getName()
+          + " expects Text or BytesWritable", e);
     } catch (Exception e) {
       throw new SerDeException(e);
     }
   }
-  
-  
+
   public ObjectInspector getObjectInspector() throws SerDeException {
     return cachedObjectInspector;
   }
@@ -160,27 +163,33 @@
   public Class<? extends Writable> getSerializedClass() {
     return Text.class;
   }
-  
+
   Text serializeCache = new Text();
-  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
+
+  public Writable serialize(Object obj, ObjectInspector objInspector)
+      throws SerDeException {
 
     if (objInspector.getCategory() != Category.STRUCT) {
-      throw new SerDeException(getClass().toString() 
-          + " can only serialize struct types, but we got: " + objInspector.getTypeName());
+      throw new SerDeException(getClass().toString()
+          + " can only serialize struct types, but we got: "
+          + objInspector.getTypeName());
     }
     StructObjectInspector soi = (StructObjectInspector) objInspector;
     List<? extends StructField> fields = soi.getAllStructFieldRefs();
-    
+
     StringBuilder sb = new StringBuilder();
-    for(int i=0; i<fields.size(); i++) {
-      if (i>0) sb.append(separator);
+    for (int i = 0; i < fields.size(); i++) {
+      if (i > 0) {
+        sb.append(separator);
+      }
       Object column = soi.getStructFieldData(obj, fields.get(i));
       if (fields.get(i).getFieldObjectInspector().getCategory() == Category.PRIMITIVE) {
         // For primitive object, serialize to plain string
         sb.append(column == null ? nullString : column.toString());
       } else {
         // For complex object, serialize to JSON format
-        sb.append(SerDeUtils.getJSONString(column, fields.get(i).getFieldObjectInspector()));
+        sb.append(SerDeUtils.getJSONString(column, fields.get(i)
+            .getFieldObjectInspector()));
       }
     }
     serializeCache.set(sb.toString());