Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/10/19 04:27:38 UTC

svn commit: r1399950 [7/17] - in /hadoop/common/branches/HDFS-2802/hadoop-common-project: hadoop-annotations/ hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/ hadoop-auth-examples/ hadoop-auth/ hadoop-auth/src/main/java/org/apac...

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java Fri Oct 19 02:25:55 2012
@@ -164,16 +164,18 @@ public abstract class AbstractMapWritabl
   }
 
   /** @return the conf */
+  @Override
   public Configuration getConf() {
     return conf.get();
   }
 
   /** @param conf the conf to set */
+  @Override
   public void setConf(Configuration conf) {
     this.conf.set(conf);
   }
   
-  /** {@inheritDoc} */
+  @Override
   public void write(DataOutput out) throws IOException {
     
     // First write out the size of the class table and any classes that are
@@ -187,7 +189,7 @@ public abstract class AbstractMapWritabl
     }
   }
   
-  /** {@inheritDoc} */
+  @Override
   public void readFields(DataInput in) throws IOException {
     
     // Get the number of "unknown" classes
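
The swap from "/** {@inheritDoc} */" to @Override above is safe because, since Java 6, @Override may be applied to implementations of interface methods, and it adds a compile-time check that a bare javadoc comment cannot: a typo in the method name or signature becomes a compile error instead of silently introducing a new method. A minimal sketch of the pattern, using a hypothetical PointWritable that is not part of this commit:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

// Hypothetical class for illustration only.
public class PointWritable implements Writable {
  private int x;
  private int y;

  @Override  // checked by the compiler against Writable.write
  public void write(DataOutput out) throws IOException {
    out.writeInt(x);
    out.writeInt(y);
  }

  @Override  // e.g. misspelling this as "readfields" would no longer compile
  public void readFields(DataInput in) throws IOException {
    x = in.readInt();
    y = in.readInt();
  }
}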

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayWritable.java Fri Oct 19 02:25:55 2012
@@ -88,6 +88,7 @@ public class ArrayWritable implements Wr
 
   public Writable[] get() { return values; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     values = new Writable[in.readInt()];          // construct values
     for (int i = 0; i < values.length; i++) {
@@ -97,6 +98,7 @@ public class ArrayWritable implements Wr
     }
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(values.length);                 // write values
     for (int i = 0; i < values.length; i++) {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java Fri Oct 19 02:25:55 2012
@@ -57,12 +57,14 @@ public class BooleanWritable implements 
 
   /**
    */
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readBoolean();
   }
 
   /**
    */
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeBoolean(value);
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java Fri Oct 19 02:25:55 2012
@@ -39,10 +39,12 @@ public class ByteWritable implements Wri
   /** Return the value of this ByteWritable. */
   public byte get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readByte();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeByte(value);
   }
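
The readFields/write pair above is the entire Writable wire contract for ByteWritable: one byte out, the same byte back in. A round-trip sketch using Hadoop's DataOutputBuffer and DataInputBuffer (the demo class itself is hypothetical):

import java.io.IOException;

import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class ByteWritableRoundTrip {
  public static void main(String[] args) throws IOException {
    ByteWritable original = new ByteWritable((byte) 42);

    DataOutputBuffer out = new DataOutputBuffer();
    original.write(out);                        // serialize: one byte

    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());   // read back what was written

    ByteWritable copy = new ByteWritable();
    copy.readFields(in);                        // deserialize
    System.out.println(copy.get());             // 42
  }
}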

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java Fri Oct 19 02:25:55 2012
@@ -81,6 +81,7 @@ public class BytesWritable extends Binar
    * if you need the returned array to be precisely the length of the data.
    * @return The data is only valid between 0 and getLength() - 1.
    */
+  @Override
   public byte[] getBytes() {
     return bytes;
   }
@@ -97,6 +98,7 @@ public class BytesWritable extends Binar
   /**
    * Get the current size of the buffer.
    */
+  @Override
   public int getLength() {
     return size;
   }
@@ -171,6 +173,7 @@ public class BytesWritable extends Binar
   }
 
   // inherit javadoc
+  @Override
   public void readFields(DataInput in) throws IOException {
     setSize(0); // clear the old data
     setSize(in.readInt());
@@ -178,6 +181,7 @@ public class BytesWritable extends Binar
   }
   
   // inherit javadoc
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(size);
     out.write(bytes, 0, size);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java Fri Oct 19 02:25:55 2012
@@ -45,6 +45,7 @@ public abstract class CompressedWritable
 
   public CompressedWritable() {}
 
+  @Override
   public final void readFields(DataInput in) throws IOException {
     compressed = new byte[in.readInt()];
     in.readFully(compressed, 0, compressed.length);
@@ -70,6 +71,7 @@ public abstract class CompressedWritable
   protected abstract void readFieldsCompressed(DataInput in)
     throws IOException;
 
+  @Override
   public final void write(DataOutput out) throws IOException {
     if (compressed == null) {
       ByteArrayOutputStream deflated = new ByteArrayOutputStream();
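
CompressedWritable keeps readFields() and write() final and routes subclasses through the abstract readFieldsCompressed()/writeCompressed() pair, so the payload stays deflated until first use. A hedged sketch of a hypothetical subclass, assuming only the protected ensureInflated() hook this class provides:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.CompressedWritable;

// Hypothetical subclass: a text blob that stays compressed until accessed.
public class BlobWritable extends CompressedWritable {
  private String text = "";

  @Override
  protected void writeCompressed(DataOutput out) throws IOException {
    out.writeUTF(text);   // the base class wraps this in a Deflater stream
  }

  @Override
  protected void readFieldsCompressed(DataInput in) throws IOException {
    text = in.readUTF();  // invoked lazily, after inflation
  }

  public String getText() {
    ensureInflated();     // triggers readFieldsCompressed() on first access
    return text;
  }

  public void setText(String text) {
    this.text = text;
  }
}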

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputByteBuffer.java Fri Oct 19 02:25:55 2012
@@ -21,8 +21,6 @@ package org.apache.hadoop.io;
 import java.io.DataInputStream;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
-import java.util.LinkedList;
-import java.util.List;
 
 public class DataInputByteBuffer extends DataInputStream {
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java Fri Oct 19 02:25:55 2012
@@ -72,6 +72,7 @@ public class DefaultStringifier<T> imple
     }
   }
 
+  @Override
   public T fromString(String str) throws IOException {
     try {
       byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8"));
@@ -83,6 +84,7 @@ public class DefaultStringifier<T> imple
     }
   }
 
+  @Override
   public String toString(T obj) throws IOException {
     outBuf.reset();
     serializer.serialize(obj);
@@ -91,6 +93,7 @@ public class DefaultStringifier<T> imple
     return new String(Base64.encodeBase64(buf));
   }
 
+  @Override
   public void close() throws IOException {
     inBuf.close();
     outBuf.close();
@@ -189,7 +192,7 @@ public class DefaultStringifier<T> imple
       String[] parts = itemStr.split(SEPARATOR);
 
       for (String part : parts) {
-        if (!part.equals(""))
+        if (!part.isEmpty())
           list.add(stringifier.fromString(part));
       }
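
The isEmpty() change above guards the tokens produced when a stored array is split on SEPARATOR; empty tokens are skipped instead of being handed to fromString(). Typical round-trip usage of DefaultStringifier, as a hedged sketch (the key name "my.key" is arbitrary, and the default io.serializations configuration, which includes WritableSerialization, is assumed):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.IntWritable;

public class StringifierDemo {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Serialize, Base64-encode, and stash the value in the configuration.
    DefaultStringifier.store(conf, new IntWritable(7), "my.key");
    // Decode and deserialize it back out.
    IntWritable restored =
        DefaultStringifier.load(conf, "my.key", IntWritable.class);
    System.out.println(restored.get());   // 7
  }
}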
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DoubleWritable.java Fri Oct 19 02:25:55 2012
@@ -42,10 +42,12 @@ public class DoubleWritable implements W
     set(value);
   }
   
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readDouble();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeDouble(value);
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java Fri Oct 19 02:25:55 2012
@@ -23,7 +23,6 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.EnumSet;
 import java.util.Iterator;
-import java.util.Collection;
 import java.util.AbstractCollection;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -46,8 +45,11 @@ public class EnumSetWritable<E extends E
   EnumSetWritable() {
   }
 
+  @Override
   public Iterator<E> iterator() { return value.iterator(); }
+  @Override
   public int size() { return value.size(); }
+  @Override
   public boolean add(E e) {
     if (value == null) {
       value = EnumSet.of(e);
@@ -109,7 +111,7 @@ public class EnumSetWritable<E extends E
     return value;
   }
 
-  /** {@inheritDoc} */
+  @Override
   @SuppressWarnings("unchecked")
   public void readFields(DataInput in) throws IOException {
     int length = in.readInt();
@@ -127,7 +129,7 @@ public class EnumSetWritable<E extends E
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void write(DataOutput out) throws IOException {
     if (this.value == null) {
       out.writeInt(-1);
@@ -152,6 +154,7 @@ public class EnumSetWritable<E extends E
    * Returns true if <code>o</code> is an EnumSetWritable with the same value,
    * or both are null.
    */
+  @Override
   public boolean equals(Object o) {
     if (o == null) {
       throw new IllegalArgumentException("null argument passed in equal().");
@@ -180,27 +183,25 @@ public class EnumSetWritable<E extends E
     return elementType;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {
     if (value == null)
       return 0;
     return (int) value.hashCode();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public String toString() {
     if (value == null)
       return "(null)";
     return value.toString();
   }
 
-  /** {@inheritDoc} */
   @Override
   public Configuration getConf() {
     return this.conf;
   }
 
-  /** {@inheritDoc} */
   @Override
   public void setConf(Configuration conf) {
     this.conf = conf;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java Fri Oct 19 02:25:55 2012
@@ -39,10 +39,12 @@ public class FloatWritable implements Wr
   /** Return the value of this FloatWritable. */
   public float get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readFloat();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeFloat(value);
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java Fri Oct 19 02:25:55 2012
@@ -114,11 +114,13 @@ public abstract class GenericWritable im
     return instance;
   }
   
+  @Override
   public String toString() {
     return "GW[" + (instance != null ? ("class=" + instance.getClass().getName() +
         ",value=" + instance.toString()) : "(null)") + "]";
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     type = in.readByte();
     Class<? extends Writable> clazz = getTypes()[type & 0xff];
@@ -131,6 +133,7 @@ public abstract class GenericWritable im
     instance.readFields(in);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     if (type == NOT_SET || instance == null)
       throw new IOException("The GenericWritable has NOT been set correctly. type="
@@ -145,10 +148,12 @@ public abstract class GenericWritable im
    */
   abstract protected Class<? extends Writable>[] getTypes();
 
+  @Override
   public Configuration getConf() {
     return conf;
   }
 
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
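
GenericWritable writes a one-byte index into the array returned by getTypes() ahead of the wrapped instance, so a subclass only has to enumerate the concrete Writable classes it can carry. A hypothetical subclass sketched under that contract:

import org.apache.hadoop.io.GenericWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Hypothetical: a value slot holding either an IntWritable or a Text,
// distinguished on the wire by the one-byte type index.
public class IntOrTextWritable extends GenericWritable {
  @SuppressWarnings("unchecked")
  private static final Class<? extends Writable>[] TYPES =
      (Class<? extends Writable>[]) new Class<?>[] {
        IntWritable.class,   // type byte 0
        Text.class           // type byte 1
      };

  @Override
  protected Class<? extends Writable>[] getTypes() {
    return TYPES;
  }
}

An instance is populated with set(new Text("hi")) and then serialized like any other Writable.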

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java Fri Oct 19 02:25:55 2012
@@ -25,6 +25,7 @@ import java.nio.channels.FileChannel;
 import java.nio.channels.WritableByteChannel;
 
 import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -36,6 +37,7 @@ import org.apache.hadoop.conf.Configurat
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class IOUtils {
+  public static final Log LOG = LogFactory.getLog(IOUtils.class);
 
   /**
    * Copies from one stream to another.
@@ -206,12 +208,20 @@ public class IOUtils {
    * for any reason (including EOF)
    */
   public static void skipFully(InputStream in, long len) throws IOException {
-    while (len > 0) {
-      long ret = in.skip(len);
-      if (ret < 0) {
-        throw new IOException( "Premature EOF from inputStream");
+    long amt = len;
+    while (amt > 0) {
+      long ret = in.skip(amt);
+      if (ret == 0) {
+        // skip may return 0 even if we're not at EOF.  Luckily, we can 
+        // use the read() method to figure out if we're at the end.
+        int b = in.read();
+        if (b == -1) {
+          throw new EOFException( "Premature EOF from inputStream after " +
+              "skipping " + (len - amt) + " byte(s).");
+        }
+        ret = 1;
       }
-      len -= ret;
+      amt -= ret;
     }
   }
   
@@ -227,7 +237,7 @@ public class IOUtils {
       if (c != null) {
         try {
           c.close();
-        } catch(IOException e) {
+        } catch(Throwable e) {
           if (log != null && log.isDebugEnabled()) {
             log.debug("Exception in closing " + c, e);
           }
@@ -256,6 +266,7 @@ public class IOUtils {
       try {
         sock.close();
       } catch (IOException ignored) {
+        LOG.debug("Ignoring exception while closing socket", ignored);
       }
     }
   }
@@ -264,9 +275,11 @@ public class IOUtils {
    * The /dev/null of OutputStreams.
    */
   public static class NullOutputStream extends OutputStream {
+    @Override
     public void write(byte[] b, int off, int len) throws IOException {
     }
 
+    @Override
     public void write(int b) throws IOException {
     }
   }  
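
The skipFully() rewrite above exists because InputStream.skip() may legally return 0 well before EOF; the patch disambiguates with a one-byte read(), and only a read() of -1 proves the stream is exhausted. (Similarly, cleanup() now catches Throwable rather than IOException so a failing close() cannot mask the primary error, and the ignored socket-close exception is at least logged.) A self-contained sketch of the same skip-then-probe idea, independent of Hadoop:

import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

public class SkipFullyDemo {
  // Same idea as the patch: loop on skip(), and when skip() returns 0,
  // probe with read() to tell a transient short skip apart from real EOF.
  static void skipFully(InputStream in, long len) throws IOException {
    long amt = len;
    while (amt > 0) {
      long ret = in.skip(amt);
      if (ret == 0) {
        if (in.read() == -1) {
          throw new EOFException("Premature EOF after skipping "
              + (len - amt) + " byte(s).");
        }
        ret = 1;   // the probe consumed one byte
      }
      amt -= ret;
    }
  }

  public static void main(String[] args) throws IOException {
    InputStream in = new ByteArrayInputStream(new byte[10]);
    skipFully(in, 10);    // succeeds: exactly the stream's length
    try {
      skipFully(in, 1);   // throws: the stream is already exhausted
    } catch (EOFException expected) {
      System.out.println(expected.getMessage());
    }
  }
}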

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java Fri Oct 19 02:25:55 2012
@@ -42,10 +42,12 @@ public class IntWritable implements Writ
   /** Return the value of this IntWritable. */
   public int get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readInt();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(value);
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java Fri Oct 19 02:25:55 2012
@@ -42,15 +42,18 @@ public class LongWritable implements Wri
   /** Return the value of this LongWritable. */
   public long get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = in.readLong();
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeLong(value);
   }
 
   /** Returns true iff <code>o</code> is a LongWritable with the same value. */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof LongWritable))
       return false;
@@ -58,17 +61,20 @@ public class LongWritable implements Wri
     return this.value == other.value;
   }
 
+  @Override
   public int hashCode() {
     return (int)value;
   }
 
   /** Compares two LongWritables. */
+  @Override
   public int compareTo(LongWritable o) {
     long thisValue = this.value;
     long thatValue = o.value;
     return (thisValue<thatValue ? -1 : (thisValue==thatValue ? 0 : 1));
   }
 
+  @Override
   public String toString() {
     return Long.toString(value);
   }
@@ -79,6 +85,7 @@ public class LongWritable implements Wri
       super(LongWritable.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       long thisValue = readLong(b1, s1);
@@ -94,6 +101,7 @@ public class LongWritable implements Wri
     public int compare(WritableComparable a, WritableComparable b) {
       return -super.compare(a, b);
     }
+    @Override
     public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
       return -super.compare(b1, s1, l1, b2, s2, l2);
     }
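
LongWritable.Comparator above compares serialized bytes directly, so sorts never deserialize keys, and DecreasingComparator just negates the result. The same raw-bytes pattern for a hypothetical descending key type, registered the way Hadoop comparators usually are (a sketch, not part of this commit):

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.WritableComparator;

// Hypothetical key type that sorts descending without deserializing.
public class DescendingLongWritable extends LongWritable {

  public static class Comparator extends WritableComparator {
    public Comparator() {
      super(DescendingLongWritable.class);
    }

    @Override
    public int compare(byte[] b1, int s1, int l1,
                       byte[] b2, int s2, int l2) {
      long thisValue = readLong(b1, s1);   // 8 big-endian bytes, as written
      long thatValue = readLong(b2, s2);   // by DataOutput.writeLong
      return thisValue < thatValue ? 1 : (thisValue == thatValue ? 0 : -1);
    }
  }

  static {
    // Register once, as LongWritable does for its own raw comparator.
    WritableComparator.define(DescendingLongWritable.class, new Comparator());
  }
}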

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java Fri Oct 19 02:25:55 2012
@@ -36,6 +36,7 @@ public class MD5Hash implements Writable
   public static final int MD5_LEN = 16;
 
   private static ThreadLocal<MessageDigest> DIGESTER_FACTORY = new ThreadLocal<MessageDigest>() {
+    @Override
     protected MessageDigest initialValue() {
       try {
         return MessageDigest.getInstance("MD5");
@@ -65,6 +66,7 @@ public class MD5Hash implements Writable
   }
   
   // javadoc from Writable
+  @Override
   public void readFields(DataInput in) throws IOException {
     in.readFully(digest);
   }
@@ -77,6 +79,7 @@ public class MD5Hash implements Writable
   }
 
   // javadoc from Writable
+  @Override
   public void write(DataOutput out) throws IOException {
     out.write(digest);
   }
@@ -155,6 +158,7 @@ public class MD5Hash implements Writable
 
   /** Returns true iff <code>o</code> is an MD5Hash whose digest contains the
    * same values.  */
+  @Override
   public boolean equals(Object o) {
     if (!(o instanceof MD5Hash))
       return false;
@@ -165,12 +169,14 @@ public class MD5Hash implements Writable
   /** Returns a hash code value for this object.
    * Only uses the first 4 bytes, since md5s are evenly distributed.
    */
+  @Override
   public int hashCode() {
     return quarterDigest();
   }
 
 
   /** Compares this object with the specified object for order.*/
+  @Override
   public int compareTo(MD5Hash that) {
     return WritableComparator.compareBytes(this.digest, 0, MD5_LEN,
                                            that.digest, 0, MD5_LEN);
@@ -182,6 +188,7 @@ public class MD5Hash implements Writable
       super(MD5Hash.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       return compareBytes(b1, s1, MD5_LEN, b2, s2, MD5_LEN);
@@ -196,6 +203,7 @@ public class MD5Hash implements Writable
   {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
 
   /** Returns a string representation of this object. */
+  @Override
   public String toString() {
     StringBuilder buf = new StringBuilder(MD5_LEN*2);
     for (int i = 0; i < MD5_LEN; i++) {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java Fri Oct 19 02:25:55 2012
@@ -296,6 +296,7 @@ public class MapFile {
     }
 
     /** Close the map. */
+    @Override
     public synchronized void close() throws IOException {
       data.close();
       index.close();
@@ -723,6 +724,7 @@ public class MapFile {
     }
 
     /** Close the map. */
+    @Override
     public synchronized void close() throws IOException {
       if (!indexClosed) {
         index.close();

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapWritable.java Fri Oct 19 02:25:55 2012
@@ -55,27 +55,27 @@ public class MapWritable extends Abstrac
     copy(other);
   }
   
-  /** {@inheritDoc} */
+  @Override
   public void clear() {
     instance.clear();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsKey(Object key) {
     return instance.containsKey(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsValue(Object value) {
     return instance.containsValue(value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<Map.Entry<Writable, Writable>> entrySet() {
     return instance.entrySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean equals(Object obj) {
     if (this == obj) {
       return true;
@@ -93,27 +93,27 @@ public class MapWritable extends Abstrac
     return false;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable get(Object key) {
     return instance.get(key);
   }
   
-  /** {@inheritDoc} */
+  @Override
   public int hashCode() {
     return 1 + this.instance.hashCode();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean isEmpty() {
     return instance.isEmpty();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<Writable> keySet() {
     return instance.keySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   @SuppressWarnings("unchecked")
   public Writable put(Writable key, Writable value) {
     addToMap(key.getClass());
@@ -121,31 +121,30 @@ public class MapWritable extends Abstrac
     return instance.put(key, value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void putAll(Map<? extends Writable, ? extends Writable> t) {
     for (Map.Entry<? extends Writable, ? extends Writable> e: t.entrySet()) {
       put(e.getKey(), e.getValue());
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable remove(Object key) {
     return instance.remove(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int size() {
     return instance.size();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Collection<Writable> values() {
     return instance.values();
   }
   
   // Writable
   
-  /** {@inheritDoc} */
   @Override
   public void write(DataOutput out) throws IOException {
     super.write(out);
@@ -164,7 +163,6 @@ public class MapWritable extends Abstrac
     }
   }
 
-  /** {@inheritDoc} */
   @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java Fri Oct 19 02:25:55 2012
@@ -35,6 +35,7 @@ public class NullWritable implements Wri
   /** Returns the single instance of this class. */
   public static NullWritable get() { return THIS; }
   
+  @Override
   public String toString() {
     return "(null)";
   }
@@ -46,8 +47,11 @@ public class NullWritable implements Wri
   public int compareTo(NullWritable other) {
     return 0;
   }
+  @Override
   public boolean equals(Object other) { return other instanceof NullWritable; }
+  @Override
   public void readFields(DataInput in) throws IOException {}
+  @Override
   public void write(DataOutput out) throws IOException {}
 
   /** A Comparator &quot;optimized&quot; for NullWritable. */
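
NullWritable is a zero-byte singleton, which is why readFields() and write() above are intentionally empty; it is the conventional filler for the unused half of a key/value pair. A small hypothetical demo of what "zero bytes on the wire" means:

import java.io.IOException;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.NullWritable;

public class NullWritableDemo {
  public static void main(String[] args) throws IOException {
    NullWritable nothing = NullWritable.get();   // singleton; no public ctor
    DataOutputBuffer out = new DataOutputBuffer();
    nothing.write(out);                          // writes nothing at all
    System.out.println(out.getLength());         // 0
  }
}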

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java Fri Oct 19 02:25:55 2012
@@ -66,15 +66,18 @@ public class ObjectWritable implements W
     this.instance = instance;
   }
   
+  @Override
   public String toString() {
     return "OW[class=" + declaredClass + ",value=" + instance + "]";
   }
 
   
+  @Override
   public void readFields(DataInput in) throws IOException {
     readObject(in, this, this.conf);
   }
   
+  @Override
   public void write(DataOutput out) throws IOException {
     writeObject(out, instance, declaredClass, conf);
   }
@@ -99,6 +102,7 @@ public class ObjectWritable implements W
       super(conf);
       this.declaredClass = declaredClass;
     }
+    @Override
     public void readFields(DataInput in) throws IOException {
       String className = UTF8.readString(in);
       declaredClass = PRIMITIVE_NAMES.get(className);
@@ -110,6 +114,7 @@ public class ObjectWritable implements W
         }
       }
     }
+    @Override
     public void write(DataOutput out) throws IOException {
       UTF8.writeString(out, declaredClass.getName());
     }
@@ -375,10 +380,12 @@ public class ObjectWritable implements W
     return declaredClass;
   }
 
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
 
+  @Override
   public Configuration getConf() {
     return this.conf;
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java Fri Oct 19 02:25:55 2012
@@ -50,6 +50,7 @@ public class OutputBuffer extends Filter
   private static class Buffer extends ByteArrayOutputStream {
     public byte[] getData() { return buf; }
     public int getLength() { return count; }
+    @Override
     public void reset() { count = 0; }
 
     public void write(InputStream in, int len) throws IOException {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java Fri Oct 19 02:25:55 2012
@@ -194,6 +194,7 @@ public class ReadaheadPool {
       this.len = len;
     }
     
+    @Override
     public void run() {
       if (canceled) return;
       // There's a very narrow race here that the file will close right at

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java Fri Oct 19 02:25:55 2012
@@ -24,7 +24,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java Fri Oct 19 02:25:55 2012
@@ -47,6 +47,7 @@ import org.apache.hadoop.util.Reflection
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.MergeSort;
 import org.apache.hadoop.util.PriorityQueue;
+import org.apache.hadoop.util.Time;
 
 /** 
  * <code>SequenceFile</code>s are flat files consisting of binary key/value 
@@ -624,15 +625,18 @@ public class SequenceFile {
       dataSize = length;
     }
     
+    @Override
     public int getSize() {
       return dataSize;
     }
     
+    @Override
     public void writeUncompressedBytes(DataOutputStream outStream)
       throws IOException {
       outStream.write(data, 0, dataSize);
     }
 
+    @Override
     public void writeCompressedBytes(DataOutputStream outStream) 
       throws IllegalArgumentException, IOException {
       throw 
@@ -665,10 +669,12 @@ public class SequenceFile {
       dataSize = length;
     }
     
+    @Override
     public int getSize() {
       return dataSize;
     }
     
+    @Override
     public void writeUncompressedBytes(DataOutputStream outStream)
       throws IOException {
       if (decompressedStream == null) {
@@ -686,6 +692,7 @@ public class SequenceFile {
       }
     }
 
+    @Override
     public void writeCompressedBytes(DataOutputStream outStream) 
       throws IllegalArgumentException, IOException {
       outStream.write(data, 0, dataSize);
@@ -727,6 +734,7 @@ public class SequenceFile {
       return new TreeMap<Text, Text>(this.theMetadata);
     }
     
+    @Override
     public void write(DataOutput out) throws IOException {
       out.writeInt(this.theMetadata.size());
       Iterator<Map.Entry<Text, Text>> iter =
@@ -738,6 +746,7 @@ public class SequenceFile {
       }
     }
 
+    @Override
     public void readFields(DataInput in) throws IOException {
       int sz = in.readInt();
       if (sz < 0) throw new IOException("Invalid size: " + sz + " for file metadata object");
@@ -751,6 +760,7 @@ public class SequenceFile {
       }    
     }
 
+    @Override
     public boolean equals(Object other) {
       if (other == null) {
         return false;
@@ -787,11 +797,13 @@ public class SequenceFile {
       return true;
     }
 
+    @Override
     public int hashCode() {
       assert false : "hashCode not designed";
       return 42; // any arbitrary constant will do 
     }
     
+    @Override
     public String toString() {
       StringBuilder sb = new StringBuilder();
       sb.append("size: ").append(this.theMetadata.size()).append("\n");
@@ -835,7 +847,7 @@ public class SequenceFile {
     {
       try {                                       
         MessageDigest digester = MessageDigest.getInstance("MD5");
-        long time = System.currentTimeMillis();
+        long time = Time.now();
         digester.update((new UID()+"@"+time).getBytes());
         sync = digester.digest();
       } catch (Exception e) {
@@ -1160,8 +1172,26 @@ public class SequenceFile {
       this.metadata = metadata;
       SerializationFactory serializationFactory = new SerializationFactory(conf);
       this.keySerializer = serializationFactory.getSerializer(keyClass);
+      if (this.keySerializer == null) {
+        throw new IOException(
+            "Could not find a serializer for the Key class: '"
+                + keyClass.getCanonicalName() + "'. "
+                + "Please ensure that the configuration '" +
+                CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
+                + "properly configured, if you're using "
+                + "custom serialization.");
+      }
       this.keySerializer.open(buffer);
       this.uncompressedValSerializer = serializationFactory.getSerializer(valClass);
+      if (this.uncompressedValSerializer == null) {
+        throw new IOException(
+            "Could not find a serializer for the Value class: '"
+                + valClass.getCanonicalName() + "'. "
+                + "Please ensure that the configuration '" +
+                CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
+                + "properly configured, if you're using "
+                + "custom serialization.");
+      }
       this.uncompressedValSerializer.open(buffer);
       if (this.codec != null) {
         ReflectionUtils.setConf(this.codec, this.conf);
@@ -1170,6 +1200,15 @@ public class SequenceFile {
         this.deflateOut = 
           new DataOutputStream(new BufferedOutputStream(deflateFilter));
         this.compressedValSerializer = serializationFactory.getSerializer(valClass);
+        if (this.compressedValSerializer == null) {
+          throw new IOException(
+              "Could not find a serializer for the Value class: '"
+                  + valClass.getCanonicalName() + "'. "
+                  + "Please ensure that the configuration '" +
+                  CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
+                  + "properly configured, if you're using "
+                  + "custom serialization.");
+        }
         this.compressedValSerializer.open(deflateOut);
       }
       writeFileHeader();
@@ -1222,6 +1261,7 @@ public class SequenceFile {
     Configuration getConf() { return conf; }
     
     /** Close the file. */
+    @Override
     public synchronized void close() throws IOException {
       keySerializer.close();
       uncompressedValSerializer.close();
@@ -1332,6 +1372,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     @SuppressWarnings("unchecked")
     public synchronized void append(Object key, Object val)
       throws IOException {
@@ -1364,6 +1405,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     public synchronized void appendRaw(byte[] keyData, int keyOffset,
         int keyLength, ValueBytes val) throws IOException {
 
@@ -1421,6 +1463,7 @@ public class SequenceFile {
     }
     
     /** Compress and flush contents to dfs */
+    @Override
     public synchronized void sync() throws IOException {
       if (noBufferedRecords > 0) {
         super.sync();
@@ -1450,6 +1493,7 @@ public class SequenceFile {
     }
     
     /** Close the file. */
+    @Override
     public synchronized void close() throws IOException {
       if (out != null) {
         sync();
@@ -1458,6 +1502,7 @@ public class SequenceFile {
     }
 
     /** Append a key/value pair. */
+    @Override
     @SuppressWarnings("unchecked")
     public synchronized void append(Object key, Object val)
       throws IOException {
@@ -1490,6 +1535,7 @@ public class SequenceFile {
     }
     
     /** Append a key/value pair. */
+    @Override
     public synchronized void appendRaw(byte[] keyData, int keyOffset,
         int keyLength, ValueBytes val) throws IOException {
       
@@ -1897,6 +1943,15 @@ public class SequenceFile {
           new SerializationFactory(conf);
         this.keyDeserializer =
           getDeserializer(serializationFactory, getKeyClass());
+        if (this.keyDeserializer == null) {
+          throw new IOException(
+              "Could not find a deserializer for the Key class: '"
+                  + getKeyClass().getCanonicalName() + "'. "
+                  + "Please ensure that the configuration '" +
+                  CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
+                  + "properly configured, if you're using "
+                  + "custom serialization.");
+        }
         if (!blockCompressed) {
           this.keyDeserializer.open(valBuffer);
         } else {
@@ -1904,6 +1959,15 @@ public class SequenceFile {
         }
         this.valDeserializer =
           getDeserializer(serializationFactory, getValueClass());
+        if (this.valDeserializer == null) {
+          throw new IOException(
+              "Could not find a deserializer for the Value class: '"
+                  + getValueClass().getCanonicalName() + "'. "
+                  + "Please ensure that the configuration '" +
+                  CommonConfigurationKeys.IO_SERIALIZATIONS_KEY + "' is "
+                  + "properly configured, if you're using "
+                  + "custom serialization.");
+        }
         this.valDeserializer.open(valIn);
       }
     }
@@ -1914,6 +1978,7 @@ public class SequenceFile {
     }
     
     /** Close the file. */
+    @Override
     public synchronized void close() throws IOException {
       // Return the decompressors to the pool
       CodecPool.returnDecompressor(keyLenDecompressor);
@@ -2572,6 +2637,7 @@ public class SequenceFile {
     }
 
     /** Returns the name of the file. */
+    @Override
     public String toString() {
       return filename;
     }
@@ -2902,6 +2968,7 @@ public class SequenceFile {
         mergeSort.mergeSort(pointersCopy, pointers, 0, count);
       }
       class SeqFileComparator implements Comparator<IntWritable> {
+        @Override
         public int compare(IntWritable I, IntWritable J) {
           return comparator.compare(rawBuffer, keyOffsets[I.get()], 
                                     keyLengths[I.get()], rawBuffer, 
@@ -3175,6 +3242,7 @@ public class SequenceFile {
         this.tmpDir = tmpDir;
         this.progress = progress;
       }
+      @Override
       protected boolean lessThan(Object a, Object b) {
         // indicate we're making progress
         if (progress != null) {
@@ -3186,6 +3254,7 @@ public class SequenceFile {
                                   msa.getKey().getLength(), msb.getKey().getData(), 0, 
                                   msb.getKey().getLength()) < 0;
       }
+      @Override
       public void close() throws IOException {
         SegmentDescriptor ms;                           // close inputs
         while ((ms = (SegmentDescriptor)pop()) != null) {
@@ -3193,12 +3262,15 @@ public class SequenceFile {
         }
         minSegment = null;
       }
+      @Override
       public DataOutputBuffer getKey() throws IOException {
         return rawKey;
       }
+      @Override
       public ValueBytes getValue() throws IOException {
         return rawValue;
       }
+      @Override
       public boolean next() throws IOException {
         if (size() == 0)
           return false;
@@ -3226,6 +3298,7 @@ public class SequenceFile {
         return true;
       }
       
+      @Override
       public Progress getProgress() {
         return mergeProgress; 
       }
@@ -3423,6 +3496,7 @@ public class SequenceFile {
         return preserveInput;
       }
       
+      @Override
       public int compareTo(Object o) {
         SegmentDescriptor that = (SegmentDescriptor)o;
         if (this.segmentLength != that.segmentLength) {
@@ -3435,6 +3509,7 @@ public class SequenceFile {
           compareTo(that.segmentPathName.toString());
       }
 
+      @Override
       public boolean equals(Object o) {
         if (!(o instanceof SegmentDescriptor)) {
           return false;
@@ -3449,6 +3524,7 @@ public class SequenceFile {
         return false;
       }
 
+      @Override
       public int hashCode() {
         return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32));
       }
@@ -3538,12 +3614,14 @@ public class SequenceFile {
       /** The default cleanup. Subclasses can override this with a custom 
        * cleanup 
        */
+      @Override
       public void cleanup() throws IOException {
         super.close();
         if (super.shouldPreserveInput()) return;
         parentContainer.cleanup();
       }
       
+      @Override
       public boolean equals(Object o) {
         if (!(o instanceof LinkedSegmentsDescriptor)) {
           return false;
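
The new null checks in the Writer and Reader above turn what used to be a bare NullPointerException into an IOException that names the missing class and points at the 'io.serializations' key (CommonConfigurationKeys.IO_SERIALIZATIONS_KEY); SerializationFactory.getSerializer() returns null when no configured serialization accepts the class. The same probe as a standalone hedged sketch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class SerializerProbe {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    SerializationFactory factory = new SerializationFactory(conf);
    // Null here means no entry in "io.serializations" accepts the class --
    // exactly the misconfiguration the new checks report.
    Serializer<Text> serializer = factory.getSerializer(Text.class);
    if (serializer == null) {
      throw new IllegalStateException(
          "No serializer for Text; check io.serializations");
    }
    System.out.println(serializer.getClass().getName());
  }
}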

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java Fri Oct 19 02:25:55 2012
@@ -87,6 +87,7 @@ public class SetFile extends MapFile {
     }
 
     // javadoc inherited
+    @Override
     public boolean seek(WritableComparable key)
       throws IOException {
       return super.seek(key);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SortedMapWritable.java Fri Oct 19 02:25:55 2012
@@ -57,86 +57,86 @@ public class SortedMapWritable extends A
     copy(other);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Comparator<? super WritableComparable> comparator() {
     // Returning null means we use the natural ordering of the keys
     return null;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public WritableComparable firstKey() {
     return instance.firstKey();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public SortedMap<WritableComparable, Writable>
   headMap(WritableComparable toKey) {
     
     return instance.headMap(toKey);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public WritableComparable lastKey() {
     return instance.lastKey();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public SortedMap<WritableComparable, Writable>
   subMap(WritableComparable fromKey, WritableComparable toKey) {
     
     return instance.subMap(fromKey, toKey);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public SortedMap<WritableComparable, Writable>
   tailMap(WritableComparable fromKey) {
     
     return instance.tailMap(fromKey);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void clear() {
     instance.clear();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsKey(Object key) {
     return instance.containsKey(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean containsValue(Object value) {
     return instance.containsValue(value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<java.util.Map.Entry<WritableComparable, Writable>> entrySet() {
     return instance.entrySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable get(Object key) {
     return instance.get(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public boolean isEmpty() {
     return instance.isEmpty();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Set<WritableComparable> keySet() {
     return instance.keySet();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable put(WritableComparable key, Writable value) {
     addToMap(key.getClass());
     addToMap(value.getClass());
     return instance.put(key, value);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public void putAll(Map<? extends WritableComparable, ? extends Writable> t) {
     for (Map.Entry<? extends WritableComparable, ? extends Writable> e:
       t.entrySet()) {
@@ -145,22 +145,21 @@ public class SortedMapWritable extends A
     }
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Writable remove(Object key) {
     return instance.remove(key);
   }
 
-  /** {@inheritDoc} */
+  @Override
   public int size() {
     return instance.size();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public Collection<Writable> values() {
     return instance.values();
   }
 
-  /** {@inheritDoc} */
   @SuppressWarnings("unchecked")
   @Override
   public void readFields(DataInput in) throws IOException {
@@ -187,7 +186,6 @@ public class SortedMapWritable extends A
     }
   }
 
-  /** {@inheritDoc} */
   @Override
   public void write(DataOutput out) throws IOException {
     super.write(out);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Stringifier.java Fri Oct 19 02:25:55 2012
@@ -54,6 +54,7 @@ public interface Stringifier<T> extends 
    * Closes this object. 
    * @throws IOException if an I/O error occurs 
    * */
+  @Override
   public void close() throws IOException;
   
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java Fri Oct 19 02:25:55 2012
@@ -55,6 +55,7 @@ public class Text extends BinaryComparab
   
   private static ThreadLocal<CharsetEncoder> ENCODER_FACTORY =
     new ThreadLocal<CharsetEncoder>() {
+      @Override
       protected CharsetEncoder initialValue() {
         return Charset.forName("UTF-8").newEncoder().
                onMalformedInput(CodingErrorAction.REPORT).
@@ -64,6 +65,7 @@ public class Text extends BinaryComparab
   
   private static ThreadLocal<CharsetDecoder> DECODER_FACTORY =
     new ThreadLocal<CharsetDecoder>() {
+    @Override
     protected CharsetDecoder initialValue() {
       return Charset.forName("UTF-8").newDecoder().
              onMalformedInput(CodingErrorAction.REPORT).
@@ -112,11 +114,13 @@ public class Text extends BinaryComparab
    * valid. Please use {@link #copyBytes()} if you
    * need the returned array to be precisely the length of the data.
    */
+  @Override
   public byte[] getBytes() {
     return bytes;
   }
 
   /** Returns the number of bytes in the byte array */ 
+  @Override
   public int getLength() {
     return length;
   }
@@ -281,6 +285,7 @@ public class Text extends BinaryComparab
   
   /** deserialize 
    */
+  @Override
   public void readFields(DataInput in) throws IOException {
     int newLength = WritableUtils.readVInt(in);
     setCapacity(newLength, false);
@@ -313,6 +318,7 @@ public class Text extends BinaryComparab
    * length uses zero-compressed encoding
    * @see Writable#write(DataOutput)
    */
+  @Override
   public void write(DataOutput out) throws IOException {
     WritableUtils.writeVInt(out, length);
     out.write(bytes, 0, length);
@@ -329,6 +335,7 @@ public class Text extends BinaryComparab
   }
 
   /** Returns true iff <code>o</code> is a Text with the same contents.  */
+  @Override
   public boolean equals(Object o) {
     if (o instanceof Text)
       return super.equals(o);
@@ -346,6 +353,7 @@ public class Text extends BinaryComparab
       super(Text.class);
     }
 
+    @Override
     public int compare(byte[] b1, int s1, int l1,
                        byte[] b2, int s2, int l2) {
       int n1 = WritableUtils.decodeVIntSize(b1[s1]);

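The Text.java hunks above add @Override to the two ThreadLocal initialValue() factories. The ThreadLocal is there because CharsetEncoder and CharsetDecoder carry internal state and are not thread-safe, so each thread needs a private instance. A minimal standalone sketch of the same idiom (illustrative only, not part of this commit; the class name is made up):

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CodingErrorAction;

public class ThreadLocalEncoderSketch {
  // One encoder per thread: CharsetEncoder is stateful and not thread-safe.
  private static final ThreadLocal<CharsetEncoder> ENCODER =
    new ThreadLocal<CharsetEncoder>() {
      @Override
      protected CharsetEncoder initialValue() {
        return Charset.forName("UTF-8").newEncoder().
               onMalformedInput(CodingErrorAction.REPORT).
               onUnmappableCharacter(CodingErrorAction.REPORT);
      }
    };

  public static ByteBuffer encode(String s) throws CharacterCodingException {
    // reset() clears any state left from this thread's previous call
    return ENCODER.get().reset().encode(CharBuffer.wrap(s));
  }

  public static void main(String[] args) throws Exception {
    System.out.println(encode("hello").remaining() + " bytes");
  }
}
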
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/TwoDArrayWritable.java Fri Oct 19 02:25:55 2012
@@ -57,6 +57,7 @@ public class TwoDArrayWritable implement
 
   public Writable[][] get() { return values; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     // construct matrix
     values = new Writable[in.readInt()][];          
@@ -81,6 +82,7 @@ public class TwoDArrayWritable implement
     }
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeInt(values.length);                 // write values
     for (int i = 0; i < values.length; i++) {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java Fri Oct 19 02:25:55 2012
@@ -110,6 +110,7 @@ public class UTF8 implements WritableCom
     System.arraycopy(other.bytes, 0, bytes, 0, length);
   }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     length = in.readUnsignedShort();
     if (bytes == null || bytes.length < length)
@@ -123,6 +124,7 @@ public class UTF8 implements WritableCom
     WritableUtils.skipFully(in, length);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeShort(length);
     out.write(bytes, 0, length);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java Fri Oct 19 02:25:55 2012
@@ -43,10 +43,12 @@ public class VIntWritable implements Wri
   /** Return the value of this VIntWritable. */
   public int get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = WritableUtils.readVInt(in);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     WritableUtils.writeVInt(out, value);
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java Fri Oct 19 02:25:55 2012
@@ -43,10 +43,12 @@ public class VLongWritable implements Wr
   /** Return the value of this VLongWritable. */
   public long get() { return value; }
 
+  @Override
   public void readFields(DataInput in) throws IOException {
     value = WritableUtils.readVLong(in);
   }
 
+  @Override
   public void write(DataOutput out) throws IOException {
     WritableUtils.writeVLong(out, value);
   }

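VIntWritable and VLongWritable above both delegate to WritableUtils, which implements the zero-compressed variable-length encoding: small magnitudes take a single byte on the wire rather than a fixed 4 or 8. A round-trip sketch, assuming hadoop-common is on the classpath (the demo class itself is made up):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.WritableUtils;

public class VIntRoundTrip {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    WritableUtils.writeVInt(out, 42);         // one byte on the wire
    WritableUtils.writeVLong(out, 1L << 40);  // still fewer bytes than a fixed long
    out.flush();

    DataInputStream in =
      new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    System.out.println(WritableUtils.readVInt(in));   // 42
    System.out.println(WritableUtils.readVLong(in));  // 1099511627776
  }
}
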
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionMismatchException.java Fri Oct 19 02:25:55 2012
@@ -39,6 +39,7 @@ public class VersionMismatchException ex
   }
 
   /** Returns a string representation of this object. */
+  @Override
   public String toString(){
     return "A record version mismatch occured. Expecting v"
       + expectedVersion + ", found v" + foundVersion; 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java Fri Oct 19 02:25:55 2012
@@ -40,11 +40,13 @@ public abstract class VersionedWritable 
   public abstract byte getVersion();
     
   // javadoc from Writable
+  @Override
   public void write(DataOutput out) throws IOException {
     out.writeByte(getVersion());                  // store version
   }
 
   // javadoc from Writable
+  @Override
   public void readFields(DataInput in) throws IOException {
     byte version = in.readByte();                 // read version
     if (version != getVersion())

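The VersionedWritable hunks show the versioning contract: write() stores a version byte first, and readFields() reads it back and compares it against getVersion(). A hypothetical subclass sketch (not part of this commit) showing how the contract is intended to be used; subclasses call super first so the version byte frames the rest of the record:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.VersionedWritable;

public class ExampleRecord extends VersionedWritable {
  private static final byte VERSION = 1;
  private int value;

  @Override
  public byte getVersion() {
    return VERSION;
  }

  @Override
  public void write(DataOutput out) throws IOException {
    super.write(out);        // stores the version byte first
    out.writeInt(value);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    super.readFields(in);    // rejects data written by a different version
    value = in.readInt();
  }
}
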
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java Fri Oct 19 02:25:55 2012
@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
-import java.util.*;
+import java.io.DataInput;
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -38,12 +39,11 @@ import org.apache.hadoop.util.Reflection
 @InterfaceStability.Stable
 public class WritableComparator implements RawComparator {
 
-  private static HashMap<Class, WritableComparator> comparators =
-    new HashMap<Class, WritableComparator>(); // registry
+  private static final ConcurrentHashMap<Class, WritableComparator> comparators 
+          = new ConcurrentHashMap<Class, WritableComparator>(); // registry
 
   /** Get a comparator for a {@link WritableComparable} implementation. */
-  public static synchronized 
-  WritableComparator get(Class<? extends WritableComparable> c) {
+  public static WritableComparator get(Class<? extends WritableComparable> c) {
     WritableComparator comparator = comparators.get(c);
     if (comparator == null) {
       // force the static initializers to run
@@ -76,17 +76,19 @@ public class WritableComparator implemen
   /** Register an optimized comparator for a {@link WritableComparable}
    * implementation. Comparators registered with this method must be
    * thread-safe. */
-  public static synchronized void define(Class c,
-                                         WritableComparator comparator) {
+  public static void define(Class c, WritableComparator comparator) {
     comparators.put(c, comparator);
   }
 
-
   private final Class<? extends WritableComparable> keyClass;
   private final WritableComparable key1;
   private final WritableComparable key2;
   private final DataInputBuffer buffer;
 
+  protected WritableComparator() {
+    this(null);
+  }
+
   /** Construct for a {@link WritableComparable} implementation. */
   protected WritableComparator(Class<? extends WritableComparable> keyClass) {
     this(keyClass, false);
@@ -120,6 +122,7 @@ public class WritableComparator implemen
    * Writable#readFields(DataInput)}, then calls {@link
    * #compare(WritableComparable,WritableComparable)}.
    */
+  @Override
   public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
     try {
       buffer.reset(b1, s1, l1);                   // parse key1
@@ -144,6 +147,7 @@ public class WritableComparator implemen
     return a.compareTo(b);
   }
 
+  @Override
   public int compare(Object a, Object b) {
     return compare((WritableComparable)a, (WritableComparable)b);
   }

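The WritableComparator change above replaces a HashMap guarded by synchronized methods with a ConcurrentHashMap, so get() and define() no longer serialize every caller; the javadoc's requirement that registered comparators be thread-safe fits the same model. A standalone sketch of that registry idiom (names are illustrative, not Hadoop's):

import java.util.concurrent.ConcurrentHashMap;

public class Registry<V> {
  // Reads are lock-free; the map synchronizes writers internally.
  private final ConcurrentHashMap<Class<?>, V> entries =
    new ConcurrentHashMap<Class<?>, V>();

  public void define(Class<?> c, V value) {
    entries.put(c, value);             // last registration wins
  }

  public V get(Class<?> c, V fallback) {
    V value = entries.get(c);
    // Two threads can race past this null check and both take the
    // fallback; with a concurrent map that is benign.
    return (value != null) ? value : fallback;
  }
}
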
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java Fri Oct 19 02:25:55 2012
@@ -63,6 +63,7 @@ public class BZip2Codec implements Split
   * @throws java.io.IOException
   *             Throws IO exception
   */
+  @Override
   public CompressionOutputStream createOutputStream(OutputStream out)
       throws IOException {
     return new BZip2CompressionOutputStream(out);
@@ -74,6 +75,7 @@ public class BZip2Codec implements Split
   * @return CompressionOutputStream
  * @throws java.io.IOException
    */
+  @Override
   public CompressionOutputStream createOutputStream(OutputStream out,
       Compressor compressor) throws IOException {
     return createOutputStream(out);
@@ -84,6 +86,7 @@ public class BZip2Codec implements Split
   *
   * @return BZip2DummyCompressor.class
   */
+  @Override
   public Class<? extends org.apache.hadoop.io.compress.Compressor> getCompressorType() {
     return BZip2DummyCompressor.class;
   }
@@ -93,6 +96,7 @@ public class BZip2Codec implements Split
   *
   * @return Compressor
   */
+  @Override
   public Compressor createCompressor() {
     return new BZip2DummyCompressor();
   }
@@ -106,6 +110,7 @@ public class BZip2Codec implements Split
   * @throws java.io.IOException
   *             Throws IOException
   */
+  @Override
   public CompressionInputStream createInputStream(InputStream in)
       throws IOException {
     return new BZip2CompressionInputStream(in);
@@ -116,6 +121,7 @@ public class BZip2Codec implements Split
   *
   * @return CompressionInputStream
   */
+  @Override
   public CompressionInputStream createInputStream(InputStream in,
       Decompressor decompressor) throws IOException {
     return createInputStream(in);
@@ -133,6 +139,7 @@ public class BZip2Codec implements Split
    *
    * @return CompressionInputStream for BZip2 aligned at block boundaries
    */
+  @Override
   public SplitCompressionInputStream createInputStream(InputStream seekableIn,
       Decompressor decompressor, long start, long end, READ_MODE readMode)
       throws IOException {
@@ -181,6 +188,7 @@ public class BZip2Codec implements Split
   *
   * @return BZip2DummyDecompressor.class
   */
+  @Override
   public Class<? extends org.apache.hadoop.io.compress.Decompressor> getDecompressorType() {
     return BZip2DummyDecompressor.class;
   }
@@ -190,6 +198,7 @@ public class BZip2Codec implements Split
   *
   * @return Decompressor
   */
+  @Override
   public Decompressor createDecompressor() {
     return new BZip2DummyDecompressor();
   }
@@ -199,6 +208,7 @@ public class BZip2Codec implements Split
   *
   * @return A String telling the default bzip2 file extension
   */
+  @Override
   public String getDefaultExtension() {
     return ".bz2";
   }
@@ -226,6 +236,7 @@ public class BZip2Codec implements Split
       }
     }
 
+    @Override
     public void finish() throws IOException {
       if (needsReset) {
         // In the case that nothing is written to this stream, we still need to
@@ -245,12 +256,14 @@ public class BZip2Codec implements Split
       }
     }    
     
+    @Override
     public void resetState() throws IOException {
       // Cannot write to out at this point because out might not be ready
       // yet, as in SequenceFile.Writer implementation.
       needsReset = true;
     }
 
+    @Override
     public void write(int b) throws IOException {
       if (needsReset) {
         internalReset();
@@ -258,6 +271,7 @@ public class BZip2Codec implements Split
       this.output.write(b);
     }
 
+    @Override
     public void write(byte[] b, int off, int len) throws IOException {
       if (needsReset) {
         internalReset();
@@ -265,6 +279,7 @@ public class BZip2Codec implements Split
       this.output.write(b, off, len);
     }
 
+    @Override
     public void close() throws IOException {
       if (needsReset) {
         // In the case that nothing is written to this stream, we still need to
@@ -382,6 +397,7 @@ public class BZip2Codec implements Split
 
     }// end of method
 
+    @Override
     public void close() throws IOException {
       if (!needsReset) {
         input.close();
@@ -417,6 +433,7 @@ public class BZip2Codec implements Split
     *
     */
 
+    @Override
     public int read(byte[] b, int off, int len) throws IOException {
       if (needsReset) {
         internalReset();
@@ -440,6 +457,7 @@ public class BZip2Codec implements Split
 
     }
 
+    @Override
     public int read() throws IOException {
       byte b[] = new byte[1];
       int result = this.read(b, 0, 1);
@@ -454,6 +472,7 @@ public class BZip2Codec implements Split
       }
     }    
     
+    @Override
     public void resetState() throws IOException {
       // Cannot read from bufferedIn at this point because bufferedIn
       // might not be ready
@@ -461,6 +480,7 @@ public class BZip2Codec implements Split
       needsReset = true;
     }
 
+    @Override
     public long getPos() {
       return this.compressedStreamPosition;
     }

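BZip2CompressionOutputStream above defers its reset: resetState() only sets needsReset because, per the in-line comment, the underlying stream may not be ready yet (as in the SequenceFile.Writer case), and the real re-initialization happens lazily on the first write that follows. The same idiom in isolation (illustrative; the header bytes are invented):

import java.io.IOException;
import java.io.OutputStream;

public class LazyResetStream extends OutputStream {
  private final OutputStream out;
  private boolean needsReset = true;

  public LazyResetStream(OutputStream out) {
    this.out = out;
  }

  public void resetState() {
    needsReset = true;                      // defer: out may not be ready yet
  }

  private void internalReset() throws IOException {
    out.write("HEADER".getBytes("UTF-8"));  // e.g. a stream header
    needsReset = false;
  }

  @Override
  public void write(int b) throws IOException {
    if (needsReset) {
      internalReset();                      // lazy init on the first real write
    }
    out.write(b);
  }
}
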
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockCompressorStream.java Fri Oct 19 02:25:55 2012
@@ -78,6 +78,7 @@ public class BlockCompressorStream exten
    * Each block contains the uncompressed length for the block, followed by
    * one or more length-prefixed blocks of compressed data.
    */
+  @Override
   public void write(byte[] b, int off, int len) throws IOException {
     // Sanity checks
     if (compressor.finished()) {
@@ -132,6 +133,7 @@ public class BlockCompressorStream exten
     }
   }
 
+  @Override
   public void finish() throws IOException {
     if (!compressor.finished()) {
       rawWriteInt((int)compressor.getBytesRead());
@@ -142,6 +144,7 @@ public class BlockCompressorStream exten
     }
   }
 
+  @Override
   protected void compress() throws IOException {
     int len = compressor.compress(buffer, 0, buffer.length);
     if (len > 0) {

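The BlockCompressorStream javadoc above spells out the wire format: each block carries the uncompressed length, followed by one or more length-prefixed runs of compressed data. A sketch of that framing (illustrative, not the Hadoop implementation):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class BlockFramingDemo {
  static void writeBlock(DataOutputStream out, int uncompressedLength,
                         byte[][] compressedChunks) throws IOException {
    out.writeInt(uncompressedLength);      // uncompressed length header
    for (byte[] chunk : compressedChunks) {
      out.writeInt(chunk.length);          // length prefix
      out.write(chunk);                    // compressed payload
    }
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    writeBlock(new DataOutputStream(sink), 100,
               new byte[][] { new byte[40], new byte[35] });
    System.out.println(sink.size() + " bytes framed");   // 87
  }
}
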
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java Fri Oct 19 02:25:55 2012
@@ -65,6 +65,7 @@ public class BlockDecompressorStream ext
     super(in);
   }
 
+  @Override
   protected int decompress(byte[] b, int off, int len) throws IOException {
     // Check if we are the beginning of a block
     if (noUncompressedBytes == originalBlockSize) {
@@ -104,6 +105,7 @@ public class BlockDecompressorStream ext
     return n;
   }
 
+  @Override
   protected int getCompressedData() throws IOException {
     checkStream();
 
@@ -126,6 +128,7 @@ public class BlockDecompressorStream ext
     return len;
   }
 
+  @Override
   public void resetState() throws IOException {
     originalBlockSize = 0;
     noUncompressedBytes = 0;

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java Fri Oct 19 02:25:55 2012
@@ -75,6 +75,7 @@ public class CompressionCodecFactory {
   /**
    * Print the extension map out as a string.
    */
+  @Override
   public String toString() {
     StringBuilder buf = new StringBuilder();
     Iterator<Map.Entry<String, CompressionCodec>> itr = 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java Fri Oct 19 02:25:55 2012
@@ -55,6 +55,7 @@ public abstract class CompressionInputSt
     this.in = in;
   }
 
+  @Override
   public void close() throws IOException {
     in.close();
   }
@@ -63,6 +64,7 @@ public abstract class CompressionInputSt
    * Read bytes from the stream.
    * Made abstract to prevent leakage to underlying stream.
    */
+  @Override
   public abstract int read(byte[] b, int off, int len) throws IOException;
 
   /**
@@ -76,6 +78,7 @@ public abstract class CompressionInputSt
    *
    * @return Current position in stream as a long
    */
+  @Override
   public long getPos() throws IOException {
     if (!(in instanceof Seekable) || !(in instanceof PositionedReadable)){
       //This way of getting the current position will not work for file
@@ -95,6 +98,7 @@ public abstract class CompressionInputSt
    * @throws UnsupportedOperationException
    */
 
+  @Override
   public void seek(long pos) throws UnsupportedOperationException {
     throw new UnsupportedOperationException();
   }
@@ -104,6 +108,7 @@ public abstract class CompressionInputSt
    *
    * @throws UnsupportedOperationException
    */
+  @Override
   public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException {
     throw new UnsupportedOperationException();
   }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java Fri Oct 19 02:25:55 2012
@@ -44,11 +44,13 @@ public abstract class CompressionOutputS
     this.out = out;
   }
   
+  @Override
   public void close() throws IOException {
     finish();
     out.close();
   }
   
+  @Override
   public void flush() throws IOException {
     out.flush();
   }
@@ -57,6 +59,7 @@ public abstract class CompressionOutputS
    * Write compressed bytes to the stream.
    * Made abstract to prevent leakage to underlying stream.
    */
+  @Override
   public abstract void write(byte[] b, int off, int len) throws IOException;
 
   /**

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java Fri Oct 19 02:25:55 2012
@@ -59,6 +59,7 @@ public class CompressorStream extends Co
     super(out);
   }
 
+  @Override
   public void write(byte[] b, int off, int len) throws IOException {
     // Sanity checks
     if (compressor.finished()) {
@@ -83,6 +84,7 @@ public class CompressorStream extends Co
     }
   }
 
+  @Override
   public void finish() throws IOException {
     if (!compressor.finished()) {
       compressor.finish();
@@ -92,10 +94,12 @@ public class CompressorStream extends Co
     }
   }
 
+  @Override
   public void resetState() throws IOException {
     compressor.reset();
   }
   
+  @Override
   public void close() throws IOException {
     if (!closed) {
       finish();
@@ -105,6 +109,7 @@ public class CompressorStream extends Co
   }
 
   private byte[] oneByte = new byte[1];
+  @Override
   public void write(int b) throws IOException {
     oneByte[0] = (byte)(b & 0xff);
     write(oneByte, 0, oneByte.length);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java Fri Oct 19 02:25:55 2012
@@ -66,11 +66,13 @@ public class DecompressorStream extends 
   }
   
   private byte[] oneByte = new byte[1];
+  @Override
   public int read() throws IOException {
     checkStream();
     return (read(oneByte, 0, oneByte.length) == -1) ? -1 : (oneByte[0] & 0xff);
   }
 
+  @Override
   public int read(byte[] b, int off, int len) throws IOException {
     checkStream();
     
@@ -163,11 +165,13 @@ public class DecompressorStream extends 
     }
   }
   
+  @Override
   public void resetState() throws IOException {
     decompressor.reset();
   }
 
   private byte[] skipBytes = new byte[512];
+  @Override
   public long skip(long n) throws IOException {
     // Sanity checks
     if (n < 0) {
@@ -189,11 +193,13 @@ public class DecompressorStream extends 
     return skipped;
   }
 
+  @Override
   public int available() throws IOException {
     checkStream();
     return (eof) ? 0 : 1;
   }
 
+  @Override
   public void close() throws IOException {
     if (!closed) {
       in.close();
@@ -201,13 +207,16 @@ public class DecompressorStream extends 
     }
   }
 
+  @Override
   public boolean markSupported() {
     return false;
   }
 
+  @Override
   public synchronized void mark(int readlimit) {
   }
 
+  @Override
   public synchronized void reset() throws IOException {
     throw new IOException("mark/reset not supported");
   }

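DecompressorStream's skip() above uses the standard idiom for streams that cannot seek: satisfy skip(n) by reading into a scratch buffer until n bytes have been consumed or EOF is reached. The same idiom in isolation (illustrative; the Hadoop version keeps its 512-byte buffer as an instance field rather than allocating per call):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class SkipByReading {
  static long skip(InputStream in, long n) throws IOException {
    if (n < 0) {
      throw new IllegalArgumentException("negative skip length");
    }
    byte[] scratch = new byte[512];
    long skipped = 0;
    while (skipped < n) {
      int toRead = (int) Math.min(n - skipped, scratch.length);
      int read = in.read(scratch, 0, toRead);
      if (read == -1) {
        break;                   // EOF: report what was actually skipped
      }
      skipped += read;
    }
    return skipped;
  }

  public static void main(String[] args) throws IOException {
    InputStream in = new ByteArrayInputStream(new byte[100]);
    System.out.println(skip(in, 64) + " bytes skipped");   // 64
  }
}
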
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java Fri Oct 19 02:25:55 2012
@@ -37,14 +37,17 @@ public class DefaultCodec implements Con
   
   Configuration conf;
 
+  @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
   
+  @Override
   public Configuration getConf() {
     return conf;
   }
   
+  @Override
   public CompressionOutputStream createOutputStream(OutputStream out) 
   throws IOException {
     // This may leak memory if called in a loop. The createCompressor() call
@@ -57,6 +60,7 @@ public class DefaultCodec implements Con
                                 conf.getInt("io.file.buffer.size", 4*1024));
   }
 
+  @Override
   public CompressionOutputStream createOutputStream(OutputStream out, 
                                                     Compressor compressor) 
   throws IOException {
@@ -64,20 +68,24 @@ public class DefaultCodec implements Con
                                 conf.getInt("io.file.buffer.size", 4*1024));
   }
 
+  @Override
   public Class<? extends Compressor> getCompressorType() {
     return ZlibFactory.getZlibCompressorType(conf);
   }
 
+  @Override
   public Compressor createCompressor() {
     return ZlibFactory.getZlibCompressor(conf);
   }
 
+  @Override
   public CompressionInputStream createInputStream(InputStream in) 
   throws IOException {
     return new DecompressorStream(in, createDecompressor(),
                                   conf.getInt("io.file.buffer.size", 4*1024));
   }
 
+  @Override
   public CompressionInputStream createInputStream(InputStream in, 
                                                   Decompressor decompressor) 
   throws IOException {
@@ -85,14 +93,17 @@ public class DefaultCodec implements Con
                                   conf.getInt("io.file.buffer.size", 4*1024));
   }
 
+  @Override
   public Class<? extends Decompressor> getDecompressorType() {
     return ZlibFactory.getZlibDecompressorType(conf);
   }
 
+  @Override
   public Decompressor createDecompressor() {
     return ZlibFactory.getZlibDecompressor(conf);
   }
   
+  @Override
   public String getDefaultExtension() {
     return ".deflate";
   }