Posted to common-commits@hadoop.apache.org by ss...@apache.org on 2012/10/16 02:03:31 UTC

svn commit: r1398581 [7/14] - in /hadoop/common/branches/MR-3902/hadoop-common-project: hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-auth/src...

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java Tue Oct 16 00:02:55 2012
@@ -258,6 +258,7 @@ public class Lz4Decompressor implements 
     return 0;
   }
 
+  @Override
   public synchronized void reset() {
     finished = false;
     compressedDirectBufLen = 0;

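Most hunks in this commit simply add @Override to methods that implement an interface method — legal since Java 6, and worthwhile because the compiler then rejects accidental signature mismatches instead of silently creating a new method. A minimal standalone illustration (hypothetical classes, not from the commit):

interface Resettable {
  void reset();
}

class Buffer implements Resettable {
  @Override
  public synchronized void reset() {  // compiles: matches the interface method
  }

  // @Override
  // public void resett() {}  // with @Override uncommented, javac would flag
  //                          // the typo instead of adding a never-called method
}
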
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java Tue Oct 16 00:02:55 2012
@@ -26,6 +26,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.Compressor;
+import org.apache.hadoop.util.NativeCodeLoader;
 
 /**
  * A {@link Compressor} based on the snappy compression algorithm.
@@ -51,22 +52,24 @@ public class SnappyCompressor implements
   private long bytesRead = 0L;
   private long bytesWritten = 0L;
 
-
+  private static boolean nativeSnappyLoaded = false;
+  
   static {
-    if (LoadSnappy.isLoaded()) {
-      // Initialize the native library
+    if (NativeCodeLoader.isNativeCodeLoaded() &&
+        NativeCodeLoader.buildSupportsSnappy()) {
       try {
         initIDs();
+        nativeSnappyLoaded = true;
       } catch (Throwable t) {
-        // Ignore failure to load/initialize snappy
-        LOG.warn(t.toString());
+        LOG.error("failed to load SnappyCompressor", t);
       }
-    } else {
-      LOG.error("Cannot load " + SnappyCompressor.class.getName() +
-          " without snappy library!");
     }
   }
-
+  
+  public static boolean isNativeCodeLoaded() {
+    return nativeSnappyLoaded;
+  }
+  
   /**
    * Creates a new compressor.
    *

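The rewritten static initializer calls initIDs() only when libhadoop is loaded and was built with Snappy support, and records the outcome in nativeSnappyLoaded instead of relying on the removed LoadSnappy helper. A sketch of how a caller might consult the new isNativeCodeLoaded() accessor — the fail-fast fallback is an assumption, not part of this hunk:

import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.snappy.SnappyCompressor;

public class SnappyGuard {
  /** Returns a native snappy compressor, or fails fast with a clear message. */
  public static Compressor newSnappyCompressor() {
    if (!SnappyCompressor.isNativeCodeLoaded()) {
      // libhadoop missing, or built without snappy: see the static block above
      throw new IllegalStateException("native snappy is not available");
    }
    return new SnappyCompressor();
  }
}
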
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java Tue Oct 16 00:02:55 2012
@@ -25,6 +25,7 @@ import java.nio.ByteBuffer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.compress.Decompressor;
+import org.apache.hadoop.util.NativeCodeLoader;
 
 /**
  * A {@link Decompressor} based on the snappy compression algorithm.
@@ -47,21 +48,24 @@ public class SnappyDecompressor implemen
   private int userBufOff = 0, userBufLen = 0;
   private boolean finished;
 
+  private static boolean nativeSnappyLoaded = false;
+
   static {
-    if (LoadSnappy.isLoaded()) {
-      // Initialize the native library
+    if (NativeCodeLoader.isNativeCodeLoaded() &&
+        NativeCodeLoader.buildSupportsSnappy()) {
       try {
         initIDs();
+        nativeSnappyLoaded = true;
       } catch (Throwable t) {
-        // Ignore failure to load/initialize snappy
-        LOG.warn(t.toString());
+        LOG.error("failed to load SnappyDecompressor", t);
       }
-    } else {
-      LOG.error("Cannot load " + SnappyDecompressor.class.getName() +
-          " without snappy library!");
     }
   }
-
+  
+  public static boolean isNativeCodeLoaded() {
+    return nativeSnappyLoaded;
+  }
+  
   /**
    * Creates a new compressor.
    *
@@ -257,6 +261,7 @@ public class SnappyDecompressor implemen
     return 0;
   }
 
+  @Override
   public synchronized void reset() {
     finished = false;
     compressedDirectBufLen = 0;

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java Tue Oct 16 00:02:55 2012
@@ -122,7 +122,7 @@ public class BuiltInGzipDecompressor imp
     //        in the first buffer load?  (But how else would one do it?)
   }
 
-  /** {@inheritDoc} */
+  @Override
   public synchronized boolean needsInput() {
     if (state == GzipStateLabel.DEFLATE_STREAM) {  // most common case
       return inflater.needsInput();
@@ -144,6 +144,7 @@ public class BuiltInGzipDecompressor imp
    *        the bulk deflate stream, which is a performance hit we don't want
    *        to absorb.  (Decompressor now documents this requirement.)
    */
+  @Override
   public synchronized void setInput(byte[] b, int off, int len) {
     if (b == null) {
       throw new NullPointerException();
@@ -175,6 +176,7 @@ public class BuiltInGzipDecompressor imp
    * methods below), the deflate stream is never copied; Inflater operates
    * directly on the user's buffer.
    */
+  @Override
   public synchronized int decompress(byte[] b, int off, int len)
   throws IOException {
     int numAvailBytes = 0;
@@ -421,16 +423,17 @@ public class BuiltInGzipDecompressor imp
    *
    * @return the total (non-negative) number of unprocessed bytes in input
    */
+  @Override
   public synchronized int getRemaining() {
     return userBufLen;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public synchronized boolean needsDictionary() {
     return inflater.needsDictionary();
   }
 
-  /** {@inheritDoc} */
+  @Override
   public synchronized void setDictionary(byte[] b, int off, int len) {
     inflater.setDictionary(b, off, len);
   }
@@ -439,6 +442,7 @@ public class BuiltInGzipDecompressor imp
    * Returns true if the end of the gzip substream (single "member") has been
    * reached.</p>
    */
+  @Override
   public synchronized boolean finished() {
     return (state == GzipStateLabel.FINISHED);
   }
@@ -447,6 +451,7 @@ public class BuiltInGzipDecompressor imp
    * Resets everything, including the input buffer, regardless of whether the
    * current gzip substream is finished.</p>
    */
+  @Override
   public synchronized void reset() {
     // could optionally emit INFO message if state != GzipStateLabel.FINISHED
     inflater.reset();
@@ -463,7 +468,7 @@ public class BuiltInGzipDecompressor imp
     hasHeaderCRC = false;
   }
 
-  /** {@inheritDoc} */
+  @Override
   public synchronized void end() {
     inflater.end();
   }

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java Tue Oct 16 00:02:55 2012
@@ -48,6 +48,7 @@ public class BuiltInZlibDeflater extends
     super();
   }
 
+  @Override
   public synchronized int compress(byte[] b, int off, int len) 
     throws IOException {
     return super.deflate(b, off, len);

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibInflater.java Tue Oct 16 00:02:55 2012
@@ -39,6 +39,7 @@ public class BuiltInZlibInflater extends
     super();
   }
 
+  @Override
   public synchronized int decompress(byte[] b, int off, int len) 
     throws IOException {
     try {

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java Tue Oct 16 00:02:55 2012
@@ -259,6 +259,7 @@ public class ZlibCompressor implements C
     }
   }
 
+  @Override
   public synchronized void setInput(byte[] b, int off, int len) {
     if (b== null) {
       throw new NullPointerException();
@@ -287,6 +288,7 @@ public class ZlibCompressor implements C
     uncompressedDirectBufLen = uncompressedDirectBuf.position();
   }
 
+  @Override
   public synchronized void setDictionary(byte[] b, int off, int len) {
     if (stream == 0 || b == null) {
       throw new NullPointerException();
@@ -297,6 +299,7 @@ public class ZlibCompressor implements C
     setDictionary(stream, b, off, len);
   }
 
+  @Override
   public synchronized boolean needsInput() {
     // Consume remaining compressed data?
     if (compressedDirectBuf.remaining() > 0) {
@@ -325,16 +328,19 @@ public class ZlibCompressor implements C
     return false;
   }
   
+  @Override
   public synchronized void finish() {
     finish = true;
   }
   
+  @Override
   public synchronized boolean finished() {
     // Check if 'zlib' says its 'finished' and
     // all compressed data has been consumed
     return (finished && compressedDirectBuf.remaining() == 0);
   }
 
+  @Override
   public synchronized int compress(byte[] b, int off, int len) 
     throws IOException {
     if (b == null) {
@@ -385,6 +391,7 @@ public class ZlibCompressor implements C
    *
    * @return the total (non-negative) number of compressed bytes output so far
    */
+  @Override
   public synchronized long getBytesWritten() {
     checkStream();
     return getBytesWritten(stream);
@@ -395,11 +402,13 @@ public class ZlibCompressor implements C
    *
    * @return the total (non-negative) number of uncompressed bytes input so far
    */
+  @Override
   public synchronized long getBytesRead() {
     checkStream();
     return getBytesRead(stream);
   }
 
+  @Override
   public synchronized void reset() {
     checkStream();
     reset(stream);
@@ -413,6 +422,7 @@ public class ZlibCompressor implements C
     userBufOff = userBufLen = 0;
   }
   
+  @Override
   public synchronized void end() {
     if (stream != 0) {
       end(stream);

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java Tue Oct 16 00:02:55 2012
@@ -118,6 +118,7 @@ public class ZlibDecompressor implements
     this(CompressionHeader.DEFAULT_HEADER, DEFAULT_DIRECT_BUFFER_SIZE);
   }
 
+  @Override
   public synchronized void setInput(byte[] b, int off, int len) {
     if (b == null) {
       throw new NullPointerException();
@@ -154,6 +155,7 @@ public class ZlibDecompressor implements
     userBufLen -= compressedDirectBufLen;
   }
 
+  @Override
   public synchronized void setDictionary(byte[] b, int off, int len) {
     if (stream == 0 || b == null) {
       throw new NullPointerException();
@@ -165,6 +167,7 @@ public class ZlibDecompressor implements
     needDict = false;
   }
 
+  @Override
   public synchronized boolean needsInput() {
     // Consume remaining compressed data?
     if (uncompressedDirectBuf.remaining() > 0) {
@@ -184,16 +187,19 @@ public class ZlibDecompressor implements
     return false;
   }
 
+  @Override
   public synchronized boolean needsDictionary() {
     return needDict;
   }
 
+  @Override
   public synchronized boolean finished() {
     // Check if 'zlib' says it's 'finished' and
     // all compressed data has been consumed
     return (finished && uncompressedDirectBuf.remaining() == 0);
   }
 
+  @Override
   public synchronized int decompress(byte[] b, int off, int len) 
     throws IOException {
     if (b == null) {
@@ -255,6 +261,7 @@ public class ZlibDecompressor implements
    *
    * @return the total (non-negative) number of unprocessed bytes in input
    */
+  @Override
   public synchronized int getRemaining() {
     checkStream();
     return userBufLen + getRemaining(stream);  // userBuf + compressedDirectBuf
@@ -263,6 +270,7 @@ public class ZlibDecompressor implements
   /**
    * Resets everything including the input buffers (user and direct).</p>
    */
+  @Override
   public synchronized void reset() {
     checkStream();
     reset(stream);
@@ -274,6 +282,7 @@ public class ZlibDecompressor implements
     userBufOff = userBufLen = 0;
   }
 
+  @Override
   public synchronized void end() {
     if (stream != 0) {
       end(stream);
@@ -281,6 +290,7 @@ public class ZlibDecompressor implements
     }
   }
 
+  @Override
   protected void finalize() {
     end();
   }

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java Tue Oct 16 00:02:55 2012
@@ -300,6 +300,7 @@ final class BCFile {
      * Close the BCFile Writer. Attempting to use the Writer after calling
      * <code>close</code> is not allowed and may lead to undetermined results.
      */
+    @Override
     public void close() throws IOException {
       if (closed == true) {
         return;
@@ -447,6 +448,7 @@ final class BCFile {
         this.compressAlgo = compressAlgo;
       }
 
+      @Override
       public void register(long raw, long begin, long end) {
         metaIndex.addEntry(new MetaIndexEntry(name, compressAlgo,
             new BlockRegion(begin, end - begin, raw)));
@@ -463,6 +465,7 @@ final class BCFile {
         // do nothing
       }
 
+      @Override
       public void register(long raw, long begin, long end) {
         dataIndex.addBlockRegion(new BlockRegion(begin, end - begin, raw));
       }
@@ -671,6 +674,7 @@ final class BCFile {
     /**
      * Finishing reading the BCFile. Release all resources.
      */
+    @Override
     public void close() {
       // nothing to be done now
     }

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/CompareUtils.java Tue Oct 16 00:02:55 2012
@@ -68,6 +68,7 @@ class CompareUtils {
       magnitude = m;
     }
 
+    @Override
     public long magnitude() {
       return magnitude;
     }

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java Tue Oct 16 00:02:55 2012
@@ -297,6 +297,7 @@ public class TFile {
      * 
      * The underlying FSDataOutputStream is not closed.
      */
+    @Override
     public void close() throws IOException {
       if ((state == State.CLOSED)) {
         return;
@@ -820,6 +821,7 @@ public class TFile {
      * Close the reader. The state of the Reader object is undefined after
      * close. Calling close() for multiple times has no effect.
      */
+    @Override
     public void close() throws IOException {
       readerBCF.close();
     }
@@ -1573,6 +1575,7 @@ public class TFile {
        * scanner after calling close is not defined. The entry returned by the
        * previous entry() call will be invalid.
        */
+      @Override
       public void close() throws IOException {
         parkCursorAtEnd();
       }
@@ -2102,7 +2105,7 @@ public class TFile {
     }
 
     public boolean isSorted() {
-      return !strComparator.equals("");
+      return !strComparator.isEmpty();
     }
 
     public String getComparatorString() {

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java Tue Oct 16 00:02:55 2012
@@ -125,7 +125,7 @@ class TFileDumper {
           dataSizeUncompressed += region.getRawSize();
         }
         properties.put("Data Block Bytes", Long.toString(dataSize));
-        if (reader.readerBCF.getDefaultCompressionName() != "none") {
+        if (!reader.readerBCF.getDefaultCompressionName().equals("none")) {
           properties.put("Data Block Uncompressed Bytes", Long
               .toString(dataSizeUncompressed));
           properties.put("Data Block Compression Ratio", String.format(

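The TFileDumper change fixes a real bug: != on a String compares object identity, so getDefaultCompressionName() != "none" could evaluate either way depending on string interning, while equals("none") compares contents. The TFile hunk above makes the related empty-string test clearer with isEmpty(). A standalone demonstration:

public class StringEqualityDemo {
  public static void main(String[] args) {
    String literal = "none";
    String copy = new String("none");          // same contents, distinct object

    System.out.println(literal != copy);       // true  (identity differs)
    System.out.println(!literal.equals(copy)); // false (contents match)
    System.out.println("".isEmpty());          // true  (clearer than equals(""))
  }
}
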
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Tue Oct 16 00:02:55 2012
@@ -202,6 +202,7 @@ public class NativeIO {
       this.mode = mode;
     }
 
+    @Override
     public String toString() {
       return "Stat(owner='" + owner + "', group='" + group + "'" +
         ", mode=" + mode + ")";

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java Tue Oct 16 00:02:55 2012
@@ -38,6 +38,7 @@ public class NativeIOException extends I
     return errno;
   }
 
+  @Override
   public String toString() {
     return errno.toString() + ": " + super.getMessage();
   }

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java Tue Oct 16 00:02:55 2012
@@ -150,6 +150,7 @@ public class RetryPolicies {
   }
   
   static class TryOnceThenFail implements RetryPolicy {
+    @Override
     public RetryAction shouldRetry(Exception e, int retries, int failovers,
         boolean isMethodIdempotent) throws Exception {
       return RetryAction.FAIL;
@@ -157,6 +158,7 @@ public class RetryPolicies {
   }
   
   static class RetryForever implements RetryPolicy {
+    @Override
     public RetryAction shouldRetry(Exception e, int retries, int failovers,
         boolean isMethodIdempotent) throws Exception {
       return RetryAction.RETRY;
@@ -430,6 +432,7 @@ public class RetryPolicies {
       this.exceptionToPolicyMap = exceptionToPolicyMap;
     }
 
+    @Override
     public RetryAction shouldRetry(Exception e, int retries, int failovers,
         boolean isMethodIdempotent) throws Exception {
       RetryPolicy policy = exceptionToPolicyMap.get(e.getClass());
@@ -457,6 +460,7 @@ public class RetryPolicies {
       }
     }
 
+    @Override
     public RetryAction shouldRetry(Exception e, int retries, int failovers,
         boolean isMethodIdempotent) throws Exception {
       RetryPolicy policy = null;

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java Tue Oct 16 00:02:55 2012
@@ -56,6 +56,7 @@ public abstract class DeserializerCompar
     this.deserializer.open(buffer);
   }
 
+  @Override
   public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
     try {
       

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java Tue Oct 16 00:02:55 2012
@@ -24,11 +24,8 @@ import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
 import java.io.OutputStream;
 import java.io.Serializable;
-import java.util.Map;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.io.RawComparator;
 
 /**
  * <p>
@@ -45,6 +42,7 @@ public class JavaSerialization implement
 
     private ObjectInputStream ois;
 
+    @Override
     public void open(InputStream in) throws IOException {
       ois = new ObjectInputStream(in) {
         @Override protected void readStreamHeader() {
@@ -53,6 +51,7 @@ public class JavaSerialization implement
       };
     }
     
+    @Override
     @SuppressWarnings("unchecked")
     public T deserialize(T object) throws IOException {
       try {
@@ -63,6 +62,7 @@ public class JavaSerialization implement
       }
     }
 
+    @Override
     public void close() throws IOException {
       ois.close();
     }
@@ -74,6 +74,7 @@ public class JavaSerialization implement
 
     private ObjectOutputStream oos;
 
+    @Override
     public void open(OutputStream out) throws IOException {
       oos = new ObjectOutputStream(out) {
         @Override protected void writeStreamHeader() {
@@ -82,27 +83,32 @@ public class JavaSerialization implement
       };
     }
 
+    @Override
     public void serialize(Serializable object) throws IOException {
       oos.reset(); // clear (class) back-references
       oos.writeObject(object);
     }
 
+    @Override
     public void close() throws IOException {
       oos.close();
     }
 
   }
 
+  @Override
   @InterfaceAudience.Private
   public boolean accept(Class<?> c) {
     return Serializable.class.isAssignableFrom(c);
   }
 
+  @Override
   @InterfaceAudience.Private
   public Deserializer<Serializable> getDeserializer(Class<Serializable> c) {
     return new JavaSerializationDeserializer<Serializable>();
   }
 
+  @Override
   @InterfaceAudience.Private
   public Serializer<Serializable> getSerializer(Class<Serializable> c) {
     return new JavaSerializationSerializer();

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java Tue Oct 16 00:02:55 2012
@@ -44,6 +44,7 @@ public class JavaSerializationComparator
     super(new JavaSerialization.JavaSerializationDeserializer<T>());
   }
 
+  @Override
   @InterfaceAudience.Private
   public int compare(T o1, T o2) {
     return o1.compareTo(o2);

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java Tue Oct 16 00:02:55 2012
@@ -40,12 +40,12 @@ import org.apache.hadoop.util.Reflection
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class SerializationFactory extends Configured {
-  
-  private static final Log LOG =
+
+  static final Log LOG =
     LogFactory.getLog(SerializationFactory.class.getName());
 
   private List<Serialization<?>> serializations = new ArrayList<Serialization<?>>();
-  
+
   /**
    * <p>
    * Serializations are found by reading the <code>io.serializations</code>
@@ -55,15 +55,21 @@ public class SerializationFactory extend
    */
   public SerializationFactory(Configuration conf) {
     super(conf);
-    for (String serializerName : conf.getStrings(
-      CommonConfigurationKeys.IO_SERIALIZATIONS_KEY,
-      new String[]{WritableSerialization.class.getName(),
-        AvroSpecificSerialization.class.getName(),
-        AvroReflectSerialization.class.getName()})) {
-      add(conf, serializerName);
+    if (conf.get(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY).equals("")) {
+      LOG.warn("Serialization for various data types may not be available. Please configure "
+          + CommonConfigurationKeys.IO_SERIALIZATIONS_KEY
+          + " properly to have serialization support (it is currently not set).");
+    } else {
+      for (String serializerName : conf.getStrings(
+          CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, new String[] {
+              WritableSerialization.class.getName(),
+              AvroSpecificSerialization.class.getName(),
+              AvroReflectSerialization.class.getName() })) {
+        add(conf, serializerName);
+      }
     }
   }
-  
+
   @SuppressWarnings("unchecked")
   private void add(Configuration conf, String serializationName) {
     try {
@@ -101,5 +107,5 @@ public class SerializationFactory extend
     }
     return null;
   }
-  
+
 }

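After this change the factory logs a warning when io.serializations is explicitly set to the empty string, and otherwise keeps the old behavior of defaulting to the Writable and Avro serializations. A hedged sketch of configuring the key explicitly; the class list is the same default installed by this hunk:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.serializer.SerializationFactory;

public class SerializationSetup {
  public static SerializationFactory newFactory() {
    Configuration conf = new Configuration();
    // Same list the constructor falls back to when the key is left unset:
    conf.set("io.serializations",
        "org.apache.hadoop.io.serializer.WritableSerialization,"
        + "org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization,"
        + "org.apache.hadoop.io.serializer.avro.AvroReflectSerialization");
    return new SerializationFactory(conf);
  }
}
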
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/WritableSerialization.java Tue Oct 16 00:02:55 2012
@@ -23,8 +23,6 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.util.Map;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java Tue Oct 16 00:02:55 2012
@@ -47,11 +47,13 @@ public abstract class AvroSerialization<
   @InterfaceAudience.Private
   public static final String AVRO_SCHEMA_KEY = "Avro-Schema";
 
+  @Override
   @InterfaceAudience.Private
   public Deserializer<T> getDeserializer(Class<T> c) {
     return new AvroDeserializer(c);
   }
 
+  @Override
   @InterfaceAudience.Private
   public Serializer<T> getSerializer(Class<T> c) {
     return new AvroSerializer(c);

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Tue Oct 16 00:02:55 2012
@@ -83,6 +83,8 @@ import org.apache.hadoop.util.Time;
  * 
  * @see Server
  */
+@InterfaceAudience.LimitedPrivate(value = { "Common", "HDFS", "MapReduce", "Yarn" })
+@InterfaceStability.Evolving
 public class Client {
   
   public static final Log LOG = LogFactory.getLog(Client.class);
@@ -223,7 +225,6 @@ public class Client {
     private IpcConnectionContextProto connectionContext;   // connection context
     private final ConnectionId remoteId;                // connection id
     private AuthMethod authMethod; // authentication method
-    private boolean useSasl;
     private Token<? extends TokenIdentifier> token;
     private SaslRpcClient saslRpcClient;
     
@@ -268,8 +269,7 @@ public class Client {
 
       UserGroupInformation ticket = remoteId.getTicket();
       Class<?> protocol = remoteId.getProtocol();
-      this.useSasl = UserGroupInformation.isSecurityEnabled();
-      if (useSasl && protocol != null) {
+      if (protocol != null) {
         TokenInfo tokenInfo = SecurityUtil.getTokenInfo(protocol, conf);
         if (tokenInfo != null) {
           TokenSelector<? extends TokenIdentifier> tokenSelector = null;
@@ -294,12 +294,12 @@ public class Client {
         }
       }
       
-      if (!useSasl) {
-        authMethod = AuthMethod.SIMPLE;
-      } else if (token != null) {
+      if (token != null) {
         authMethod = AuthMethod.DIGEST;
-      } else {
+      } else if (UserGroupInformation.isSecurityEnabled()) {
         authMethod = AuthMethod.KERBEROS;
+      } else {
+        authMethod = AuthMethod.SIMPLE;
       }
       
       connectionContext = ProtoUtil.makeIpcConnectionContext(
@@ -364,6 +364,7 @@ public class Client {
        * until a byte is read.
        * @throws IOException for any IO problem other than socket timeout
        */
+      @Override
       public int read() throws IOException {
         do {
           try {
@@ -380,6 +381,7 @@ public class Client {
        * 
        * @return the total number of bytes read; -1 if the connection is closed.
        */
+      @Override
       public int read(byte[] buf, int off, int len) throws IOException {
         do {
           try {
@@ -510,6 +512,7 @@ public class Client {
         final Random rand, final UserGroupInformation ugi) throws IOException,
         InterruptedException {
       ugi.doAs(new PrivilegedExceptionAction<Object>() {
+        @Override
         public Object run() throws IOException, InterruptedException {
           final short MAX_BACKOFF = 5000;
           closeConnection();
@@ -571,14 +574,12 @@ public class Client {
           InputStream inStream = NetUtils.getInputStream(socket);
           OutputStream outStream = NetUtils.getOutputStream(socket);
           writeConnectionHeader(outStream);
-          if (useSasl) {
+          if (authMethod != AuthMethod.SIMPLE) {
             final InputStream in2 = inStream;
             final OutputStream out2 = outStream;
             UserGroupInformation ticket = remoteId.getTicket();
-            if (authMethod == AuthMethod.KERBEROS) {
-              if (ticket.getRealUser() != null) {
-                ticket = ticket.getRealUser();
-              }
+            if (ticket.getRealUser() != null) {
+              ticket = ticket.getRealUser();
             }
             boolean continueSasl = false;
             try {
@@ -609,7 +610,6 @@ public class Client {
                   connectionContext.getProtocol(), 
                   ProtoUtil.getUgi(connectionContext.getUserInfo()),
                   authMethod);
-              useSasl = false;
             }
           }
         
@@ -803,6 +803,7 @@ public class Client {
       }
     }
 
+    @Override
     public void run() {
       if (LOG.isDebugEnabled())
         LOG.debug(getName() + ": starting, having connections " 
@@ -1168,7 +1169,7 @@ public class Client {
                   call.error);
         }
       } else {
-        return call.rpcResponse;
+        return call.getRpcResult();
       }
     }
   }

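On the client side the useSasl flag is gone; the connection now picks its AuthMethod directly — DIGEST when a token was selected, KERBEROS when security is enabled, SIMPLE otherwise. The decision, restated as a self-contained sketch (the enum mirrors Hadoop's AuthMethod but is local to this example):

public class ClientAuthSelection {
  enum AuthMethod { SIMPLE, DIGEST, KERBEROS }

  static AuthMethod select(boolean haveToken, boolean securityEnabled) {
    if (haveToken) {
      return AuthMethod.DIGEST;     // a token always wins
    } else if (securityEnabled) {
      return AuthMethod.KERBEROS;   // secure cluster, no token
    } else {
      return AuthMethod.SIMPLE;     // insecure cluster
    }
  }
}
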
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Tue Oct 16 00:02:55 2012
@@ -240,6 +240,7 @@ public class ProtobufRpcEngine implement
       return returnMessage;
     }
 
+    @Override
     public void close() throws IOException {
       if (!isClosed) {
         isClosed = true;

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java Tue Oct 16 00:02:55 2012
@@ -19,7 +19,6 @@
 package org.apache.hadoop.ipc;
 
 import java.io.IOException;
-import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.util.HashSet;
 

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java Tue Oct 16 00:02:55 2012
@@ -36,7 +36,8 @@ public class ProtocolSignature implement
     WritableFactories.setFactory
       (ProtocolSignature.class,
        new WritableFactory() {
-         public Writable newInstance() { return new ProtocolSignature(); }
+         @Override
+        public Writable newInstance() { return new ProtocolSignature(); }
        });
   }
 

Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Tue Oct 16 00:02:55 2012
@@ -48,6 +48,8 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
@@ -72,6 +74,8 @@ import com.google.protobuf.BlockingServi
  * All methods in the protocol should throw only IOException.  No field data of
  * the protocol instance is transmitted.
  */
+@InterfaceAudience.LimitedPrivate(value = { "Common", "HDFS", "MapReduce", "Yarn" })
+@InterfaceStability.Evolving
 public class RPC {
   public enum RpcKind {
     RPC_BUILTIN ((short) 1),         // Used for built in calls by tests
@@ -629,7 +633,7 @@ public class RPC {
 
   /** Construct a server for a protocol implementation instance listening on a
    * port and address.
-   * @deprecated protocol interface should be passed.
+   * @deprecated Please use {@link Builder} to build the {@link Server}
    */
   @Deprecated
   public static Server getServer(final Object instance, final String bindAddress, final int port, Configuration conf) 
@@ -639,7 +643,7 @@ public class RPC {
 
   /** Construct a server for a protocol implementation instance listening on a
    * port and address.
-   * @deprecated protocol interface should be passed.
+   * @deprecated Please use {@link Builder} to build the {@link Server}
    */
   @Deprecated
   public static Server getServer(final Object instance, final String bindAddress, final int port,
@@ -651,7 +655,10 @@ public class RPC {
                      null);
   }
 
-  /** Construct a server for a protocol implementation instance. */
+  /** Construct a server for a protocol implementation instance.
+   *  @deprecated Please use {@link Builder} to build the {@link Server}
+   */
+  @Deprecated
   public static Server getServer(Class<?> protocol,
                                  Object instance, String bindAddress,
                                  int port, Configuration conf) 
@@ -661,7 +668,7 @@ public class RPC {
   }
 
   /** Construct a server for a protocol implementation instance.
-   * @deprecated secretManager should be passed.
+   * @deprecated Please use {@link Builder} to build the {@link Server}
    */
   @Deprecated
   public static Server getServer(Class<?> protocol,
@@ -674,7 +681,10 @@ public class RPC {
                  conf, null, null);
   }
   
-  /** Construct a server for a protocol implementation instance. */
+  /** Construct a server for a protocol implementation instance. 
+   *  @deprecated Please use {@link Builder} to build the {@link Server}
+   */
+  @Deprecated
   public static Server getServer(Class<?> protocol,
                                  Object instance, String bindAddress, int port,
                                  int numHandlers,
@@ -685,6 +695,10 @@ public class RPC {
         conf, secretManager, null);
   }
   
+  /**
+   *  @deprecated Please use {@link Builder} to build the {@link Server}
+   */
+  @Deprecated
   public static Server getServer(Class<?> protocol,
       Object instance, String bindAddress, int port,
       int numHandlers,
@@ -697,8 +711,10 @@ public class RPC {
                  verbose, conf, secretManager, portRangeConfig);
   }
 
-  /** Construct a server for a protocol implementation instance. */
-
+  /** Construct a server for a protocol implementation instance.
+   *  @deprecated Please use {@link Builder} to build the {@link Server}
+   */
+  @Deprecated
   public static <PROTO extends VersionedProtocol, IMPL extends PROTO> 
         Server getServer(Class<PROTO> protocol,
                                  IMPL instance, String bindAddress, int port,
@@ -713,6 +729,110 @@ public class RPC {
                  null);
   }
 
+  /**
+   * Class to construct instances of RPC server with specific options.
+   */
+  public static class Builder {
+    private Class<?> protocol = null;
+    private Object instance = null;
+    private String bindAddress = "0.0.0.0";
+    private int port = 0;
+    private int numHandlers = 1;
+    private int numReaders = -1;
+    private int queueSizePerHandler = -1;
+    private boolean verbose = false;
+    private final Configuration conf;    
+    private SecretManager<? extends TokenIdentifier> secretManager = null;
+    private String portRangeConfig = null;
+    
+    public Builder(Configuration conf) {
+      this.conf = conf;
+    }
+
+    /** Mandatory field */
+    public Builder setProtocol(Class<?> protocol) {
+      this.protocol = protocol;
+      return this;
+    }
+    
+    /** Mandatory field */
+    public Builder setInstance(Object instance) {
+      this.instance = instance;
+      return this;
+    }
+    
+    /** Default: 0.0.0.0 */
+    public Builder setBindAddress(String bindAddress) {
+      this.bindAddress = bindAddress;
+      return this;
+    }
+    
+    /** Default: 0 */
+    public Builder setPort(int port) {
+      this.port = port;
+      return this;
+    }
+    
+    /** Default: 1 */
+    public Builder setNumHandlers(int numHandlers) {
+      this.numHandlers = numHandlers;
+      return this;
+    }
+    
+    /** Default: -1 */
+    public Builder setnumReaders(int numReaders) {
+      this.numReaders = numReaders;
+      return this;
+    }
+    
+    /** Default: -1 */
+    public Builder setQueueSizePerHandler(int queueSizePerHandler) {
+      this.queueSizePerHandler = queueSizePerHandler;
+      return this;
+    }
+    
+    /** Default: false */
+    public Builder setVerbose(boolean verbose) {
+      this.verbose = verbose;
+      return this;
+    }
+    
+    /** Default: null */
+    public Builder setSecretManager(
+        SecretManager<? extends TokenIdentifier> secretManager) {
+      this.secretManager = secretManager;
+      return this;
+    }
+    
+    /** Default: null */
+    public Builder setPortRangeConfig(String portRangeConfig) {
+      this.portRangeConfig = portRangeConfig;
+      return this;
+    }
+    
+    /**
+     * Build the RPC Server. 
+     * @throws IOException on error
+     * @throws HadoopIllegalArgumentException when mandatory fields are not set
+     */
+    public Server build() throws IOException, HadoopIllegalArgumentException {
+      if (this.conf == null) {
+        throw new HadoopIllegalArgumentException("conf is not set");
+      }
+      if (this.protocol == null) {
+        throw new HadoopIllegalArgumentException("protocol is not set");
+      }
+      if (this.instance == null) {
+        throw new HadoopIllegalArgumentException("instance is not set");
+      }
+      
+      return getProtocolEngine(this.protocol, this.conf).getServer(
+          this.protocol, this.instance, this.bindAddress, this.port,
+          this.numHandlers, this.numReaders, this.queueSizePerHandler,
+          this.verbose, this.conf, this.secretManager, this.portRangeConfig);
+    }
+  }
+  
   /** An RPC Server. */
   public abstract static class Server extends org.apache.hadoop.ipc.Server {
    boolean verbose;

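All of the getServer(...) overloads above are now deprecated in favor of the new Builder, which makes the mandatory fields (protocol, instance, conf) explicit and gives everything else a sane default. A hedged usage sketch — MyProtocol and MyProtocolImpl are placeholders, not part of the commit:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

public class RpcBuilderExample {
  interface MyProtocol { /* RPC methods, each throwing IOException */ }
  static class MyProtocolImpl implements MyProtocol { }

  public static RPC.Server startServer(Configuration conf) throws IOException {
    RPC.Server server = new RPC.Builder(conf)
        .setProtocol(MyProtocol.class)      // mandatory
        .setInstance(new MyProtocolImpl())  // mandatory
        .setBindAddress("0.0.0.0")          // defaults match the setters above
        .setPort(0)                         // 0 lets the server pick a port
        .setNumHandlers(5)
        .build();
    server.start();
    return server;
  }
}
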
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Tue Oct 16 00:02:55 2012
@@ -64,6 +64,7 @@ import javax.security.sasl.SaslServer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -86,7 +87,6 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
@@ -107,6 +107,8 @@ import com.google.common.annotations.Vis
  * 
  * @see Client
  */
+@InterfaceAudience.LimitedPrivate(value = { "Common", "HDFS", "MapReduce", "Yarn" })
+@InterfaceStability.Evolving
 public abstract class Server {
   private final boolean authorize;
   private boolean isSecurityEnabled;
@@ -517,6 +519,7 @@ public abstract class Server {
         this.readSelector = Selector.open();
       }
       
+      @Override
       public void run() {
         LOG.info("Starting " + getName());
         try {
@@ -1370,20 +1373,38 @@ public abstract class Server {
           dataLengthBuffer.clear();
           if (authMethod == null) {
             throw new IOException("Unable to read authentication method");
-          }
-          if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) {
-            AccessControlException ae = new AccessControlException("Authorization ("
-              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
-              + ") is enabled but authentication ("
-              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
-              + ") is configured as simple. Please configure another method "
-              + "like kerberos or digest.");
-            setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
-                null, ae.getClass().getName(), ae.getMessage());
-            responder.doRespond(authFailedCall);
-            throw ae;
-          }
-          if (!isSecurityEnabled && authMethod != AuthMethod.SIMPLE) {
+          }          
+          final boolean clientUsingSasl;
+          switch (authMethod) {
+            case SIMPLE: { // no sasl for simple
+              if (isSecurityEnabled) {
+                AccessControlException ae = new AccessControlException("Authorization ("
+                    + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
+                    + ") is enabled but authentication ("
+                    + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
+                    + ") is configured as simple. Please configure another method "
+                    + "like kerberos or digest.");
+                setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
+                    null, ae.getClass().getName(), ae.getMessage());
+                responder.doRespond(authFailedCall);
+                throw ae;
+              }
+              clientUsingSasl = false;
+              useSasl = false; 
+              break;
+            }
+            case DIGEST: {
+              clientUsingSasl = true;
+              useSasl = (secretManager != null);
+              break;
+            }
+            default: {
+              clientUsingSasl = true;
+              useSasl = isSecurityEnabled; 
+              break;
+            }
+          }          
+          if (clientUsingSasl && !useSasl) {
             doSaslReply(SaslStatus.SUCCESS, new IntWritable(
                 SaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null);
             authMethod = AuthMethod.SIMPLE;
@@ -1392,9 +1413,6 @@ public abstract class Server {
             // to simple auth from now on.
             skipInitialSaslHandshake = true;
           }
-          if (authMethod != AuthMethod.SIMPLE) {
-            useSasl = true;
-          }
           
           connectionHeaderBuf = null;
           connectionHeaderRead = true;
@@ -1528,8 +1546,6 @@ public abstract class Server {
             UserGroupInformation realUser = user;
             user = UserGroupInformation.createProxyUser(protocolUser
                 .getUserName(), realUser);
-            // Now the user is a proxy user, set Authentication method Proxy.
-            user.setAuthenticationMethod(AuthenticationMethod.PROXY);
           }
         }
       }
@@ -1879,7 +1895,7 @@ public abstract class Server {
     // Create the responder here
     responder = new Responder();
     
-    if (isSecurityEnabled) {
+    if (secretManager != null) {
       SaslRpcServer.init(conf);
     }
     

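The server now decides per connection whether SASL is in play: a SIMPLE client is rejected outright when security is on, DIGEST requires a secret manager, and KERBEROS (the default branch) follows the security setting; when the client asked for SASL but the server declines, it replies SWITCH_TO_SIMPLE_AUTH. The same decision as a standalone sketch (local names, not Hadoop API):

public class ServerSaslDecision {
  enum AuthMethod { SIMPLE, DIGEST, KERBEROS }

  /** True when the server will run a SASL handshake for this connection. */
  static boolean useSasl(AuthMethod clientMethod, boolean securityEnabled,
                         boolean haveSecretManager) {
    switch (clientMethod) {
      case SIMPLE:
        if (securityEnabled) {
          // mirrors the AccessControlException path in the hunk above
          throw new SecurityException(
              "simple authentication is not allowed when security is enabled");
        }
        return false;
      case DIGEST:
        return haveSecretManager;   // token auth needs a SecretManager
      default:                      // KERBEROS
        return securityEnabled;
    }
  }
}
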
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java Tue Oct 16 00:02:55 2012
@@ -142,6 +142,7 @@ public class WritableRpcEngine implement
       return rpcVersion;
     }
 
+    @Override
     @SuppressWarnings("deprecation")
     public void readFields(DataInput in) throws IOException {
       rpcVersion = in.readLong();
@@ -159,6 +160,7 @@ public class WritableRpcEngine implement
       }
     }
 
+    @Override
     @SuppressWarnings("deprecation")
     public void write(DataOutput out) throws IOException {
       out.writeLong(rpcVersion);
@@ -173,6 +175,7 @@ public class WritableRpcEngine implement
       }
     }
 
+    @Override
     public String toString() {
       StringBuilder buffer = new StringBuilder();
       buffer.append(methodName);
@@ -189,10 +192,12 @@ public class WritableRpcEngine implement
       return buffer.toString();
     }
 
+    @Override
     public void setConf(Configuration conf) {
       this.conf = conf;
     }
 
+    @Override
     public Configuration getConf() {
       return this.conf;
     }
@@ -215,6 +220,7 @@ public class WritableRpcEngine implement
       this.client = CLIENTS.getClient(conf, factory);
     }
 
+    @Override
     public Object invoke(Object proxy, Method method, Object[] args)
       throws Throwable {
       long startTime = 0;
@@ -232,6 +238,7 @@ public class WritableRpcEngine implement
     }
     
     /* close the IPC client that's responsible for this invoker's RPCs */ 
+    @Override
     synchronized public void close() {
       if (!isClosed) {
         isClosed = true;

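The WritableRpcEngine hunks above only add @Override annotations, but the
methods they mark are the core Writable/Configurable contract. A
self-contained sketch of that contract, with an invented LongPairWritable
type, is:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.Writable;

    public class LongPairWritable implements Writable {
      private long first;
      private long second;

      @Override // serialize fields in a fixed order
      public void write(DataOutput out) throws IOException {
        out.writeLong(first);
        out.writeLong(second);
      }

      @Override // deserialize in exactly the same order
      public void readFields(DataInput in) throws IOException {
        first = in.readLong();
        second = in.readLong();
      }

      @Override
      public String toString() {
        return first + "," + second;
      }
    }

With @Override present, a signature drift (say, readFields(DataInputStream))
becomes a compile error instead of a silently unused method.
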
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java Tue Oct 16 00:02:55 2012
@@ -88,6 +88,7 @@ public class LogLevel {
   public static class Servlet extends HttpServlet {
     private static final long serialVersionUID = 1L;
 
+    @Override
     public void doGet(HttpServletRequest request, HttpServletResponse response
         ) throws ServletException, IOException {
 

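The LogLevel hunk above only adds @Override to the servlet's doGet(). As a
hedged sketch of exercising such a servlet over HTTP — the /logLevel path,
the "log"/"level" query parameters, and the host:port are assumptions, not
taken from this diff:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class LogLevelProbe {
      public static void main(String[] args) throws Exception {
        // assumed endpoint and parameters; substitute your daemon's address
        URL url = new URL("http://localhost:50070/logLevel"
            + "?log=org.apache.hadoop.ipc.Server&level=DEBUG");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream()));
        String line;
        while ((line = in.readLine()) != null) {
          System.out.println(line);
        }
        in.close();
      }
    }
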
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ContextFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ContextFactory.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ContextFactory.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ContextFactory.java Tue Oct 16 00:02:55 2012
@@ -1,211 +1,211 @@
-/*
- * ContextFactory.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.metrics.spi.NullContext;
-
-/**
- * Factory class for creating MetricsContext objects.  To obtain an instance
- * of this class, use the static <code>getFactory()</code> method.
- */
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-@InterfaceStability.Evolving
-public class ContextFactory {
-    
-  private static final String PROPERTIES_FILE = 
-    "/hadoop-metrics.properties";
-  private static final String CONTEXT_CLASS_SUFFIX =
-    ".class";
-  private static final String DEFAULT_CONTEXT_CLASSNAME =
-    "org.apache.hadoop.metrics.spi.NullContext";
-    
-  private static ContextFactory theFactory = null;
-    
-  private Map<String,Object> attributeMap = new HashMap<String,Object>();
-  private Map<String,MetricsContext> contextMap = 
-    new HashMap<String,MetricsContext>();
-    
-  // Used only when contexts, or the ContextFactory itself, cannot be
-  // created.
-  private static Map<String,MetricsContext> nullContextMap = 
-    new HashMap<String,MetricsContext>();
-    
-  /** Creates a new instance of ContextFactory */
-  protected ContextFactory() {
-  }
-    
-  /**
-   * Returns the value of the named attribute, or null if there is no 
-   * attribute of that name.
-   *
-   * @param attributeName the attribute name
-   * @return the attribute value
-   */
-  public Object getAttribute(String attributeName) {
-    return attributeMap.get(attributeName);
-  }
-    
-  /**
-   * Returns the names of all the factory's attributes.
-   * 
-   * @return the attribute names
-   */
-  public String[] getAttributeNames() {
-    String[] result = new String[attributeMap.size()];
-    int i = 0;
-    // for (String attributeName : attributeMap.keySet()) {
-    Iterator it = attributeMap.keySet().iterator();
-    while (it.hasNext()) {
-      result[i++] = (String) it.next();
-    }
-    return result;
-  }
-    
-  /**
-   * Sets the named factory attribute to the specified value, creating it
-   * if it did not already exist.  If the value is null, this is the same as
-   * calling removeAttribute.
-   *
-   * @param attributeName the attribute name
-   * @param value the new attribute value
-   */
-  public void setAttribute(String attributeName, Object value) {
-    attributeMap.put(attributeName, value);
-  }
-
-  /**
-   * Removes the named attribute if it exists.
-   *
-   * @param attributeName the attribute name
-   */
-  public void removeAttribute(String attributeName) {
-    attributeMap.remove(attributeName);
-  }
-    
-  /**
-   * Returns the named MetricsContext instance, constructing it if necessary 
-   * using the factory's current configuration attributes. <p/>
-   * 
-   * When constructing the instance, if the factory property 
-   * <i>contextName</i>.class</code> exists, 
-   * its value is taken to be the name of the class to instantiate.  Otherwise,
-   * the default is to create an instance of 
-   * <code>org.apache.hadoop.metrics.spi.NullContext</code>, which is a 
-   * dummy "no-op" context which will cause all metric data to be discarded.
-   * 
-   * @param contextName the name of the context
-   * @return the named MetricsContext
-   */
-  public synchronized MetricsContext getContext(String refName, String contextName)
-      throws IOException, ClassNotFoundException,
-             InstantiationException, IllegalAccessException {
-    MetricsContext metricsContext = contextMap.get(refName);
-    if (metricsContext == null) {
-      String classNameAttribute = refName + CONTEXT_CLASS_SUFFIX;
-      String className = (String) getAttribute(classNameAttribute);
-      if (className == null) {
-        className = DEFAULT_CONTEXT_CLASSNAME;
-      }
-      Class contextClass = Class.forName(className);
-      metricsContext = (MetricsContext) contextClass.newInstance();
-      metricsContext.init(contextName, this);
-      contextMap.put(contextName, metricsContext);
-    }
-    return metricsContext;
-  }
-
-  public synchronized MetricsContext getContext(String contextName)
-    throws IOException, ClassNotFoundException, InstantiationException,
-           IllegalAccessException {
-    return getContext(contextName, contextName);
-  }
-  
-  /** 
-   * Returns all MetricsContexts built by this factory.
-   */
-  public synchronized Collection<MetricsContext> getAllContexts() {
-    // Make a copy to avoid race conditions with creating new contexts.
-    return new ArrayList<MetricsContext>(contextMap.values());
-  }
-    
-  /**
-   * Returns a "null" context - one which does nothing.
-   */
-  public static synchronized MetricsContext getNullContext(String contextName) {
-    MetricsContext nullContext = nullContextMap.get(contextName);
-    if (nullContext == null) {
-      nullContext = new NullContext();
-      nullContextMap.put(contextName, nullContext);
-    }
-    return nullContext;
-  }
-    
-  /**
-   * Returns the singleton ContextFactory instance, constructing it if 
-   * necessary. <p/>
-   * 
-   * When the instance is constructed, this method checks if the file 
-   * <code>hadoop-metrics.properties</code> exists on the class path.  If it 
-   * exists, it must be in the format defined by java.util.Properties, and all 
-   * the properties in the file are set as attributes on the newly created
-   * ContextFactory instance.
-   *
-   * @return the singleton ContextFactory instance
-   */
-  public static synchronized ContextFactory getFactory() throws IOException {
-    if (theFactory == null) {
-      theFactory = new ContextFactory();
-      theFactory.setAttributes();
-    }
-    return theFactory;
-  }
-    
-  private void setAttributes() throws IOException {
-    InputStream is = getClass().getResourceAsStream(PROPERTIES_FILE);
-    if (is != null) {
-      try {
-        Properties properties = new Properties();
-        properties.load(is);
-        //for (Object propertyNameObj : properties.keySet()) {
-        Iterator it = properties.keySet().iterator();
-        while (it.hasNext()) {
-          String propertyName = (String) it.next();
-          String propertyValue = properties.getProperty(propertyName);
-          setAttribute(propertyName, propertyValue);
-        }
-      } finally {
-        is.close();
-      }
-    }
-  }
-    
-}
+/*
+ * ContextFactory.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.spi.NullContext;
+
+/**
+ * Factory class for creating MetricsContext objects.  To obtain an instance
+ * of this class, use the static <code>getFactory()</code> method.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class ContextFactory {
+    
+  private static final String PROPERTIES_FILE = 
+    "/hadoop-metrics.properties";
+  private static final String CONTEXT_CLASS_SUFFIX =
+    ".class";
+  private static final String DEFAULT_CONTEXT_CLASSNAME =
+    "org.apache.hadoop.metrics.spi.NullContext";
+    
+  private static ContextFactory theFactory = null;
+    
+  private Map<String,Object> attributeMap = new HashMap<String,Object>();
+  private Map<String,MetricsContext> contextMap = 
+    new HashMap<String,MetricsContext>();
+    
+  // Used only when contexts, or the ContextFactory itself, cannot be
+  // created.
+  private static Map<String,MetricsContext> nullContextMap = 
+    new HashMap<String,MetricsContext>();
+    
+  /** Creates a new instance of ContextFactory */
+  protected ContextFactory() {
+  }
+    
+  /**
+   * Returns the value of the named attribute, or null if there is no 
+   * attribute of that name.
+   *
+   * @param attributeName the attribute name
+   * @return the attribute value
+   */
+  public Object getAttribute(String attributeName) {
+    return attributeMap.get(attributeName);
+  }
+    
+  /**
+   * Returns the names of all the factory's attributes.
+   * 
+   * @return the attribute names
+   */
+  public String[] getAttributeNames() {
+    String[] result = new String[attributeMap.size()];
+    int i = 0;
+    // for (String attributeName : attributeMap.keySet()) {
+    Iterator it = attributeMap.keySet().iterator();
+    while (it.hasNext()) {
+      result[i++] = (String) it.next();
+    }
+    return result;
+  }
+    
+  /**
+   * Sets the named factory attribute to the specified value, creating it
+   * if it did not already exist.  If the value is null, this is the same as
+   * calling removeAttribute.
+   *
+   * @param attributeName the attribute name
+   * @param value the new attribute value
+   */
+  public void setAttribute(String attributeName, Object value) {
+    attributeMap.put(attributeName, value);
+  }
+
+  /**
+   * Removes the named attribute if it exists.
+   *
+   * @param attributeName the attribute name
+   */
+  public void removeAttribute(String attributeName) {
+    attributeMap.remove(attributeName);
+  }
+    
+  /**
+   * Returns the named MetricsContext instance, constructing it if necessary 
+   * using the factory's current configuration attributes. <p/>
+   * 
+   * When constructing the instance, if the factory property 
+   * <code><i>contextName</i>.class</code> exists, 
+   * its value is taken to be the name of the class to instantiate.  Otherwise,
+   * the default is to create an instance of 
+   * <code>org.apache.hadoop.metrics.spi.NullContext</code>, which is a 
+   * dummy "no-op" context which will cause all metric data to be discarded.
+   * 
+   * @param contextName the name of the context
+   * @return the named MetricsContext
+   */
+  public synchronized MetricsContext getContext(String refName, String contextName)
+      throws IOException, ClassNotFoundException,
+             InstantiationException, IllegalAccessException {
+    MetricsContext metricsContext = contextMap.get(refName);
+    if (metricsContext == null) {
+      String classNameAttribute = refName + CONTEXT_CLASS_SUFFIX;
+      String className = (String) getAttribute(classNameAttribute);
+      if (className == null) {
+        className = DEFAULT_CONTEXT_CLASSNAME;
+      }
+      Class contextClass = Class.forName(className);
+      metricsContext = (MetricsContext) contextClass.newInstance();
+      metricsContext.init(contextName, this);
+      contextMap.put(refName, metricsContext);
+    }
+    return metricsContext;
+  }
+
+  public synchronized MetricsContext getContext(String contextName)
+    throws IOException, ClassNotFoundException, InstantiationException,
+           IllegalAccessException {
+    return getContext(contextName, contextName);
+  }
+  
+  /** 
+   * Returns all MetricsContexts built by this factory.
+   */
+  public synchronized Collection<MetricsContext> getAllContexts() {
+    // Make a copy to avoid race conditions with creating new contexts.
+    return new ArrayList<MetricsContext>(contextMap.values());
+  }
+    
+  /**
+   * Returns a "null" context - one which does nothing.
+   */
+  public static synchronized MetricsContext getNullContext(String contextName) {
+    MetricsContext nullContext = nullContextMap.get(contextName);
+    if (nullContext == null) {
+      nullContext = new NullContext();
+      nullContextMap.put(contextName, nullContext);
+    }
+    return nullContext;
+  }
+    
+  /**
+   * Returns the singleton ContextFactory instance, constructing it if 
+   * necessary. <p/>
+   * 
+   * When the instance is constructed, this method checks if the file 
+   * <code>hadoop-metrics.properties</code> exists on the class path.  If it 
+   * exists, it must be in the format defined by java.util.Properties, and all 
+   * the properties in the file are set as attributes on the newly created
+   * ContextFactory instance.
+   *
+   * @return the singleton ContextFactory instance
+   */
+  public static synchronized ContextFactory getFactory() throws IOException {
+    if (theFactory == null) {
+      theFactory = new ContextFactory();
+      theFactory.setAttributes();
+    }
+    return theFactory;
+  }
+    
+  private void setAttributes() throws IOException {
+    InputStream is = getClass().getResourceAsStream(PROPERTIES_FILE);
+    if (is != null) {
+      try {
+        Properties properties = new Properties();
+        properties.load(is);
+        //for (Object propertyNameObj : properties.keySet()) {
+        Iterator it = properties.keySet().iterator();
+        while (it.hasNext()) {
+          String propertyName = (String) it.next();
+          String propertyValue = properties.getProperty(propertyName);
+          setAttribute(propertyName, propertyValue);
+        }
+      } finally {
+        is.close();
+      }
+    }
+  }
+    
+}

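A short usage sketch for the factory above; the "mapred" context name is a
placeholder, and which class comes back depends on whether a "mapred.class"
attribute was loaded from hadoop-metrics.properties (otherwise the
NullContext default applies):

    import org.apache.hadoop.metrics.ContextFactory;
    import org.apache.hadoop.metrics.MetricsContext;

    public class ContextFactoryExample {
      public static void main(String[] args) throws Exception {
        ContextFactory factory = ContextFactory.getFactory(); // singleton
        MetricsContext context = factory.getContext("mapred"); // placeholder
        System.out.println("context class: " + context.getClass().getName());
        System.out.println("period: " + context.getPeriod() + "s");
      }
    }
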
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsContext.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsContext.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsContext.java Tue Oct 16 00:02:55 2012
@@ -1,122 +1,122 @@
-/*
- * MetricsContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.metrics.spi.OutputRecord;
-
-/**
- * The main interface to the metrics package. 
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public interface MetricsContext {
-    
-  /**
-   * Default period in seconds at which data is sent to the metrics system.
-   */
-  public static final int DEFAULT_PERIOD = 5;
-
-  /**
-   * Initialize this context.
-   * @param contextName The given name for this context
-   * @param factory The creator of this context
-   */
-  public void init(String contextName, ContextFactory factory);
-
-  /**
-   * Returns the context name.
-   *
-   * @return the context name
-   */
-  public abstract String getContextName();
-    
-  /**
-   * Starts or restarts monitoring, the emitting of metrics records as they are 
-   * updated. 
-   */
-  public abstract void startMonitoring()
-    throws IOException;
-
-  /**
-   * Stops monitoring.  This does not free any data that the implementation
-   * may have buffered for sending at the next timer event. It
-   * is OK to call <code>startMonitoring()</code> again after calling 
-   * this.
-   * @see #close()
-   */
-  public abstract void stopMonitoring();
-    
-  /**
-   * Returns true if monitoring is currently in progress.
-   */
-  public abstract boolean isMonitoring();
-    
-  /**
-   * Stops monitoring and also frees any buffered data, returning this 
-   * object to its initial state.  
-   */
-  public abstract void close();
-    
-  /**
-   * Creates a new MetricsRecord instance with the given <code>recordName</code>.
-   * Throws an exception if the metrics implementation is configured with a fixed
-   * set of record names and <code>recordName</code> is not in that set.
-   *
-   * @param recordName the name of the record
-   * @throws MetricsException if recordName conflicts with configuration data
-   */
-  public abstract MetricsRecord createRecord(String recordName);
-    
-  /**
-   * Registers a callback to be called at regular time intervals, as 
-   * determined by the implementation-class specific configuration.
-   *
-   * @param updater object to be run periodically; it should updated
-   * some metrics records and then return
-   */
-  public abstract void registerUpdater(Updater updater);
-
-  /**
-   * Removes a callback, if it exists.
-   * 
-   * @param updater object to be removed from the callback list
-   */
-  public abstract void unregisterUpdater(Updater updater);
-  
-  /**
-   * Returns the timer period.
-   */
-  public abstract int getPeriod();
-  
-  /**
-   * Retrieves all the records managed by this MetricsContext.
-   * Useful for monitoring systems that are polling-based.
-   * 
-   * @return A non-null map from all record names to the records managed.
-   */
-   Map<String, Collection<OutputRecord>> getAllRecords();
-}
+/*
+ * MetricsContext.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics.spi.OutputRecord;
+
+/**
+ * The main interface to the metrics package. 
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface MetricsContext {
+    
+  /**
+   * Default period in seconds at which data is sent to the metrics system.
+   */
+  public static final int DEFAULT_PERIOD = 5;
+
+  /**
+   * Initialize this context.
+   * @param contextName The given name for this context
+   * @param factory The creator of this context
+   */
+  public void init(String contextName, ContextFactory factory);
+
+  /**
+   * Returns the context name.
+   *
+   * @return the context name
+   */
+  public abstract String getContextName();
+    
+  /**
+   * Starts or restarts monitoring, that is, the emitting of metrics records
+   * as they are updated.
+   */
+  public abstract void startMonitoring()
+    throws IOException;
+
+  /**
+   * Stops monitoring.  This does not free any data that the implementation
+   * may have buffered for sending at the next timer event. It
+   * is OK to call <code>startMonitoring()</code> again after calling 
+   * this.
+   * @see #close()
+   */
+  public abstract void stopMonitoring();
+    
+  /**
+   * Returns true if monitoring is currently in progress.
+   */
+  public abstract boolean isMonitoring();
+    
+  /**
+   * Stops monitoring and also frees any buffered data, returning this 
+   * object to its initial state.  
+   */
+  public abstract void close();
+    
+  /**
+   * Creates a new MetricsRecord instance with the given <code>recordName</code>.
+   * Throws an exception if the metrics implementation is configured with a fixed
+   * set of record names and <code>recordName</code> is not in that set.
+   *
+   * @param recordName the name of the record
+   * @throws MetricsException if recordName conflicts with configuration data
+   */
+  public abstract MetricsRecord createRecord(String recordName);
+    
+  /**
+   * Registers a callback to be called at regular time intervals, as 
+   * determined by the implementation-class-specific configuration.
+   *
+   * @param updater object to be run periodically; it should update
+   * some metrics records and then return
+   */
+  public abstract void registerUpdater(Updater updater);
+
+  /**
+   * Removes a callback, if it exists.
+   * 
+   * @param updater object to be removed from the callback list
+   */
+  public abstract void unregisterUpdater(Updater updater);
+  
+  /**
+   * Returns the timer period.
+   */
+  public abstract int getPeriod();
+  
+  /**
+   * Retrieves all the records managed by this MetricsContext.
+   * Useful for monitoring systems that are polling-based.
+   * 
+   * @return A non-null map from all record names to the records managed.
+   */
+   Map<String, Collection<OutputRecord>> getAllRecords();
+}

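The registerUpdater() contract documented above is push-based: the context
calls doUpdates() once per period, and the updater fills in a record and
returns. A sketch under that contract, with invented record and metric names:

    import org.apache.hadoop.metrics.MetricsContext;
    import org.apache.hadoop.metrics.MetricsRecord;
    import org.apache.hadoop.metrics.Updater;

    public class RequestCountUpdater implements Updater {
      private final MetricsRecord record;
      private volatile int requestsSinceLastUpdate = 0;

      public RequestCountUpdater(MetricsContext context) {
        this.record = context.createRecord("requests"); // illustrative name
        context.registerUpdater(this);
      }

      public void incr() {
        requestsSinceLastUpdate++;
      }

      @Override // called by the context once per getPeriod() seconds
      public void doUpdates(MetricsContext unused) {
        record.setMetric("count", requestsSinceLastUpdate); // illustrative
        record.update();
        requestsSinceLastUpdate = 0;
      }
    }
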
Modified: hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsException.java?rev=1398581&r1=1398580&r2=1398581&view=diff
==============================================================================
--- hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsException.java (original)
+++ hadoop/common/branches/MR-3902/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsException.java Tue Oct 16 00:02:55 2012
@@ -1,47 +1,47 @@
-/*
- * MetricsException.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * General-purpose, unchecked metrics exception.
- */
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-@InterfaceStability.Evolving
-public class MetricsException extends RuntimeException {
-    
-  private static final long serialVersionUID = -1643257498540498497L;
-
-  /** Creates a new instance of MetricsException */
-  public MetricsException() {
-  }
-    
-  /** Creates a new instance of MetricsException 
-   *
-   * @param message an error message
-   */
-  public MetricsException(String message) {
-    super(message);
-  }
-    
-}
+/*
+ * MetricsException.java
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * General-purpose, unchecked metrics exception.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public class MetricsException extends RuntimeException {
+    
+  private static final long serialVersionUID = -1643257498540498497L;
+
+  /** Creates a new instance of MetricsException */
+  public MetricsException() {
+  }
+    
+  /** Creates a new instance of MetricsException 
+   *
+   * @param message an error message
+   */
+  public MetricsException(String message) {
+    super(message);
+  }
+    
+}
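
MetricsException being unchecked matches its documented use: an
implementation can reject, for example, a record name outside a fixed
configured set without forcing callers to declare the failure. A small
illustrative sketch (the validator class is invented):

    import java.util.Set;
    import org.apache.hadoop.metrics.MetricsException;

    public class RecordNameValidator {
      private final Set<String> allowed;

      public RecordNameValidator(Set<String> allowed) {
        this.allowed = allowed;
      }

      public void check(String recordName) {
        if (!allowed.contains(recordName)) {
          // unchecked, per the class above: callers need no throws clause
          throw new MetricsException("unknown record name: " + recordName);
        }
      }
    }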