Posted to common-commits@hadoop.apache.org by su...@apache.org on 2014/07/25 22:33:18 UTC

svn commit: r1613514 [2/4] - in /hadoop/common/branches/YARN-1051/hadoop-common-project: hadoop-auth/ hadoop-common/ hadoop-common/src/main/bin/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/crypto/key/ hadoop-common/src/ma...

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java Fri Jul 25 20:33:09 2014
@@ -24,6 +24,7 @@ import java.io.OutputStream;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
 
 /**
  * This class encapsulates a streaming compression/decompression pair.
@@ -113,4 +114,58 @@ public interface CompressionCodec {
    * @return the extension including the '.'
    */
   String getDefaultExtension();
+
+  static class Util {
+    /**
+     * Create an output stream with a codec taken from the global CodecPool.
+     *
+     * @param codec       The codec to use to create the output stream.
+     * @param conf        The configuration to use if we need to create a new codec.
+     * @param out         The output stream to wrap.
+     * @return            The new output stream
+     * @throws IOException
+     */
+    static CompressionOutputStream createOutputStreamWithCodecPool(
+        CompressionCodec codec, Configuration conf, OutputStream out)
+        throws IOException {
+      Compressor compressor = CodecPool.getCompressor(codec, conf);
+      CompressionOutputStream stream = null;
+      try {
+        stream = codec.createOutputStream(out, compressor);
+      } finally {
+        if (stream == null) {
+          CodecPool.returnCompressor(compressor);
+        } else {
+          stream.setTrackedCompressor(compressor);
+        }
+      }
+      return stream;
+    }
+
+    /**
+     * Create an input stream with a codec taken from the global CodecPool.
+     *
+     * @param codec       The codec to use to create the input stream.
+     * @param conf        The configuration to use if we need to create a new codec.
+     * @param in          The input stream to wrap.
+     * @return            The new input stream
+     * @throws IOException
+     */
+    static CompressionInputStream createInputStreamWithCodecPool(
+        CompressionCodec codec, Configuration conf, InputStream in)
+          throws IOException {
+      Decompressor decompressor = CodecPool.getDecompressor(codec);
+      CompressionInputStream stream = null;
+      try {
+        stream = codec.createInputStream(in, decompressor);
+      } finally {
+        if (stream == null) {
+          CodecPool.returnDecompressor(decompressor);
+        } else {
+          stream.setTrackedDecompressor(decompressor);
+        }
+      }
+      return stream;
+    }
+  }
 }
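
For context, a minimal sketch of the caller-side pattern these pooled helpers enable, together with the tracked-compressor cleanup added to the stream classes below (file name and payload are illustrative):

import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.CompressionOutputStream;

public class PooledStreamExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    // Resolve the codec from the file extension (".gz" -> GzipCodec).
    Path path = new Path("data.gz");
    CompressionCodec codec = factory.getCodec(path);
    OutputStream raw = fs.create(path);
    // With this change the codec borrows a Compressor from CodecPool...
    CompressionOutputStream out = codec.createOutputStream(raw);
    try {
      out.write("hello".getBytes("UTF-8"));
    } finally {
      out.close();  // ...and close() returns the tracked Compressor to the pool
    }
  }
}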

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java Fri Jul 25 20:33:09 2014
@@ -41,6 +41,8 @@ public abstract class CompressionInputSt
   protected final InputStream in;
   protected long maxAvailableData = 0L;
 
+  private Decompressor trackedDecompressor;
+
   /**
    * Create a compression input stream that reads
    * the decompressed bytes from the given stream.
@@ -58,6 +60,10 @@ public abstract class CompressionInputSt
   @Override
   public void close() throws IOException {
     in.close();
+    if (trackedDecompressor != null) {
+      CodecPool.returnDecompressor(trackedDecompressor);
+      trackedDecompressor = null;
+    }
   }
   
   /**
@@ -112,4 +118,8 @@ public abstract class CompressionInputSt
   public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException {
     throw new UnsupportedOperationException();
   }
+
+  void setTrackedDecompressor(Decompressor decompressor) {
+    trackedDecompressor = decompressor;
+  }
 }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java Fri Jul 25 20:33:09 2014
@@ -34,7 +34,13 @@ public abstract class CompressionOutputS
    * The output stream to be compressed. 
    */
   protected final OutputStream out;
-  
+
+  /**
+   * If non-null, this is the Compressor object that we should call
+   * CodecPool#returnCompressor on when this stream is closed.
+   */
+  private Compressor trackedCompressor;
+
   /**
    * Create a compression output stream that writes
    * the compressed bytes to the given stream.
@@ -43,11 +49,19 @@ public abstract class CompressionOutputS
   protected CompressionOutputStream(OutputStream out) {
     this.out = out;
   }
-  
+
+  void setTrackedCompressor(Compressor compressor) {
+    trackedCompressor = compressor;
+  }
+
   @Override
   public void close() throws IOException {
     finish();
     out.close();
+    if (trackedCompressor != null) {
+      CodecPool.returnCompressor(trackedCompressor);
+      trackedCompressor = null;
+    }
   }
   
   @Override

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java Fri Jul 25 20:33:09 2014
@@ -51,14 +51,8 @@ public class DefaultCodec implements Con
   @Override
   public CompressionOutputStream createOutputStream(OutputStream out) 
   throws IOException {
-    // This may leak memory if called in a loop. The createCompressor() call
-    // may cause allocation of an untracked direct-backed buffer if native
-    // libs are being used (even if you close the stream).  A Compressor
-    // object should be reused between successive calls.
-    LOG.warn("DefaultCodec.createOutputStream() may leak memory. "
-        + "Create a compressor first.");
-    return new CompressorStream(out, createCompressor(), 
-                                conf.getInt("io.file.buffer.size", 4*1024));
+    return CompressionCodec.Util.
+        createOutputStreamWithCodecPool(this, conf, out);
   }
 
   @Override
@@ -82,8 +76,8 @@ public class DefaultCodec implements Con
   @Override
   public CompressionInputStream createInputStream(InputStream in) 
   throws IOException {
-    return new DecompressorStream(in, createDecompressor(),
-                                  conf.getInt("io.file.buffer.size", 4*1024));
+    return CompressionCodec.Util.
+        createInputStreamWithCodecPool(this, conf, in);
   }
 
   @Override
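
The warning deleted above described the discipline callers previously had to apply by hand. For comparison, a sketch of that explicit reuse pattern against the existing CodecPool API (the method and its parameters are illustrative):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;

class CompressorReuse {
  // Compress several files with one pooled Compressor instead of allocating
  // a fresh (potentially direct-buffer-backed) one per stream.
  static void compressAll(CompressionCodec codec, Configuration conf,
      FileSystem fs, Path[] outputs, byte[] payload) throws IOException {
    Compressor compressor = CodecPool.getCompressor(codec, conf);
    try {
      for (Path p : outputs) {
        compressor.reset();  // make the compressor reusable across streams
        CompressionOutputStream out =
            codec.createOutputStream(fs.create(p), compressor);
        try {
          out.write(payload);
        } finally {
          out.close();
        }
      }
    } finally {
      CodecPool.returnCompressor(compressor);
    }
  }
}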

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java Fri Jul 25 20:33:09 2014
@@ -159,10 +159,11 @@ public class GzipCodec extends DefaultCo
   @Override
   public CompressionOutputStream createOutputStream(OutputStream out) 
     throws IOException {
-    return (ZlibFactory.isNativeZlibLoaded(conf)) ?
-               new CompressorStream(out, createCompressor(),
-                                    conf.getInt("io.file.buffer.size", 4*1024)) :
-               new GzipOutputStream(out);
+    if (!ZlibFactory.isNativeZlibLoaded(conf)) {
+      return new GzipOutputStream(out);
+    }
+    return CompressionCodec.Util.
+        createOutputStreamWithCodecPool(this, conf, out);
   }
   
   @Override
@@ -192,8 +193,9 @@ public class GzipCodec extends DefaultCo
 
   @Override
   public CompressionInputStream createInputStream(InputStream in)
-  throws IOException {
-    return createInputStream(in, null);
+      throws IOException {
+    return CompressionCodec.Util.
+        createInputStreamWithCodecPool(this, conf, in);
   }
 
   @Override

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java Fri Jul 25 20:33:09 2014
@@ -84,7 +84,8 @@ public class Lz4Codec implements Configu
   @Override
   public CompressionOutputStream createOutputStream(OutputStream out)
       throws IOException {
-    return createOutputStream(out, createCompressor());
+    return CompressionCodec.Util.
+        createOutputStreamWithCodecPool(this, conf, out);
   }
 
   /**
@@ -157,7 +158,8 @@ public class Lz4Codec implements Configu
   @Override
   public CompressionInputStream createInputStream(InputStream in)
       throws IOException {
-    return createInputStream(in, createDecompressor());
+    return CompressionCodec.Util.
+        createInputStreamWithCodecPool(this, conf, in);
   }
 
   /**

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java Fri Jul 25 20:33:09 2014
@@ -95,7 +95,8 @@ public class SnappyCodec implements Conf
   @Override
   public CompressionOutputStream createOutputStream(OutputStream out)
       throws IOException {
-    return createOutputStream(out, createCompressor());
+    return CompressionCodec.Util.
+        createOutputStreamWithCodecPool(this, conf, out);
   }
 
   /**
@@ -158,7 +159,8 @@ public class SnappyCodec implements Conf
   @Override
   public CompressionInputStream createInputStream(InputStream in)
       throws IOException {
-    return createInputStream(in, createDecompressor());
+    return CompressionCodec.Util.
+        createInputStreamWithCodecPool(this, conf, in);
   }
 
   /**

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Fri Jul 25 20:33:09 2014
@@ -599,24 +599,35 @@ public class ProtobufRpcEngine implement
             .mergeFrom(request.theRequestRead).build();
         
         Message result;
+        long startTime = Time.now();
+        int qTime = (int) (startTime - receiveTime);
+        Exception exception = null;
         try {
-          long startTime = Time.now();
           server.rpcDetailedMetrics.init(protocolImpl.protocolClass);
           result = service.callBlockingMethod(methodDescriptor, null, param);
+        } catch (ServiceException e) {
+          exception = (Exception) e.getCause();
+          throw (Exception) e.getCause();
+        } catch (Exception e) {
+          exception = e;
+          throw e;
+        } finally {
           int processingTime = (int) (Time.now() - startTime);
-          int qTime = (int) (startTime - receiveTime);
           if (LOG.isDebugEnabled()) {
-            LOG.info("Served: " + methodName + " queueTime= " + qTime +
-                      " procesingTime= " + processingTime);
+            String msg = "Served: " + methodName + " queueTime= " + qTime +
+                " procesingTime= " + processingTime;
+            if (exception != null) {
+              msg += " exception= " + exception.getClass().getSimpleName();
+            }
+            LOG.debug(msg);
           }
+          String detailedMetricsName = (exception == null) ?
+              methodName :
+              exception.getClass().getSimpleName();
           server.rpcMetrics.addRpcQueueTime(qTime);
           server.rpcMetrics.addRpcProcessingTime(processingTime);
-          server.rpcDetailedMetrics.addProcessingTime(methodName,
+          server.rpcDetailedMetrics.addProcessingTime(detailedMetricsName,
               processingTime);
-        } catch (ServiceException e) {
-          throw (Exception) e.getCause();
-        } catch (Exception e) {
-          throw e;
         }
         return new RpcResponseWrapper(result);
       }
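
The restructuring follows a general pattern: remember the exception, and do all timing and metrics accounting in a finally block so that both the success and failure paths are counted. A standalone sketch of the pattern (the Metrics interface is a hypothetical stand-in for rpcMetrics/rpcDetailedMetrics):

import java.util.concurrent.Callable;

class TimedCall {
  // Hypothetical stand-in for server.rpcDetailedMetrics.
  interface Metrics {
    void addProcessingTime(String name, int millis);
  }

  static <T> T invoke(String methodName, Callable<T> call, Metrics metrics)
      throws Exception {
    long startTime = System.currentTimeMillis();
    Exception exception = null;
    try {
      return call.call();
    } catch (Exception e) {
      exception = e;  // remembered only so the finally block can see it
      throw e;
    } finally {
      int processingTime = (int) (System.currentTimeMillis() - startTime);
      // On failure, charge the time to the exception's class name rather
      // than the method name, as the RPC engines now do.
      String detailedMetricsName = (exception == null)
          ? methodName : exception.getClass().getSimpleName();
      metrics.addProcessingTime(detailedMetricsName, processingTime);
    }
  }
}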

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri Jul 25 20:33:09 2014
@@ -355,8 +355,8 @@ public abstract class Server {
   private int readThreads;                        // number of read threads
   private int readerPendingConnectionQueue;         // number of connections to queue per read thread
   private Class<? extends Writable> rpcRequestClass;   // class used for deserializing the rpc request
-  protected RpcMetrics rpcMetrics;
-  protected RpcDetailedMetrics rpcDetailedMetrics;
+  final protected RpcMetrics rpcMetrics;
+  final protected RpcDetailedMetrics rpcDetailedMetrics;
   
   private Configuration conf;
   private String portRangeConfig = null;
@@ -2494,12 +2494,8 @@ public abstract class Server {
     listener.doStop();
     responder.interrupt();
     notifyAll();
-    if (this.rpcMetrics != null) {
-      this.rpcMetrics.shutdown();
-    }
-    if (this.rpcDetailedMetrics != null) {
-      this.rpcDetailedMetrics.shutdown();
-    }
+    this.rpcMetrics.shutdown();
+    this.rpcDetailedMetrics.shutdown();
   }
 
   /** Wait for the server to be stopped.

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java Fri Jul 25 20:33:09 2014
@@ -471,37 +471,29 @@ public class WritableRpcEngine implement
           
 
           // Invoke the protocol method
+       long startTime = Time.now();
+       int qTime = (int) (startTime-receivedTime);
+       Exception exception = null;
        try {
-          long startTime = Time.now();
-          Method method = 
+          Method method =
               protocolImpl.protocolClass.getMethod(call.getMethodName(),
               call.getParameterClasses());
           method.setAccessible(true);
           server.rpcDetailedMetrics.init(protocolImpl.protocolClass);
           Object value = 
               method.invoke(protocolImpl.protocolImpl, call.getParameters());
-          int processingTime = (int) (Time.now() - startTime);
-          int qTime = (int) (startTime-receivedTime);
-          if (LOG.isDebugEnabled()) {
-            LOG.debug("Served: " + call.getMethodName() +
-                      " queueTime= " + qTime +
-                      " procesingTime= " + processingTime);
-          }
-          server.rpcMetrics.addRpcQueueTime(qTime);
-          server.rpcMetrics.addRpcProcessingTime(processingTime);
-          server.rpcDetailedMetrics.addProcessingTime(call.getMethodName(),
-                                               processingTime);
           if (server.verbose) log("Return: "+value);
-
           return new ObjectWritable(method.getReturnType(), value);
 
         } catch (InvocationTargetException e) {
           Throwable target = e.getTargetException();
           if (target instanceof IOException) {
+            exception = (IOException)target;
             throw (IOException)target;
           } else {
             IOException ioe = new IOException(target.toString());
             ioe.setStackTrace(target.getStackTrace());
+            exception = ioe;
             throw ioe;
           }
         } catch (Throwable e) {
@@ -510,8 +502,27 @@ public class WritableRpcEngine implement
           }
           IOException ioe = new IOException(e.toString());
           ioe.setStackTrace(e.getStackTrace());
+          exception = ioe;
           throw ioe;
-        }
+        } finally {
+         int processingTime = (int) (Time.now() - startTime);
+         if (LOG.isDebugEnabled()) {
+           String msg = "Served: " + call.getMethodName() +
+               " queueTime= " + qTime +
+               " procesingTime= " + processingTime;
+           if (exception != null) {
+             msg += " exception= " + exception.getClass().getSimpleName();
+           }
+           LOG.debug(msg);
+         }
+         String detailedMetricsName = (exception == null) ?
+             call.getMethodName() :
+             exception.getClass().getSimpleName();
+         server.rpcMetrics.addRpcQueueTime(qTime);
+         server.rpcMetrics.addRpcProcessingTime(processingTime);
+         server.rpcDetailedMetrics.addProcessingTime(detailedMetricsName,
+             processingTime);
+       }
       }
     }
   }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java Fri Jul 25 20:33:09 2014
@@ -55,6 +55,12 @@ public abstract class MetricsSystem impl
   public abstract <T> T register(String name, String desc, T source);
 
   /**
+   * Unregister a metrics source.
+   * @param name name of the source. This is the name supplied to register().
+   */
+  public abstract void unregisterSource(String name);
+
+  /**
    * Register a metrics source (deriving name and description from the object)
    * @param <T>   the actual type of the source object
    * @param source  object to register
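
A sketch of the register/unregister lifecycle this new method completes (source name, record name, and the "demo" prefix are illustrative; DefaultMetricsSystem is the usual entry point):

import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;

class SourceLifecycle {
  static class MySource implements MetricsSource {
    @Override
    public void getMetrics(MetricsCollector collector, boolean all) {
      collector.addRecord("MyRecord");  // emit an (empty) record when polled
    }
  }

  public static void main(String[] args) {
    MetricsSystem ms = DefaultMetricsSystem.initialize("demo");
    ms.register("MySource", "An illustrative source", new MySource());
    // ... the source is polled while registered ...
    // New in this change: the same name passed to register() can now be
    // used to remove the source again.
    ms.unregisterSource("MySource");
  }
}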

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java Fri Jul 25 20:33:09 2014
@@ -85,7 +85,7 @@ class MetricsConfig extends SubsetConfig
   private ClassLoader pluginLoader;
 
   MetricsConfig(Configuration c, String prefix) {
-    super(c, prefix, ".");
+    super(c, prefix.toLowerCase(Locale.US), ".");
   }
 
   static MetricsConfig create(String prefix) {

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java Fri Jul 25 20:33:09 2014
@@ -232,6 +232,17 @@ public class MetricsSystemImpl extends M
     return source;
   }
 
+  @Override public synchronized
+  void unregisterSource(String name) {
+    if (sources.containsKey(name)) {
+      sources.get(name).stop();
+      sources.remove(name);
+    }
+    if (allSources.containsKey(name)) {
+      allSources.remove(name);
+    }
+  }
+
   synchronized
   void registerSource(String name, String desc, MetricsSource source) {
     checkNotNull(config, "config");

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java Fri Jul 25 20:33:09 2014
@@ -883,8 +883,8 @@ public class NetworkTopology {
    * @param seed Used to seed the pseudo-random generator that randomizes the
    *          set of nodes at each network distance.
    */
-  public void sortByDistance(Node reader, Node[] nodes,
-      int activeLen, long seed) {
+  public void sortByDistance(Node reader, Node[] nodes, int activeLen,
+      long seed, boolean randomizeBlockLocationsPerBlock) {
     /** Sort weights for the nodes array */
     int[] weights = new int[activeLen];
     for (int i=0; i<activeLen; i++) {
@@ -906,8 +906,11 @@ public class NetworkTopology {
     // Seed is normally the block id
     // This means we use the same pseudo-random order for each block, for
     // potentially better page cache usage.
+    // Seed is not used if we want to randomize block locations for every block
     Random rand = getRandom();
-    rand.setSeed(seed);
+    if (!randomizeBlockLocationsPerBlock) {
+      rand.setSeed(seed);
+    }
     int idx = 0;
     for (List<Node> list: tree.values()) {
       if (list != null) {
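
Why seeding matters here can be shown in isolation: the same seed yields the same pseudo-random order on every call, while an unseeded Random spreads readers across replicas. A self-contained illustration (not the NetworkTopology code itself):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;

class SeededOrdering {
  // Seeding with the block id gives every reader the same replica order for
  // a given block (better page-cache reuse on the datanodes); skipping the
  // seed gives a fresh order per call (better load spreading).
  static List<String> order(List<String> replicas, long blockId,
      boolean randomizePerCall) {
    Random rand = new Random();
    if (!randomizePerCall) {
      rand.setSeed(blockId);
    }
    Collections.shuffle(replicas, rand);
    return replicas;
  }

  public static void main(String[] args) {
    List<String> nodes = Arrays.asList("dn1", "dn2", "dn3");
    // Same block id, deterministic: prints the same order both times.
    System.out.println(order(new ArrayList<String>(nodes), 42L, false));
    System.out.println(order(new ArrayList<String>(nodes), 42L, false));
  }
}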

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java Fri Jul 25 20:33:09 2014
@@ -279,8 +279,8 @@ public class NetworkTopologyWithNodeGrou
    *          set of nodes at each network distance.
    */
   @Override
-  public void sortByDistance( Node reader, Node[] nodes,
-      int activeLen, long seed) {
+  public void sortByDistance(Node reader, Node[] nodes, int activeLen,
+      long seed, boolean randomizeBlockLocationsPerBlock) {
     // If reader is not a datanode (not in NetworkTopology tree), we need to
     // replace this reader with a sibling leaf node in tree.
     if (reader != null && !this.contains(reader)) {
@@ -293,7 +293,8 @@ public class NetworkTopologyWithNodeGrou
         return;
       }
     }
-    super.sortByDistance(reader, nodes, nodes.length, seed);
+    super.sortByDistance(reader, nodes, nodes.length, seed,
+        randomizeBlockLocationsPerBlock);
   }
 
   /** InnerNodeWithNodeGroup represents a switch/router of a data center, rack

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java Fri Jul 25 20:33:09 2014
@@ -33,7 +33,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.Time;
+import org.apache.hadoop.util.Timer;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -58,24 +58,35 @@ public class Groups {
   private final Map<String, List<String>> staticUserToGroupsMap = 
       new HashMap<String, List<String>>();
   private final long cacheTimeout;
+  private final long negativeCacheTimeout;
   private final long warningDeltaMs;
+  private final Timer timer;
 
   public Groups(Configuration conf) {
+    this(conf, new Timer());
+  }
+
+  public Groups(Configuration conf, Timer timer) {
     impl = 
       ReflectionUtils.newInstance(
           conf.getClass(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, 
                         ShellBasedUnixGroupsMapping.class, 
                         GroupMappingServiceProvider.class), 
           conf);
-    
+
     cacheTimeout = 
       conf.getLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS, 
           CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_SECS_DEFAULT) * 1000;
+    negativeCacheTimeout =
+      conf.getLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS,
+          CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS_DEFAULT) * 1000;
     warningDeltaMs =
       conf.getLong(CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_WARN_AFTER_MS,
         CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_CACHE_WARN_AFTER_MS_DEFAULT);
     parseStaticMapping(conf);
 
+    this.timer = timer;
+
     if(LOG.isDebugEnabled())
       LOG.debug("Group mapping impl=" + impl.getClass().getName() + 
           "; cacheTimeout=" + cacheTimeout + "; warningDeltaMs=" +
@@ -111,7 +122,29 @@ public class Groups {
       staticUserToGroupsMap.put(user, groups);
     }
   }
+
+  /**
+   * Determine whether a CachedGroups entry has expired.
+   * @param groups cached groups for one user.
+   * @param startMs the current monotonic time, in milliseconds.
+   * @return true if the entry has expired from userToGroupsMap.
+   */
+  private boolean hasExpired(CachedGroups groups, long startMs) {
+    if (groups == null) {
+      return true;
+    }
+    long timeout = cacheTimeout;
+    if (isNegativeCacheEnabled() && groups.getGroups().isEmpty()) {
+      // This CachedGroups is in the negative cache, thus it should expire
+      // sooner.
+      timeout = negativeCacheTimeout;
+    }
+    return groups.getTimestamp() + timeout <= startMs;
+  }
   
+  private boolean isNegativeCacheEnabled() {
+    return negativeCacheTimeout > 0;
+  }
+
   /**
    * Get the group memberships of a given user.
    * @param user User's name
@@ -126,18 +159,22 @@ public class Groups {
     }
     // Return cached value if available
     CachedGroups groups = userToGroupsMap.get(user);
-    long startMs = Time.monotonicNow();
-    // if cache has a value and it hasn't expired
-    if (groups != null && (groups.getTimestamp() + cacheTimeout > startMs)) {
+    long startMs = timer.monotonicNow();
+    if (!hasExpired(groups, startMs)) {
       if(LOG.isDebugEnabled()) {
         LOG.debug("Returning cached groups for '" + user + "'");
       }
+      if (groups.getGroups().isEmpty()) {
+        // Even with the negative cache enabled, getGroups() keeps its
+        // existing behavior of throwing IOException when the user's
+        // group list is empty.
+        throw new IOException("No groups found for user " + user);
+      }
       return groups.getGroups();
     }
 
     // Create and cache user's groups
     List<String> groupList = impl.getGroups(user);
-    long endMs = Time.monotonicNow();
+    long endMs = timer.monotonicNow();
     long deltaMs = endMs - startMs ;
     UserGroupInformation.metrics.addGetGroups(deltaMs);
     if (deltaMs > warningDeltaMs) {
@@ -146,6 +183,9 @@ public class Groups {
     }
     groups = new CachedGroups(groupList, endMs);
     if (groups.getGroups().isEmpty()) {
+      if (isNegativeCacheEnabled()) {
+        userToGroupsMap.put(user, groups);
+      }
       throw new IOException("No groups found for user " + user);
     }
     userToGroupsMap.put(user, groups);
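
From a caller's perspective the negative cache behaves like this; a sketch assuming the new key is spelled hadoop.security.groups.negative-cache.secs (the key spelling is an assumption based on the constant name above):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Groups;

class NegativeCacheDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Assumed key name; 30s negative-cache window for users with no groups.
    conf.setLong("hadoop.security.groups.negative-cache.secs", 30);
    Groups groups = new Groups(conf);
    try {
      groups.getGroups("no-such-user");
    } catch (IOException expected) {
      // "No groups found for user" -- the empty result is now cached.
    }
    try {
      // An immediate retry fails fast from the negative cache instead of
      // re-running the (possibly slow) group-mapping provider.
      groups.getGroups("no-such-user");
    } catch (IOException expectedAgain) {
      System.out.println("still no groups, answered from cache");
    }
  }
}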

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java Fri Jul 25 20:33:09 2014
@@ -201,7 +201,8 @@ public class LdapGroupsMapping
     } catch (CommunicationException e) {
       LOG.warn("Connection is closed, will try to reconnect");
     } catch (NamingException e) {
-      LOG.warn("Exception trying to get groups for user " + user, e);
+      LOG.warn("Exception trying to get groups for user " + user + ": "
+          + e.getMessage());
       return emptyResults;
     }
 
@@ -215,7 +216,8 @@ public class LdapGroupsMapping
       } catch (CommunicationException e) {
         LOG.warn("Connection being closed, reconnecting failed, retryCount = " + retryCount);
       } catch (NamingException e) {
-        LOG.warn("Exception trying to get groups for user " + user, e);
+        LOG.warn("Exception trying to get groups for user " + user + ":"
+            + e.getMessage());
         return emptyResults;
       }
     }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java Fri Jul 25 20:33:09 2014
@@ -84,7 +84,8 @@ public class ShellBasedUnixGroupsMapping
       result = Shell.execCommand(Shell.getGroupsForUserCommand(user));
     } catch (ExitCodeException e) {
       // if we didn't get the group - just return empty list;
-      LOG.warn("got exception trying to get groups for user " + user, e);
+      LOG.warn("got exception trying to get groups for user " + user + ": "
+          + e.getMessage());
       return new LinkedList<String>();
     }
     

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialProvider.java Fri Jul 25 20:33:09 2014
@@ -29,6 +29,8 @@ import org.apache.hadoop.classification.
  * abstraction to separate credential storage from users of them. It
  * is intended to support getting or storing passwords in a variety of ways,
  * including third party bindings.
+ * 
+ * <code>CredentialProvider</code> implementations must be thread safe.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java Fri Jul 25 20:33:09 2014
@@ -264,7 +264,7 @@ public class CredentialShell extends Con
                   alias + " from CredentialProvider " + provider.toString() +
                   ". Continue?:");
           if (!cont) {
-            out.println("Nothing has been be deleted.");
+            out.println("Nothing has been deleted.");
           }
           return cont;
         } catch (IOException e) {
@@ -373,12 +373,12 @@ public class CredentialShell extends Con
       char[] newPassword2 = c.readPassword("Enter password again: ");
       noMatch = !Arrays.equals(newPassword1, newPassword2);
       if (noMatch) {
-        Arrays.fill(newPassword1, ' ');
+        if (newPassword1 != null) Arrays.fill(newPassword1, ' ');
         c.format("Passwords don't match. Try again.%n");
       } else {
         cred = newPassword1;
       }
-      Arrays.fill(newPassword2, ' ');
+      if (newPassword2 != null) Arrays.fill(newPassword2, ' ');
     } while (noMatch);
     return cred;
   }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java Fri Jul 25 20:33:09 2014
@@ -194,15 +194,18 @@ public class JavaKeyStoreProvider extend
   @Override
   public CredentialEntry createCredentialEntry(String alias, char[] credential)
       throws IOException {
+    writeLock.lock();
     try {
       if (keyStore.containsAlias(alias) || cache.containsKey(alias)) {
         throw new IOException("Credential " + alias + " already exists in " + this);
       }
+      return innerSetCredential(alias, credential);
     } catch (KeyStoreException e) {
       throw new IOException("Problem looking up credential " + alias + " in " + this,
           e);
+    } finally {
+      writeLock.unlock();
     }
-    return innerSetCredential(alias, credential);
   }
 
   @Override
@@ -230,6 +233,7 @@ public class JavaKeyStoreProvider extend
 
   CredentialEntry innerSetCredential(String alias, char[] material)
       throws IOException {
+    writeLock.lock();
     try {
       keyStore.setKeyEntry(alias, new SecretKeySpec(
           new String(material).getBytes("UTF-8"), "AES"),
@@ -237,6 +241,8 @@ public class JavaKeyStoreProvider extend
     } catch (KeyStoreException e) {
       throw new IOException("Can't store credential " + alias + " in " + this,
           e);
+    } finally {
+      writeLock.unlock();
     }
     changed = true;
     return new CredentialEntry(alias, material);

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java Fri Jul 25 20:33:09 2014
@@ -55,7 +55,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public CredentialEntry getCredentialEntry(String alias) {
+  public synchronized CredentialEntry getCredentialEntry(String alias) {
     byte[] bytes = credentials.getSecretKey(new Text(alias));
     if (bytes == null) {
       return null;
@@ -64,7 +64,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public CredentialEntry createCredentialEntry(String name, char[] credential) 
+  public synchronized CredentialEntry createCredentialEntry(String name, char[] credential) 
       throws IOException {
     Text nameT = new Text(name);
     if (credentials.getSecretKey(nameT) != null) {
@@ -77,7 +77,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public void deleteCredentialEntry(String name) throws IOException {
+  public synchronized void deleteCredentialEntry(String name) throws IOException {
     byte[] cred = credentials.getSecretKey(new Text(name));
     if (cred != null) {
       credentials.removeSecretKey(new Text(name));
@@ -95,7 +95,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public void flush() {
+  public synchronized void flush() {
     user.addCredentials(credentials);
   }
 
@@ -112,7 +112,7 @@ public class UserProvider extends Creden
   }
 
   @Override
-  public List<String> getAliases() throws IOException {
+  public synchronized List<String> getAliases() throws IOException {
     List<String> list = new ArrayList<String>();
     List<Text> aliases = credentials.getAllSecretKeys();
     for (Text key : aliases) {

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java Fri Jul 25 20:33:09 2014
@@ -24,37 +24,64 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.regex.Pattern;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.MachineList;
 
 import com.google.common.annotations.VisibleForTesting;
 
+@InterfaceStability.Unstable
+@InterfaceAudience.Public
 public class DefaultImpersonationProvider implements ImpersonationProvider {
   private static final String CONF_HOSTS = ".hosts";
   private static final String CONF_USERS = ".users";
   private static final String CONF_GROUPS = ".groups";
-  private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
-  private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
-  private static final String CONF_HADOOP_PROXYUSER_RE_USERS_GROUPS = 
-      CONF_HADOOP_PROXYUSER_RE+"[^.]*(" + Pattern.quote(CONF_USERS) +
-      "|" + Pattern.quote(CONF_GROUPS) + ")";
-  private static final String CONF_HADOOP_PROXYUSER_RE_HOSTS = 
-      CONF_HADOOP_PROXYUSER_RE+"[^.]*"+ Pattern.quote(CONF_HOSTS);
   // acl and list of hosts per proxyuser
   private Map<String, AccessControlList> proxyUserAcl = 
     new HashMap<String, AccessControlList>();
-  private static Map<String, MachineList> proxyHosts = 
+  private Map<String, MachineList> proxyHosts =
     new HashMap<String, MachineList>();
   private Configuration conf;
 
+
+  private static DefaultImpersonationProvider testProvider;
+
+  public static synchronized DefaultImpersonationProvider getTestProvider() {
+    if (testProvider == null) {
+      testProvider = new DefaultImpersonationProvider();
+      testProvider.setConf(new Configuration());
+      testProvider.init(ProxyUsers.CONF_HADOOP_PROXYUSER);
+    }
+    return testProvider;
+  }
+
   @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
+  }
+
+  private String configPrefix;
+
+  @Override
+  public void init(String configurationPrefix) {
+    configPrefix = configurationPrefix +
+        (configurationPrefix.endsWith(".") ? "" : ".");
+    
+    // constructing regex to match the following patterns:
+    //   $configPrefix.[ANY].users
+    //   $configPrefix.[ANY].groups
+    //   $configPrefix.[ANY].hosts
+    //
+    String prefixRegEx = configPrefix.replace(".", "\\.");
+    String usersGroupsRegEx = prefixRegEx + "[^.]*(" +
+        Pattern.quote(CONF_USERS) + "|" + Pattern.quote(CONF_GROUPS) + ")";
+    String hostsRegEx = prefixRegEx + "[^.]*" + Pattern.quote(CONF_HOSTS);
 
-    // get list of users and groups per proxyuser
+    // get list of users and groups per proxyuser
     Map<String,String> allMatchKeys = 
-        conf.getValByRegex(CONF_HADOOP_PROXYUSER_RE_USERS_GROUPS); 
+        conf.getValByRegex(usersGroupsRegEx);
     for(Entry<String, String> entry : allMatchKeys.entrySet()) {  
       String aclKey = getAclKey(entry.getKey());
       if (!proxyUserAcl.containsKey(aclKey)) {
@@ -65,7 +92,7 @@ public class DefaultImpersonationProvide
     }
 
     // get hosts per proxyuser
-    allMatchKeys = conf.getValByRegex(CONF_HADOOP_PROXYUSER_RE_HOSTS);
+    allMatchKeys = conf.getValByRegex(hostsRegEx);
     for(Entry<String, String> entry : allMatchKeys.entrySet()) {
       proxyHosts.put(entry.getKey(),
           new MachineList(entry.getValue()));
@@ -86,8 +113,8 @@ public class DefaultImpersonationProvide
       return;
     }
     
-    AccessControlList acl = proxyUserAcl.get(
-        CONF_HADOOP_PROXYUSER+realUser.getShortUserName());
+    AccessControlList acl = proxyUserAcl.get(configPrefix +
+        realUser.getShortUserName());
     if (acl == null || !acl.isUserAllowed(user)) {
       throw new AuthorizationException("User: " + realUser.getUserName()
           + " is not allowed to impersonate " + user.getUserName());
@@ -116,8 +143,8 @@ public class DefaultImpersonationProvide
    * @param userName name of the superuser
    * @return configuration key for superuser usergroups
    */
-  public static String getProxySuperuserUserConfKey(String userName) {
-    return CONF_HADOOP_PROXYUSER+userName+CONF_USERS;
+  public String getProxySuperuserUserConfKey(String userName) {
+    return configPrefix + userName + CONF_USERS;
   }
 
   /**
@@ -126,8 +153,8 @@ public class DefaultImpersonationProvide
    * @param userName name of the superuser
    * @return configuration key for superuser groups
    */
-  public static String getProxySuperuserGroupConfKey(String userName) {
-    return CONF_HADOOP_PROXYUSER+userName+CONF_GROUPS;
+  public String getProxySuperuserGroupConfKey(String userName) {
+    return configPrefix + userName + CONF_GROUPS;
   }
 
   /**
@@ -136,8 +163,8 @@ public class DefaultImpersonationProvide
    * @param userName name of the superuser
    * @return configuration key for superuser ip-addresses
    */
-  public static String getProxySuperuserIpConfKey(String userName) {
-    return CONF_HADOOP_PROXYUSER+userName+CONF_HOSTS;
+  public String getProxySuperuserIpConfKey(String userName) {
+    return configPrefix + userName + CONF_HOSTS;
   }
 
   @VisibleForTesting
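
The regular expressions built in init() are easiest to read against concrete keys; a small sketch (the prefix value and key names are illustrative):

import java.util.regex.Pattern;

class PrefixRegexDemo {
  public static void main(String[] args) {
    String configPrefix = "hadoop.proxyuser.";  // as normalized by init()
    String prefixRegEx = configPrefix.replace(".", "\\.");
    String usersGroupsRegEx = prefixRegEx + "[^.]*("
        + Pattern.quote(".users") + "|" + Pattern.quote(".groups") + ")";
    String hostsRegEx = prefixRegEx + "[^.]*" + Pattern.quote(".hosts");

    // [^.]* matches exactly one proxy-user name between prefix and suffix.
    System.out.println("hadoop.proxyuser.oozie.groups".matches(usersGroupsRegEx)); // true
    System.out.println("hadoop.proxyuser.oozie.hosts".matches(hostsRegEx));        // true
    // Dots are excluded, so nested keys do not match.
    System.out.println("hadoop.proxyuser.a.b.hosts".matches(hostsRegEx));          // false
  }
}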

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java Fri Jul 25 20:33:09 2014
@@ -18,10 +18,25 @@
 
 package org.apache.hadoop.security.authorize;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.security.UserGroupInformation;
 
+@InterfaceStability.Unstable
+@InterfaceAudience.Public
 public interface ImpersonationProvider  extends Configurable {
+
+
+  /**
+   * Specifies the configuration prefix for the proxy user properties and
+   * initializes the provider.
+   *
+   * @param configurationPrefix the configuration prefix for the proxy user
+   * properties
+   */
+  public void init(String configurationPrefix);
+
   /**
    * Authorize the superuser which is doing doAs
    * 

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java Fri Jul 25 20:33:09 2014
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.security.authorize;
 
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -26,9 +28,12 @@ import org.apache.hadoop.util.Reflection
 
 import com.google.common.annotations.VisibleForTesting;
 
+@InterfaceStability.Unstable
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "HBase", "Hive"})
 public class ProxyUsers {
 
+  public static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser";
+
   private static volatile ImpersonationProvider sip ;
 
   /**
@@ -54,15 +59,31 @@ public class ProxyUsers {
   }
 
   /**
-   * refresh configuration
-   * @param conf
+   * Refreshes configuration using the specified Proxy user prefix for
+   * properties.
+   *
+   * @param conf configuration
+   * @param proxyUserPrefix proxy user configuration prefix
    */
-  public static void refreshSuperUserGroupsConfiguration(Configuration conf) { 
+  public static void refreshSuperUserGroupsConfiguration(Configuration conf,
+      String proxyUserPrefix) {
+    Preconditions.checkArgument(proxyUserPrefix != null && 
+        !proxyUserPrefix.isEmpty(), "prefix cannot be NULL or empty");
     // sip is volatile. Any assignment to it as well as the object's state
     // will be visible to all the other threads. 
-    sip = getInstance(conf);
+    ImpersonationProvider ip = getInstance(conf);
+    ip.init(proxyUserPrefix);
+    sip = ip;
     ProxyServers.refresh(conf);
   }
+
+  /**
+   * Refreshes configuration using the default Proxy user prefix for properties.
+   * @param conf configuration
+   */
+  public static void refreshSuperUserGroupsConfiguration(Configuration conf) {
+    refreshSuperUserGroupsConfiguration(conf, CONF_HADOOP_PROXYUSER);
+  }
   
   /**
    * Authorize the superuser which is doing doAs
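
A sketch of the two refresh entry points after this change (the alternate prefix and all configuration values are purely illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authorize.ProxyUsers;

class RefreshDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("hadoop.proxyuser.oozie.hosts", "host1,host2");
    conf.set("hadoop.proxyuser.oozie.groups", "group1");

    // Default prefix ("hadoop.proxyuser"), same behavior as before:
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);

    // New overload: a component can scope its proxy-user settings under
    // its own prefix (this prefix is a made-up example).
    conf.set("my.component.proxyuser.oozie.hosts", "host1");
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf,
        "my.component.proxyuser");
  }
}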

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Tool.java Fri Jul 25 20:33:09 2014
@@ -27,7 +27,7 @@ import org.apache.hadoop.conf.Configurab
  * 
  * <p><code>Tool</code>, is the standard for any Map-Reduce tool/application. 
  * The tool/application should delegate the handling of 
- * <a href="{@docRoot}/org/apache/hadoop/util/GenericOptionsParser.html#GenericOptions">
+ * <a href="{@docRoot}/../hadoop-project-dist/hadoop-common/CommandsManual.html#Generic_Options">
  * standard command-line options</a> to {@link ToolRunner#run(Tool, String[])} 
  * and only handle its custom arguments.</p>
  * 

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ToolRunner.java Fri Jul 25 20:33:09 2014
@@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configurat
  * <p><code>ToolRunner</code> can be used to run classes implementing 
  * <code>Tool</code> interface. It works in conjunction with 
  * {@link GenericOptionsParser} to parse the 
- * <a href="{@docRoot}/org/apache/hadoop/util/GenericOptionsParser.html#GenericOptions">
+ * <a href="{@docRoot}/../hadoop-project-dist/hadoop-common/CommandsManual.html#Generic_Options">
  * generic hadoop command line arguments</a> and modifies the 
  * <code>Configuration</code> of the <code>Tool</code>. The 
  * application-specific options are passed along without being modified.

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Fri Jul 25 20:33:09 2014
@@ -379,6 +379,7 @@ Java_org_apache_hadoop_io_compress_zlib_
       return (*env)->NewStringUTF(env, dl_info.dli_fname);
     }
   }
+  return (*env)->NewStringUTF(env, "Unavailable");
 #endif
 
 #ifdef WINDOWS

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Fri Jul 25 20:33:09 2014
@@ -580,6 +580,8 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jint uid)
 {
 #ifdef UNIX
+  jstring jstr_username = NULL;
+  char *pw_buf = NULL;
   int pw_lock_locked = 0;
   if (pw_lock_object != NULL) {
     if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
@@ -588,7 +590,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     pw_lock_locked = 1;
   }
 
-  char *pw_buf = NULL;
   int rc;
   size_t pw_buflen = get_pw_buflen();
   if ((pw_buf = malloc(pw_buflen)) == NULL) {
@@ -623,7 +624,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     goto cleanup;
   }
 
-  jstring jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
+  jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
 
 cleanup:
   if (pw_lock_locked) {
@@ -664,7 +665,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #ifdef WINDOWS
   THROW(env, "java/io/IOException",
     "The function POSIX.mmap() is not supported on Windows");
-  return NULL;
+  return (jlong)(intptr_t)NULL;
 #endif
 }
 
@@ -684,7 +685,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #ifdef WINDOWS
   THROW(env, "java/io/IOException",
     "The function POSIX.munmap() is not supported on Windows");
-  return NULL;
 #endif
 }
 
@@ -700,6 +700,8 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jint gid)
 {
 #ifdef UNIX
+  jstring jstr_groupname = NULL;
+  char *pw_buf = NULL;
   int pw_lock_locked = 0;
  
   if (pw_lock_object != NULL) {
@@ -709,7 +711,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     pw_lock_locked = 1;
   }
   
-  char *pw_buf = NULL;
   int rc;
   size_t pw_buflen = get_pw_buflen();
   if ((pw_buf = malloc(pw_buflen)) == NULL) {
@@ -744,7 +745,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
     goto cleanup;
   }
 
-  jstring jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
+  jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
   PASS_EXCEPTIONS_GOTO(env, cleanup);
   
 cleanup:
@@ -922,7 +923,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #ifdef UNIX
   THROW(env, "java/io/IOException",
     "The function setFilePointer(FileDescriptor) is not supported on Unix");
-  return NULL;
+  return (jlong)(intptr_t)NULL;
 #endif
 
 #ifdef WINDOWS
@@ -957,7 +958,7 @@ JNIEXPORT jboolean JNICALL Java_org_apac
 #ifdef UNIX
   THROW(env, "java/io/IOException",
     "The function access0(path, access) is not supported on Unix");
-  return NULL;
+  return (jlong)(intptr_t)NULL;
 #endif
 
 #ifdef WINDOWS

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c Fri Jul 25 20:33:09 2014
@@ -120,17 +120,19 @@ Java_org_apache_hadoop_net_unix_DomainSo
 JNIEnv *env, jobject obj, jint fd)
 {
   struct fd_set_data *sd;
-  struct pollfd *pollfd, *last_pollfd;
+  struct pollfd *pollfd = NULL, *last_pollfd;
   int used_size, i;
 
   sd = (struct fd_set_data*)(intptr_t)(*env)->
       GetLongField(env, obj, fd_set_data_fid);
   used_size = sd->used_size;
   for (i = 0; i < used_size; i++) {
-    pollfd = sd->pollfd + i;
-    if (pollfd->fd == fd) break;
+    if (sd->pollfd[i].fd == fd) {
+      pollfd = sd->pollfd + i;
+      break;
+    }
   }
-  if (i == used_size) {
+  if (pollfd == NULL) {
     (*env)->Throw(env, newRuntimeException(env, "failed to remove fd %d "
           "from the FdSet because it was never present.", fd));
     return;

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c Fri Jul 25 20:33:09 2014
@@ -45,7 +45,7 @@ static void throw_ioexception(JNIEnv* en
     FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
     NULL, *(DWORD*) (&errnum), // reinterpret cast
     MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
-    (LPSTR*)&buffer, 0, NULL);
+    buffer, 0, NULL);
 
   if (len > 0)
   {

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c Fri Jul 25 20:33:09 2014
@@ -36,7 +36,7 @@
 struct hadoop_user_info *hadoop_user_info_alloc(void)
 {
   struct hadoop_user_info *uinfo;
-  size_t buf_sz;
+  long buf_sz;
   char *buf;
 
   uinfo = calloc(1, sizeof(struct hadoop_user_info));

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Fri Jul 25 20:33:09 2014
@@ -198,6 +198,20 @@ for ldap providers in the same way as ab
 </property>
 
 <property>
+  <name>hadoop.security.groups.negative-cache.secs</name>
+  <value>30</value>
+  <description>
+    Expiration time for entries in the negative user-to-group mapping
+    cache, in seconds. This is useful when invalid users are retrying
+    frequently. It is suggested to set a small value for this expiration, since
+    a transient error in group lookup could temporarily lock out a legitimate
+    user.
+
+    Set this to a zero or negative value to disable negative user-to-group caching.
+  </description>
+</property>
+
+<property>
   <name>hadoop.security.groups.cache.warn.after.ms</name>
   <value>5000</value>
   <description>
@@ -1455,4 +1469,37 @@ for ldap providers in the same way as ab
   <value>true</value>
   <description>Don't cache 'har' filesystem instances.</description>
 </property>
+
+<!--- KMSClientProvider configurations -->
+<property>
+  <name>hadoop.security.kms.client.encrypted.key.cache.size</name>
+  <value>500</value>
+  <description>
+    Size of the EncryptedKeyVersion cache Queue for each key
+  </description>
+</property>
+<property>
+  <name>hadoop.security.kms.client.encrypted.key.cache.low-watermark</name>
+  <value>0.3f</value>
+  <description>
+    If size of the EncryptedKeyVersion cache Queue falls below the
+    low watermark, this cache queue will be scheduled for a refill
+  </description>
+</property>
+<property>
+  <name>hadoop.security.kms.client.encrypted.key.cache.num.refill.threads</name>
+  <value>2</value>
+  <description>
+    Number of threads to use for refilling depleted EncryptedKeyVersion
+    cache Queues
+  </description>
+</property>
+<property>
+  <name>"hadoop.security.kms.client.encrypted.key.cache.expiry</name>
+  <value>43200000</value>
+  <description>
+    Cache expiry time for a Key, after which the cache Queue for this
+    key will be dropped. Default = 12 hours
+  </description>
+</property>
 </configuration>

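The new core-default.xml keys can also be set programmatically. A hedged sketch (the setter types are inferred from the default values above, and the tuning values are arbitrary examples, not recommendations):

    Configuration conf = new Configuration();
    // A zero or negative value disables negative user-to-group caching.
    conf.setLong("hadoop.security.groups.negative-cache.secs", 0);
    // KMS client EEK cache tuning.
    conf.setInt("hadoop.security.kms.client.encrypted.key.cache.size", 100);
    conf.setFloat(
        "hadoop.security.kms.client.encrypted.key.cache.low-watermark", 0.5f);
    conf.setInt(
        "hadoop.security.kms.client.encrypted.key.cache.num.refill.threads", 4);
    // Expiry is in milliseconds; six hours here instead of the 12-hour default.
    conf.setLong("hadoop.security.kms.client.encrypted.key.cache.expiry",
        6 * 60 * 60 * 1000L);
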
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm Fri Jul 25 20:33:09 2014
@@ -127,7 +127,7 @@ User Commands
    Runs a HDFS filesystem checking utility.
    See {{{../hadoop-hdfs/HdfsUserGuide.html#fsck}fsck}} for more info.
 
-   Usage: <<<hadoop fsck [GENERIC_OPTIONS] <path> [-move | -delete | -openforwrite] [-files [-blocks [-locations | -racks]]]>>>
+   Usage: <<<hadoop fsck [GENERIC_OPTIONS] <path> [-move | -delete | -openforwrite] [-files [-blocks [-locations | -racks]]] [-showprogress]>>>
 
 *------------------+---------------------------------------------+
 ||  COMMAND_OPTION || Description
@@ -148,6 +148,8 @@ User Commands
 *------------------+---------------------------------------------+
 |   -racks         | Print out network topology for data-node locations.
 *------------------+---------------------------------------------+
+|   -showprogress  | Print out progress in output. Default is OFF (no progress).
+*------------------+---------------------------------------------+
 
 * <<<fetchdt>>>
 

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm Fri Jul 25 20:33:09 2014
@@ -116,6 +116,8 @@ Native Libraries Guide
 
      * zlib-development package (stable version >= 1.2.0)
 
+     * openssl-development package (e.g. libssl-dev)
+
    Once you installed the prerequisite packages use the standard hadoop
    pom.xml file and pass along the native flag to build the native hadoop 
    library:

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderCryptoExtension.java Fri Jul 25 20:33:09 2014
@@ -17,50 +17,112 @@
  */
 package org.apache.hadoop.crypto.key;
 
+import java.net.URI;
+import java.security.SecureRandom;
+import java.util.Arrays;
+
+import javax.crypto.Cipher;
+import javax.crypto.spec.IvParameterSpec;
+import javax.crypto.spec.SecretKeySpec;
+
 import org.apache.hadoop.conf.Configuration;
-import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.net.URI;
-import java.security.SecureRandom;
+
+import static org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
 
 public class TestKeyProviderCryptoExtension {
 
   private static final String CIPHER = "AES";
+  private static final String ENCRYPTION_KEY_NAME = "fooKey";
 
-  @Test
-  public void testGenerateEncryptedKey() throws Exception {
-    Configuration conf = new Configuration();    
-    KeyProvider kp = 
-        new UserProvider.Factory().createProvider(new URI("user:///"), conf);
-    KeyProvider.Options options = new KeyProvider.Options(conf);
+  private static Configuration conf;
+  private static KeyProvider kp;
+  private static KeyProviderCryptoExtension kpExt;
+  private static KeyProvider.Options options;
+  private static KeyVersion encryptionKey;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    conf = new Configuration();
+    kp = new UserProvider.Factory().createProvider(new URI("user:///"), conf);
+    kpExt = KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp);
+    options = new KeyProvider.Options(conf);
     options.setCipher(CIPHER);
     options.setBitLength(128);
-    KeyProvider.KeyVersion kv = kp.createKey("foo", SecureRandom.getSeed(16),
-        options);
-    KeyProviderCryptoExtension kpExt = 
-        KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp);
-    
+    encryptionKey =
+        kp.createKey(ENCRYPTION_KEY_NAME, SecureRandom.getSeed(16), options);
+  }
+
+  @Test
+  public void testGenerateEncryptedKey() throws Exception {
+    // Generate a new EEK and check it
     KeyProviderCryptoExtension.EncryptedKeyVersion ek1 = 
-        kpExt.generateEncryptedKey(kv);
-    Assert.assertEquals(KeyProviderCryptoExtension.EEK, 
-        ek1.getEncryptedKey().getVersionName());
-    Assert.assertNotNull(ek1.getEncryptedKey().getMaterial());
-    Assert.assertEquals(kv.getMaterial().length, 
-        ek1.getEncryptedKey().getMaterial().length);
-    KeyProvider.KeyVersion k1 = kpExt.decryptEncryptedKey(ek1);
-    Assert.assertEquals(KeyProviderCryptoExtension.EK, k1.getVersionName());
-    KeyProvider.KeyVersion k1a = kpExt.decryptEncryptedKey(ek1);
-    Assert.assertArrayEquals(k1.getMaterial(), k1a.getMaterial());
-    Assert.assertEquals(kv.getMaterial().length, k1.getMaterial().length);
+        kpExt.generateEncryptedKey(encryptionKey.getName());
+    assertEquals("Version name of EEK should be EEK",
+        KeyProviderCryptoExtension.EEK,
+        ek1.getEncryptedKeyVersion().getVersionName());
+    assertEquals("Name of EEK should be encryption key name",
+        ENCRYPTION_KEY_NAME, ek1.getEncryptionKeyName());
+    assertNotNull("Expected encrypted key material",
+        ek1.getEncryptedKeyVersion().getMaterial());
+    assertEquals("Length of encryption key material and EEK material should "
+            + "be the same", encryptionKey.getMaterial().length,
+        ek1.getEncryptedKeyVersion().getMaterial().length
+    );
+
+    // Decrypt EEK into an EK and check it
+    KeyVersion k1 = kpExt.decryptEncryptedKey(ek1);
+    assertEquals(KeyProviderCryptoExtension.EK, k1.getVersionName());
+    assertEquals(encryptionKey.getMaterial().length, k1.getMaterial().length);
+    if (Arrays.equals(k1.getMaterial(), encryptionKey.getMaterial())) {
+      fail("Encrypted key material should not equal encryption key material");
+    }
+    if (Arrays.equals(ek1.getEncryptedKeyVersion().getMaterial(),
+        encryptionKey.getMaterial())) {
+      fail("Encrypted key material should not equal decrypted key material");
+    }
+    // Decrypt it again and it should be the same
+    KeyVersion k1a = kpExt.decryptEncryptedKey(ek1);
+    assertArrayEquals(k1.getMaterial(), k1a.getMaterial());
 
+    // Generate another EEK and make sure it's different from the first
     KeyProviderCryptoExtension.EncryptedKeyVersion ek2 = 
-        kpExt.generateEncryptedKey(kv);
-    KeyProvider.KeyVersion k2 = kpExt.decryptEncryptedKey(ek2);
-    boolean eq = true;
-    for (int i = 0; eq && i < ek2.getEncryptedKey().getMaterial().length; i++) {
-      eq = k2.getMaterial()[i] == k1.getMaterial()[i];
+        kpExt.generateEncryptedKey(encryptionKey.getName());
+    KeyVersion k2 = kpExt.decryptEncryptedKey(ek2);
+    if (Arrays.equals(k1.getMaterial(), k2.getMaterial())) {
+      fail("Generated EEKs should have different material!");
     }
-    Assert.assertFalse(eq);
+    if (Arrays.equals(ek1.getEncryptedKeyIv(), ek2.getEncryptedKeyIv())) {
+      fail("Generated EEKs should have different IVs!");
+    }
+  }
+
+  @Test
+  public void testEncryptDecrypt() throws Exception {
+    // Get an EEK
+    KeyProviderCryptoExtension.EncryptedKeyVersion eek =
+        kpExt.generateEncryptedKey(encryptionKey.getName());
+    final byte[] encryptedKeyIv = eek.getEncryptedKeyIv();
+    final byte[] encryptedKeyMaterial = eek.getEncryptedKeyVersion()
+        .getMaterial();
+    // Decrypt it manually
+    Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
+    cipher.init(Cipher.DECRYPT_MODE,
+        new SecretKeySpec(encryptionKey.getMaterial(), "AES"),
+        new IvParameterSpec(KeyProviderCryptoExtension.EncryptedKeyVersion
+            .deriveIV(encryptedKeyIv)));
+    final byte[] manualMaterial = cipher.doFinal(encryptedKeyMaterial);
+    // Decrypt it with the API
+    KeyVersion decryptedKey = kpExt.decryptEncryptedKey(eek);
+    final byte[] apiMaterial = decryptedKey.getMaterial();
+
+    assertArrayEquals("Wrong key material from decryptEncryptedKey",
+        manualMaterial, apiMaterial);
   }
 }

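The reworked test above exercises the new generate/decrypt round trip keyed by encryption key name. Distilled to its essentials (a sketch only; assumes a key named "fooKey" was already created in the provider, as in the test's setup method):

    KeyProvider kp =
        new UserProvider.Factory().createProvider(new URI("user:///"), conf);
    KeyProviderCryptoExtension ext =
        KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp);
    // Generate an encrypted key (EEK) under the named encryption key...
    KeyProviderCryptoExtension.EncryptedKeyVersion eek =
        ext.generateEncryptedKey("fooKey");
    // ...and decrypt it back; the decrypted material must differ from the
    // encryption key's own material, as the test asserts.
    KeyProvider.KeyVersion ek = ext.decryptEncryptedKey(eek);
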
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java Fri Jul 25 20:33:09 2014
@@ -161,7 +161,7 @@ public class TestKeyShell {
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
     rc = ks.run(args1);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
     assertTrue(outContent.toString().contains("key1 has not been created."));
   }
 
@@ -174,7 +174,7 @@ public class TestKeyShell {
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
     rc = ks.run(args1);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
     assertTrue(outContent.toString().contains("key1 has not been created."));
   }
 
@@ -187,7 +187,7 @@ public class TestKeyShell {
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
     rc = ks.run(args1);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
     assertTrue(outContent.toString().contains("There are no valid " +
     		"KeyProviders configured."));
   }
@@ -216,7 +216,7 @@ public class TestKeyShell {
     config.set(KeyProviderFactory.KEY_PROVIDER_PATH, "user:///");
     ks.setConf(config);
     rc = ks.run(args1);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
     assertTrue(outContent.toString().contains("There are no valid " +
     		"KeyProviders configured."));
   }
@@ -262,19 +262,19 @@ public class TestKeyShell {
     final String[] args2 = {"create", "keyattr2", "--provider", jceksProvider,
             "--attr", "=bar"};
     rc = ks.run(args2);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
 
     /* Not in attribute = value form */
     outContent.reset();
     args2[5] = "foo";
     rc = ks.run(args2);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
 
     /* No attribute or value */
     outContent.reset();
     args2[5] = "=";
     rc = ks.run(args2);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
 
     /* Legal: attribute is a, value is b=c */
     outContent.reset();
@@ -308,7 +308,7 @@ public class TestKeyShell {
             "--attr", "foo=bar",
             "--attr", "foo=glarch"};
     rc = ks.run(args4);
-    assertEquals(-1, rc);
+    assertEquals(1, rc);
 
     /* Clean up to be a good citizen */
     deleteKey(ks, "keyattr1");

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java Fri Jul 25 20:33:09 2014
@@ -29,14 +29,33 @@ import java.util.Random;
 
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
+
 import static org.junit.Assert.*;
 
 public class TestDFVariations {
-
+  private static final String TEST_ROOT_DIR =
+      System.getProperty("test.build.data","build/test/data") + "/TestDFVariations";
+  private static File test_root = null;
+
+  @Before
+  public void setup() throws IOException {
+    test_root = new File(TEST_ROOT_DIR);
+    test_root.mkdirs();
+  }
+  
+  @After
+  public void after() throws IOException {
+    FileUtil.setWritable(test_root, true);
+    FileUtil.fullyDelete(test_root);
+    assertTrue(!test_root.exists());
+  }
+  
   public static class XXDF extends DF {
     public XXDF() throws IOException {
-      super(new File(System.getProperty("test.build.data","/tmp")), 0L);
+      super(test_root, 0L);
     }
 
     @Override

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java Fri Jul 25 20:33:09 2014
@@ -45,15 +45,15 @@ public class TestStat extends FileSystem
     final String doesNotExist;
     final String directory;
     final String file;
-    final String symlink;
+    final String[] symlinks;
     final String stickydir;
 
     StatOutput(String doesNotExist, String directory, String file,
-        String symlink, String stickydir) {
+        String[] symlinks, String stickydir) {
       this.doesNotExist = doesNotExist;
       this.directory = directory;
       this.file = file;
-      this.symlink = symlink;
+      this.symlinks = symlinks;
       this.stickydir = stickydir;
     }
 
@@ -78,10 +78,12 @@ public class TestStat extends FileSystem
       status = stat.getFileStatusForTesting();
       assertTrue(status.isFile());
 
-      br = new BufferedReader(new StringReader(symlink));
-      stat.parseExecResult(br);
-      status = stat.getFileStatusForTesting();
-      assertTrue(status.isSymlink());
+      for (String symlink : symlinks) {
+        br = new BufferedReader(new StringReader(symlink));
+        stat.parseExecResult(br);
+        status = stat.getFileStatusForTesting();
+        assertTrue(status.isSymlink());
+      }
 
       br = new BufferedReader(new StringReader(stickydir));
       stat.parseExecResult(br);
@@ -93,22 +95,30 @@ public class TestStat extends FileSystem
 
   @Test(timeout=10000)
   public void testStatLinux() throws Exception {
+    String[] symlinks = new String[] {
+        "6,symbolic link,1373584236,1373584236,777,andrew,andrew,`link' -> `target'",
+        "6,symbolic link,1373584236,1373584236,777,andrew,andrew,'link' -> 'target'"
+    };
     StatOutput linux = new StatOutput(
         "stat: cannot stat `watermelon': No such file or directory",
         "4096,directory,1373584236,1373586485,755,andrew,root,`.'",
         "0,regular empty file,1373584228,1373584228,644,andrew,andrew,`target'",
-        "6,symbolic link,1373584236,1373584236,777,andrew,andrew,`link' -> `target'",
+        symlinks,
         "4096,directory,1374622334,1375124212,1755,andrew,andrew,`stickydir'");
     linux.test();
   }
 
   @Test(timeout=10000)
   public void testStatFreeBSD() throws Exception {
+    String[] symlinks = new String[] {
+        "6,Symbolic Link,1373508941,1373508941,120755,awang,awang,`link' -> `target'"
+    };
+    
     StatOutput freebsd = new StatOutput(
         "stat: symtest/link: stat: No such file or directory",
         "512,Directory,1373583695,1373583669,40755,awang,awang,`link' -> `'",
         "0,Regular File,1373508937,1373508937,100644,awang,awang,`link' -> `'",
-        "6,Symbolic Link,1373508941,1373508941,120755,awang,awang,`link' -> `target'",
+        symlinks,
         "512,Directory,1375139537,1375139537,41755,awang,awang,`link' -> `'");
     freebsd.test();
   }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java Fri Jul 25 20:33:09 2014
@@ -35,19 +35,22 @@ import org.junit.Before;
 import org.junit.Test;
 
 public class TestPathData {
+  private static final String TEST_ROOT_DIR = 
+      System.getProperty("test.build.data","build/test/data") + "/testPD";
   protected Configuration conf;
   protected FileSystem fs;
   protected Path testDir;
-
+  
   @Before
   public void initialize() throws Exception {
     conf = new Configuration();
     fs = FileSystem.getLocal(conf);
-    testDir = new Path(
-        System.getProperty("test.build.data", "build/test/data") + "/testPD"
-    );
+    testDir = new Path(TEST_ROOT_DIR);
+    
     // don't want scheme on the path, just an absolute path
     testDir = new Path(fs.makeQualified(testDir).toUri().getPath());
+    fs.mkdirs(testDir);
+
     FileSystem.setDefaultUri(conf, fs.getUri());    
     fs.setWorkingDirectory(testDir);
     fs.mkdirs(new Path("d1"));
@@ -60,6 +63,7 @@ public class TestPathData {
 
   @After
   public void cleanup() throws Exception {
+    fs.delete(testDir, true);
     fs.close();
   }