Posted to common-commits@hadoop.apache.org by cm...@apache.org on 2014/04/18 18:32:44 UTC

svn commit: r1588509 [3/4] - in /hadoop/common/branches/HADOOP-10388/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/ hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/ hadoop-a...

Propchange: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1582150-1588387

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Fri Apr 18 16:32:35 2014
@@ -666,9 +666,9 @@ public class Configuration implements It
      }
 
      this.updatingResource = new HashMap<String, String[]>(other.updatingResource);
+     this.finalParameters = new HashSet<String>(other.finalParameters);
    }
    
-    this.finalParameters = new HashSet<String>(other.finalParameters);
     synchronized(Configuration.class) {
       REGISTRY.put(this, null);
     }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java Fri Apr 18 16:32:35 2014
@@ -18,18 +18,22 @@
 
 package org.apache.hadoop.crypto.key;
 
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-
+import org.apache.hadoop.fs.permission.FsPermission;
 import javax.crypto.spec.SecretKeySpec;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
 import java.io.Serializable;
 import java.net.URI;
+import java.net.URL;
 import java.security.Key;
 import java.security.KeyStore;
 import java.security.KeyStoreException;
@@ -42,16 +46,30 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 /**
  * KeyProvider based on Java's KeyStore file format. The file may be stored in
  * any Hadoop FileSystem using the following name mangling:
  *  jks://hdfs@nn1.example.com/my/keys.jks -> hdfs://nn1.example.com/my/keys.jks
  *  jks://file/home/owen/keys.jks -> file:///home/owen/keys.jks
- *
- * The password for the keystore is taken from the HADOOP_KEYSTORE_PASSWORD
- * environment variable with a default of 'none'.
- *
+ * <p/>
+ * If the <code>HADOOP_KEYSTORE_PASSWORD</code> environment variable is set,
+ * its value is used as the password for the keystore.
+ * <p/>
+ * If the <code>HADOOP_KEYSTORE_PASSWORD</code> environment variable is not set,
+ * the password for the keystore is read from the file specified in the
+ * {@link #KEYSTORE_PASSWORD_FILE_KEY} configuration property. The password file
+ * is looked up in Hadoop's configuration directory via the classpath.
+ * <p/>
+ * <b>NOTE:</b> Make sure the password in the password file does not have a
+ * trailing newline (ENTER), or else it won't be valid for the Java KeyStore.
+ * <p/>
+ * If neither the environment variable nor the property is set, the password
+ * used is 'none'.
+ * <p/>
  * It is expected for encrypted InputFormats and OutputFormats to copy the keys
  * from the original provider into the job's Credentials object, which is
  * accessed via the UserProvider. Therefore, this provider won't be used by
@@ -61,16 +79,23 @@ import java.util.Map;
 public class JavaKeyStoreProvider extends KeyProvider {
   private static final String KEY_METADATA = "KeyMetadata";
   public static final String SCHEME_NAME = "jceks";
-  public static final String KEYSTORE_PASSWORD_NAME =
+
+  public static final String KEYSTORE_PASSWORD_FILE_KEY =
+      "hadoop.security.keystore.java-keystore-provider.password-file";
+
+  public static final String KEYSTORE_PASSWORD_ENV_VAR =
       "HADOOP_KEYSTORE_PASSWORD";
-  public static final String KEYSTORE_PASSWORD_DEFAULT = "none";
+  public static final char[] KEYSTORE_PASSWORD_DEFAULT = "none".toCharArray();
 
   private final URI uri;
   private final Path path;
   private final FileSystem fs;
+  private final FsPermission permissions;
   private final KeyStore keyStore;
-  private final char[] password;
+  private char[] password;
   private boolean changed = false;
+  private Lock readLock;
+  private Lock writeLock;
 
   private final Map<String, Metadata> cache = new HashMap<String, Metadata>();
 
@@ -78,17 +103,40 @@ public class JavaKeyStoreProvider extend
     this.uri = uri;
     path = unnestUri(uri);
     fs = path.getFileSystem(conf);
-    // Get the password from the user's environment
-    String pw = System.getenv(KEYSTORE_PASSWORD_NAME);
-    if (pw == null) {
-      pw = KEYSTORE_PASSWORD_DEFAULT;
+    // Get the password from the user's environment variable; if it is not
+    // set, fall back to the password file named in the conf
+    if (System.getenv().containsKey(KEYSTORE_PASSWORD_ENV_VAR)) {
+      password = System.getenv(KEYSTORE_PASSWORD_ENV_VAR).toCharArray();
+    }
+    if (password == null) {
+      String pwFile = conf.get(KEYSTORE_PASSWORD_FILE_KEY);
+      if (pwFile != null) {
+        ClassLoader cl = Thread.currentThread().getContextClassLoader();
+        URL pwdFile = cl.getResource(pwFile);
+        if (pwdFile != null) {
+          InputStream is = pwdFile.openStream();
+          try {
+            password = IOUtils.toCharArray(is);
+          } finally {
+            is.close();
+          }
+        }
+      }
+    }
+    if (password == null) {
+      password = KEYSTORE_PASSWORD_DEFAULT;
     }
-    password = pw.toCharArray();
     try {
       keyStore = KeyStore.getInstance(SCHEME_NAME);
       if (fs.exists(path)) {
+        // save off permissions in case we need to
+        // rewrite the keystore in flush()
+        FileStatus s = fs.getFileStatus(path);
+        permissions = s.getPermission();
+
         keyStore.load(fs.open(path), password);
       } else {
+        permissions = new FsPermission("700");
         // required to create an empty keystore. *sigh*
         keyStore.load(null, password);
       }
@@ -99,138 +147,171 @@ public class JavaKeyStoreProvider extend
     } catch (CertificateException e) {
       throw new IOException("Can't load keystore " + path, e);
     }
+    ReadWriteLock lock = new ReentrantReadWriteLock(true);
+    readLock = lock.readLock();
+    writeLock = lock.writeLock();
   }
 
   @Override
   public KeyVersion getKeyVersion(String versionName) throws IOException {
-    SecretKeySpec key = null;
+    readLock.lock();
     try {
-      if (!keyStore.containsAlias(versionName)) {
-        return null;
+      SecretKeySpec key = null;
+      try {
+        if (!keyStore.containsAlias(versionName)) {
+          return null;
+        }
+        key = (SecretKeySpec) keyStore.getKey(versionName, password);
+      } catch (KeyStoreException e) {
+        throw new IOException("Can't get key " + versionName + " from " +
+                              path, e);
+      } catch (NoSuchAlgorithmException e) {
+        throw new IOException("Can't get algorithm for key " + key + " from " +
+                              path, e);
+      } catch (UnrecoverableKeyException e) {
+        throw new IOException("Can't recover key " + key + " from " + path, e);
       }
-      key = (SecretKeySpec) keyStore.getKey(versionName, password);
-    } catch (KeyStoreException e) {
-      throw new IOException("Can't get key " + versionName + " from " +
-                            path, e);
-    } catch (NoSuchAlgorithmException e) {
-      throw new IOException("Can't get algorithm for key " + key + " from " +
-                            path, e);
-    } catch (UnrecoverableKeyException e) {
-      throw new IOException("Can't recover key " + key + " from " + path, e);
+      return new KeyVersion(versionName, key.getEncoded());
+    } finally {
+      readLock.unlock();
     }
-    return new KeyVersion(versionName, key.getEncoded());
   }
 
   @Override
   public List<String> getKeys() throws IOException {
-    ArrayList<String> list = new ArrayList<String>();
-    String alias = null;
+    readLock.lock();
     try {
-      Enumeration<String> e = keyStore.aliases();
-      while (e.hasMoreElements()) {
-         alias = e.nextElement();
-         // only include the metadata key names in the list of names
-         if (!alias.contains("@")) {
-             list.add(alias);
-         }
+      ArrayList<String> list = new ArrayList<String>();
+      String alias = null;
+      try {
+        Enumeration<String> e = keyStore.aliases();
+        while (e.hasMoreElements()) {
+           alias = e.nextElement();
+           // only include the metadata key names in the list of names
+           if (!alias.contains("@")) {
+               list.add(alias);
+           }
+        }
+      } catch (KeyStoreException e) {
+        throw new IOException("Can't get key " + alias + " from " + path, e);
       }
-    } catch (KeyStoreException e) {
-      throw new IOException("Can't get key " + alias + " from " + path, e);
+      return list;
+    } finally {
+      readLock.unlock();
     }
-    return list;
   }
 
   @Override
   public List<KeyVersion> getKeyVersions(String name) throws IOException {
-    List<KeyVersion> list = new ArrayList<KeyVersion>();
-    Metadata km = getMetadata(name);
-    if (km != null) {
-      int latestVersion = km.getVersions();
-      KeyVersion v = null;
-      String versionName = null;
-      for (int i = 0; i < latestVersion; i++) {
-        versionName = buildVersionName(name, i);
-        v = getKeyVersion(versionName);
-        if (v != null) {
-          list.add(v);
+    readLock.lock();
+    try {
+      List<KeyVersion> list = new ArrayList<KeyVersion>();
+      Metadata km = getMetadata(name);
+      if (km != null) {
+        int latestVersion = km.getVersions();
+        KeyVersion v = null;
+        String versionName = null;
+        for (int i = 0; i < latestVersion; i++) {
+          versionName = buildVersionName(name, i);
+          v = getKeyVersion(versionName);
+          if (v != null) {
+            list.add(v);
+          }
         }
       }
+      return list;
+    } finally {
+      readLock.unlock();
     }
-    return list;
   }
 
   @Override
   public Metadata getMetadata(String name) throws IOException {
-    if (cache.containsKey(name)) {
-      return cache.get(name);
-    }
+    readLock.lock();
     try {
-      if (!keyStore.containsAlias(name)) {
-        return null;
+      if (cache.containsKey(name)) {
+        return cache.get(name);
       }
-      Metadata meta = ((KeyMetadata) keyStore.getKey(name, password)).metadata;
-      cache.put(name, meta);
-      return meta;
-    } catch (KeyStoreException e) {
-      throw new IOException("Can't get metadata for " + name +
-          " from keystore " + path, e);
-    } catch (NoSuchAlgorithmException e) {
-      throw new IOException("Can't get algorithm for " + name +
-          " from keystore " + path, e);
-    } catch (UnrecoverableKeyException e) {
-      throw new IOException("Can't recover key for " + name +
-          " from keystore " + path, e);
+      try {
+        if (!keyStore.containsAlias(name)) {
+          return null;
+        }
+        Metadata meta = ((KeyMetadata) keyStore.getKey(name, password)).metadata;
+        cache.put(name, meta);
+        return meta;
+      } catch (KeyStoreException e) {
+        throw new IOException("Can't get metadata for " + name +
+            " from keystore " + path, e);
+      } catch (NoSuchAlgorithmException e) {
+        throw new IOException("Can't get algorithm for " + name +
+            " from keystore " + path, e);
+      } catch (UnrecoverableKeyException e) {
+        throw new IOException("Can't recover key for " + name +
+            " from keystore " + path, e);
+      }
+    } finally {
+      readLock.unlock();
     }
   }
 
   @Override
   public KeyVersion createKey(String name, byte[] material,
                                Options options) throws IOException {
+    writeLock.lock();
     try {
-      if (keyStore.containsAlias(name) || cache.containsKey(name)) {
-        throw new IOException("Key " + name + " already exists in " + this);
+      try {
+        if (keyStore.containsAlias(name) || cache.containsKey(name)) {
+          throw new IOException("Key " + name + " already exists in " + this);
+        }
+      } catch (KeyStoreException e) {
+        throw new IOException("Problem looking up key " + name + " in " + this,
+            e);
       }
-    } catch (KeyStoreException e) {
-      throw new IOException("Problem looking up key " + name + " in " + this,
-          e);
+      Metadata meta = new Metadata(options.getCipher(), options.getBitLength(),
+          options.getDescription(), new Date(), 1);
+      if (options.getBitLength() != 8 * material.length) {
+        throw new IOException("Wrong key length. Required " +
+            options.getBitLength() + ", but got " + (8 * material.length));
+      }
+      cache.put(name, meta);
+      String versionName = buildVersionName(name, 0);
+      return innerSetKeyVersion(versionName, material, meta.getCipher());
+    } finally {
+      writeLock.unlock();
     }
-    Metadata meta = new Metadata(options.getCipher(), options.getBitLength(),
-        new Date(), 1);
-    if (options.getBitLength() != 8 * material.length) {
-      throw new IOException("Wrong key length. Required " +
-          options.getBitLength() + ", but got " + (8 * material.length));
-    }
-    cache.put(name, meta);
-    String versionName = buildVersionName(name, 0);
-    return innerSetKeyVersion(versionName, material, meta.getCipher());
   }
 
   @Override
   public void deleteKey(String name) throws IOException {
-    Metadata meta = getMetadata(name);
-    if (meta == null) {
-      throw new IOException("Key " + name + " does not exist in " + this);
-    }
-    for(int v=0; v < meta.getVersions(); ++v) {
-      String versionName = buildVersionName(name, v);
+    writeLock.lock();
+    try {
+      Metadata meta = getMetadata(name);
+      if (meta == null) {
+        throw new IOException("Key " + name + " does not exist in " + this);
+      }
+      for(int v=0; v < meta.getVersions(); ++v) {
+        String versionName = buildVersionName(name, v);
+        try {
+          if (keyStore.containsAlias(versionName)) {
+            keyStore.deleteEntry(versionName);
+          }
+        } catch (KeyStoreException e) {
+          throw new IOException("Problem removing " + versionName + " from " +
+              this, e);
+        }
+      }
       try {
-        if (keyStore.containsAlias(versionName)) {
-          keyStore.deleteEntry(versionName);
+        if (keyStore.containsAlias(name)) {
+          keyStore.deleteEntry(name);
         }
       } catch (KeyStoreException e) {
-        throw new IOException("Problem removing " + versionName + " from " +
-            this, e);
+        throw new IOException("Problem removing " + name + " from " + this, e);
       }
+      cache.remove(name);
+      changed = true;
+    } finally {
+      writeLock.unlock();
     }
-    try {
-      if (keyStore.containsAlias(name)) {
-        keyStore.deleteEntry(name);
-      }
-    } catch (KeyStoreException e) {
-      throw new IOException("Problem removing " + name + " from " + this, e);
-    }
-    cache.remove(name);
-    changed = true;
   }
 
   KeyVersion innerSetKeyVersion(String versionName, byte[] material,
@@ -249,47 +330,57 @@ public class JavaKeyStoreProvider extend
   @Override
   public KeyVersion rollNewVersion(String name,
                                     byte[] material) throws IOException {
-    Metadata meta = getMetadata(name);
-    if (meta == null) {
-      throw new IOException("Key " + name + " not found");
-    }
-    if (meta.getBitLength() != 8 * material.length) {
-      throw new IOException("Wrong key length. Required " +
-          meta.getBitLength() + ", but got " + (8 * material.length));
-    }
-    int nextVersion = meta.addVersion();
-    String versionName = buildVersionName(name, nextVersion);
-    return innerSetKeyVersion(versionName, material, meta.getCipher());
+    writeLock.lock();
+    try {
+      Metadata meta = getMetadata(name);
+      if (meta == null) {
+        throw new IOException("Key " + name + " not found");
+      }
+      if (meta.getBitLength() != 8 * material.length) {
+        throw new IOException("Wrong key length. Required " +
+            meta.getBitLength() + ", but got " + (8 * material.length));
+      }
+      int nextVersion = meta.addVersion();
+      String versionName = buildVersionName(name, nextVersion);
+      return innerSetKeyVersion(versionName, material, meta.getCipher());
+    } finally {
+      writeLock.unlock();
+    }
   }
 
   @Override
   public void flush() throws IOException {
-    if (!changed) {
-      return;
-    }
-    // put all of the updates into the keystore
-    for(Map.Entry<String, Metadata> entry: cache.entrySet()) {
+    writeLock.lock();
+    try {
+      if (!changed) {
+        return;
+      }
+      // put all of the updates into the keystore
+      for(Map.Entry<String, Metadata> entry: cache.entrySet()) {
+        try {
+          keyStore.setKeyEntry(entry.getKey(), new KeyMetadata(entry.getValue()),
+              password, null);
+        } catch (KeyStoreException e) {
+          throw new IOException("Can't set metadata key " + entry.getKey(),e );
+        }
+      }
+      // write out the keystore
+      FSDataOutputStream out = FileSystem.create(fs, path, permissions);
       try {
-        keyStore.setKeyEntry(entry.getKey(), new KeyMetadata(entry.getValue()),
-            password, null);
+        keyStore.store(out, password);
       } catch (KeyStoreException e) {
-        throw new IOException("Can't set metadata key " + entry.getKey(),e );
+        throw new IOException("Can't store keystore " + this, e);
+      } catch (NoSuchAlgorithmException e) {
+        throw new IOException("No such algorithm storing keystore " + this, e);
+      } catch (CertificateException e) {
+        throw new IOException("Certificate exception storing keystore " + this,
+            e);
       }
+      out.close();
+      changed = false;
+    } finally {
+      writeLock.unlock();
     }
-    // write out the keystore
-    FSDataOutputStream out = fs.create(path, true);
-    try {
-      keyStore.store(out, password);
-    } catch (KeyStoreException e) {
-      throw new IOException("Can't store keystore " + this, e);
-    } catch (NoSuchAlgorithmException e) {
-      throw new IOException("No such algorithm storing keystore " + this, e);
-    } catch (CertificateException e) {
-      throw new IOException("Certificate exception storing keystore " + this,
-          e);
-    }
-    out.close();
-    changed = false;
   }
 
   @Override

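For reference, a minimal sketch of exercising the new password-file lookup
(the property names come from this change; the keystore URI, the password
file name, and taking the first provider from getProviders() are illustrative
assumptions):

    Configuration conf = new Configuration();
    conf.set(KeyProviderFactory.KEY_PROVIDER_PATH,
        "jceks://file/home/user/keys.jceks");   // illustrative keystore URI
    // Hypothetical password file; it must be on the classpath (e.g. in the
    // Hadoop configuration directory) and must not end with a newline.
    conf.set(JavaKeyStoreProvider.KEYSTORE_PASSWORD_FILE_KEY, "keystore.pwd");
    KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
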
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java Fri Apr 18 16:32:35 2014
@@ -24,8 +24,12 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.net.URI;
+import java.security.NoSuchAlgorithmException;
+import java.text.MessageFormat;
 import java.util.Date;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
 
 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
@@ -34,11 +38,15 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 
+import javax.crypto.KeyGenerator;
+
 /**
  * A provider of secret key material for Hadoop applications. Provides an
  * abstraction to separate key storage from users of encryption. It
  * is intended to support getting or storing keys in a variety of ways,
  * including third party bindings.
+ * <P/>
+ * <code>KeyProvider</code> implementations must be thread safe.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
@@ -99,21 +107,34 @@ public abstract class KeyProvider {
     private final static String CIPHER_FIELD = "cipher";
     private final static String BIT_LENGTH_FIELD = "bitLength";
     private final static String CREATED_FIELD = "created";
+    private final static String DESCRIPTION_FIELD = "description";
     private final static String VERSIONS_FIELD = "versions";
 
     private final String cipher;
     private final int bitLength;
+    private final String description;
     private final Date created;
     private int versions;
 
     protected Metadata(String cipher, int bitLength,
-                       Date created, int versions) {
+                       String description, Date created, int versions) {
       this.cipher = cipher;
       this.bitLength = bitLength;
+      this.description = description;
       this.created = created;
       this.versions = versions;
     }
 
+    public String toString() {
+      return MessageFormat.format(
+          "cipher: {0}, length: {1} description: {2} created: {3} version: {4}",
+          cipher, bitLength, description, created, versions);
+    }
+
+    public String getDescription() {
+      return description;
+    }
+
     public Date getCreated() {
       return created;
     }
@@ -165,6 +186,9 @@ public abstract class KeyProvider {
       if (created != null) {
         writer.name(CREATED_FIELD).value(created.getTime());
       }
+      if (description != null) {
+        writer.name(DESCRIPTION_FIELD).value(description);
+      }
       writer.name(VERSIONS_FIELD).value(versions);
       writer.endObject();
       writer.flush();
@@ -181,6 +205,7 @@ public abstract class KeyProvider {
       int bitLength = 0;
       Date created = null;
       int versions = 0;
+      String description = null;
       JsonReader reader = new JsonReader(new InputStreamReader
           (new ByteArrayInputStream(bytes)));
       reader.beginObject();
@@ -194,12 +219,15 @@ public abstract class KeyProvider {
           created = new Date(reader.nextLong());
         } else if (VERSIONS_FIELD.equals(field)) {
           versions = reader.nextInt();
+        } else if (DESCRIPTION_FIELD.equals(field)) {
+          description = reader.nextString();
         }
       }
       reader.endObject();
       this.cipher = cipher;
       this.bitLength = bitLength;
       this.created = created;
+      this.description = description;
       this.versions = versions;
     }
   }
@@ -210,6 +238,7 @@ public abstract class KeyProvider {
   public static class Options {
     private String cipher;
     private int bitLength;
+    private String description;
 
     public Options(Configuration conf) {
       cipher = conf.get(DEFAULT_CIPHER_NAME, DEFAULT_CIPHER);
@@ -226,13 +255,22 @@ public abstract class KeyProvider {
       return this;
     }
 
-    protected String getCipher() {
+    public Options setDescription(String description) {
+      this.description = description;
+      return this;
+    }
+
+    public String getCipher() {
       return cipher;
     }
 
-    protected int getBitLength() {
+    public int getBitLength() {
       return bitLength;
     }
+
+    public String getDescription() {
+      return description;
+    }
   }
 
   /**
@@ -272,6 +310,24 @@ public abstract class KeyProvider {
    */
   public abstract List<String> getKeys() throws IOException;
 
+
+  /**
+   * Get the key metadata for all keys.
+   *
+   * @return a Map with all the keys and their metadata
+   * @throws IOException
+   */
+  public Map<String, Metadata> getKeysMetadata() throws IOException {
+    Map<String, Metadata> keysMetadata = new LinkedHashMap<String, Metadata>();
+    for (String key : getKeys()) {
+      Metadata meta = getMetadata(key);
+      if (meta != null) {
+        keysMetadata.put(key, meta);
+      }
+    }
+    return keysMetadata;
+  }
+
   /**
    * Get the key material for all versions of a specific key name.
    * @return the list of key material
@@ -315,6 +371,56 @@ public abstract class KeyProvider {
                                        Options options) throws IOException;
 
   /**
+   * Get the algorithm from the cipher.
+   *
+   * @return the algorithm name
+   */
+  private String getAlgorithm(String cipher) {
+    int slash = cipher.indexOf('/');
+    if (slash == -1) {
+      return cipher;
+    } else {
+      return cipher.substring(0, slash);
+    }
+  }
+
+  /**
+   * Generates a key material.
+   *
+   * @param size length of the key.
+   * @param algorithm algorithm to use for generating the key.
+   * @return the generated key.
+   * @throws NoSuchAlgorithmException
+   */
+  protected byte[] generateKey(int size, String algorithm)
+      throws NoSuchAlgorithmException {
+    algorithm = getAlgorithm(algorithm);
+    KeyGenerator keyGenerator = KeyGenerator.getInstance(algorithm);
+    keyGenerator.init(size);
+    byte[] key = keyGenerator.generateKey().getEncoded();
+    return key;
+  }
+
+  /**
+   * Create a new key generating the material for it.
+   * The given key must not already exist.
+   * <p/>
+   * This implementation generates the key material and calls the
+   * {@link #createKey(String, byte[], Options)} method.
+   *
+   * @param name the base name of the key
+   * @param options the options for the new key.
+   * @return the version name of the first version of the key.
+   * @throws IOException
+   * @throws NoSuchAlgorithmException
+   */
+  public KeyVersion createKey(String name, Options options)
+      throws NoSuchAlgorithmException, IOException {
+    byte[] material = generateKey(options.getBitLength(), options.getCipher());
+    return createKey(name, material, options);
+  }
+
+  /**
    * Delete the given key.
    * @param name the name of the key to delete
    * @throws IOException
@@ -333,6 +439,23 @@ public abstract class KeyProvider {
                                             ) throws IOException;
 
   /**
+   * Roll a new version of the given key generating the material for it.
+   * <p/>
+   * This implementation generates the key material and calls the
+   * {@link #rollNewVersion(String, byte[])} method.
+   *
+   * @param name the basename of the key
+   * @return the name of the new version of the key
+   * @throws IOException
+   */
+  public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
+                                                       IOException {
+    Metadata meta = getMetadata(name);
+    byte[] material = generateKey(meta.getBitLength(), meta.getCipher());
+    return rollNewVersion(name, material);
+  }
+
+  /**
    * Ensures that any changes to the keys are written to persistent store.
    * @throws IOException
    */

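A short sketch of the new generate-and-create path (the key name and
description are illustrative; options() and flush() are existing KeyProvider
APIs):

    KeyProvider.Options options = KeyProvider.options(conf)
        .setDescription("payroll master key");  // setDescription() is new here
    // createKey(name, options) generates the material via
    // generateKey(bitLength, cipher) and then delegates to
    // createKey(name, material, options)
    KeyVersion kv = provider.createKey("payroll-key", options);
    provider.flush();
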
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java Fri Apr 18 16:32:35 2014
@@ -23,6 +23,7 @@ import java.io.PrintStream;
 import java.security.InvalidParameterException;
 import java.security.NoSuchAlgorithmException;
 import java.util.List;
+import java.util.Map;
 
 import javax.crypto.KeyGenerator;
 
@@ -45,6 +46,7 @@ public class KeyShell extends Configured
       "   [" + RollCommand.USAGE + "]\n" +
       "   [" + DeleteCommand.USAGE + "]\n" +
       "   [" + ListCommand.USAGE + "]\n";
+  private static final String LIST_METADATA = "keyShell.list.metadata";
 
   private boolean interactive = false;
   private Command command = null;
@@ -121,6 +123,8 @@ public class KeyShell extends Configured
       } else if (args[i].equals("--provider")) {
         userSuppliedProvider = true;
         getConf().set(KeyProviderFactory.KEY_PROVIDER_PATH, args[++i]);
+      } else if (args[i].equals("--metadata")) {
+        getConf().setBoolean(LIST_METADATA, true);
       } else if (args[i].equals("-i") || (args[i].equals("--interactive"))) {
         interactive = true;
       } else if (args[i].equals("--help")) {
@@ -185,16 +189,6 @@ public class KeyShell extends Configured
       return provider;
     }
 
-    protected byte[] generateKey(int size, String algorithm)
-        throws NoSuchAlgorithmException {
-      out.println("Generating key using size: " + size + " and algorithm: "
-          + algorithm);
-      KeyGenerator keyGenerator = KeyGenerator.getInstance(algorithm);
-      keyGenerator.init(size);
-      byte[] key = keyGenerator.generateKey().getEncoded();
-      return key;
-    }
-
     protected void printProviderWritten() {
         out.println(provider.getClass().getName() + " has been updated.");
     }
@@ -211,11 +205,15 @@ public class KeyShell extends Configured
   }
 
   private class ListCommand extends Command {
-    public static final String USAGE = "list <keyname> [--provider] [--help]";
+    public static final String USAGE =
+        "list [--provider] [--metadata] [--help]";
     public static final String DESC =
         "The list subcommand displays the keynames contained within \n" +
         "a particular provider - as configured in core-site.xml or " +
-        "indicated\nthrough the --provider argument.";
+        "indicated\nthrough the --provider argument.\n" +
+        "If the --metadata option is used, the keys metadata will be printed";
+
+    private boolean metadata = false;
 
     public boolean validate() {
       boolean rc = true;
@@ -227,16 +225,24 @@ public class KeyShell extends Configured
             + "you MUST use the --provider argument.");
         rc = false;
       }
+      metadata = getConf().getBoolean(LIST_METADATA, false);
       return rc;
     }
 
     public void execute() throws IOException {
       List<String> keys;
       try {
-        keys = provider.getKeys();
         out.println("Listing keys for KeyProvider: " + provider.toString());
-        for (String keyName : keys) {
-          out.println(keyName);
+        if (metadata) {
+          Map<String, Metadata> keysMeta = provider.getKeysMetadata();
+          for (Map.Entry<String, Metadata> entry : keysMeta.entrySet()) {
+            out.println(entry.getKey() + " : " + entry.getValue());
+          }
+        } else {
+          keys = provider.getKeys();
+          for (String keyName : keys) {
+            out.println(keyName);
+          }
         }
       } catch (IOException e) {
         out.println("Cannot list keys for KeyProvider: " + provider.toString()
@@ -289,9 +295,7 @@ public class KeyShell extends Configured
         out.println("Rolling key version from KeyProvider: "
             + provider.toString() + " for key name: " + keyName);
         try {
-          byte[] material = null;
-          material = generateKey(md.getBitLength(), md.getAlgorithm());
-          provider.rollNewVersion(keyName, material);
+          provider.rollNewVersion(keyName);
           out.println(keyName + " has been successfully rolled.");
           provider.flush();
           printProviderWritten();
@@ -423,9 +427,7 @@ public class KeyShell extends Configured
       warnIfTransientProvider();
       try {
         Options options = KeyProvider.options(getConf());
-        String alg = getAlgorithm(options.getCipher());
-        byte[] material = generateKey(options.getBitLength(), alg);
-        provider.createKey(keyName, material, options);
+        provider.createKey(keyName, options);
         out.println(keyName + " has been successfully created.");
         provider.flush();
         printProviderWritten();
@@ -441,19 +443,6 @@ public class KeyShell extends Configured
       }
     }
 
-    /**
-     * Get the algorithm from the cipher.
-     * @return the algorithm name
-     */
-    public String getAlgorithm(String cipher) {
-      int slash = cipher.indexOf('/');
-      if (slash == - 1) {
-        return cipher;
-      } else {
-        return cipher.substring(0, slash);
-      }
-    }
-
     @Override
     public String getUsage() {
       return USAGE + ":\n\n" + DESC;

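The new --metadata flag can also be driven programmatically; a sketch,
assuming KeyShell is run through the usual Tool/ToolRunner plumbing (the
provider URI is illustrative):

    int rc = ToolRunner.run(new Configuration(), new KeyShell(), new String[] {
        "list", "--provider", "jceks://file/tmp/keys.jceks", "--metadata"});
    // prints one "<keyname> : <metadata>" line per key via getKeysMetadata()
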
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java Fri Apr 18 16:32:35 2014
@@ -55,7 +55,7 @@ public class UserProvider extends KeyPro
   }
 
   @Override
-  public KeyVersion getKeyVersion(String versionName) {
+  public synchronized KeyVersion getKeyVersion(String versionName) {
     byte[] bytes = credentials.getSecretKey(new Text(versionName));
     if (bytes == null) {
       return null;
@@ -64,7 +64,7 @@ public class UserProvider extends KeyPro
   }
 
   @Override
-  public Metadata getMetadata(String name) throws IOException {
+  public synchronized Metadata getMetadata(String name) throws IOException {
     if (cache.containsKey(name)) {
       return cache.get(name);
     }
@@ -78,7 +78,7 @@ public class UserProvider extends KeyPro
   }
 
   @Override
-  public KeyVersion createKey(String name, byte[] material,
+  public synchronized KeyVersion createKey(String name, byte[] material,
                                Options options) throws IOException {
     Text nameT = new Text(name);
     if (credentials.getSecretKey(nameT) != null) {
@@ -89,7 +89,7 @@ public class UserProvider extends KeyPro
           options.getBitLength() + ", but got " + (8 * material.length));
     }
     Metadata meta = new Metadata(options.getCipher(), options.getBitLength(),
-        new Date(), 1);
+        options.getDescription(), new Date(), 1);
     cache.put(name, meta);
     String versionName = buildVersionName(name, 0);
     credentials.addSecretKey(nameT, meta.serialize());
@@ -98,7 +98,7 @@ public class UserProvider extends KeyPro
   }
 
   @Override
-  public void deleteKey(String name) throws IOException {
+  public synchronized void deleteKey(String name) throws IOException {
     Metadata meta = getMetadata(name);
     if (meta == null) {
       throw new IOException("Key " + name + " does not exist in " + this);
@@ -111,7 +111,7 @@ public class UserProvider extends KeyPro
   }
 
   @Override
-  public KeyVersion rollNewVersion(String name,
+  public synchronized KeyVersion rollNewVersion(String name,
                                     byte[] material) throws IOException {
     Metadata meta = getMetadata(name);
     if (meta == null) {
@@ -134,7 +134,7 @@ public class UserProvider extends KeyPro
   }
 
   @Override
-  public void flush() {
+  public synchronized void flush() {
     user.addCredentials(credentials);
   }
 
@@ -151,7 +151,7 @@ public class UserProvider extends KeyPro
   }
 
   @Override
-  public List<String> getKeys() throws IOException {
+  public synchronized List<String> getKeys() throws IOException {
     List<String> list = new ArrayList<String>();
     List<Text> keys = credentials.getAllSecretKeys();
     for (Text key : keys) {
@@ -163,7 +163,7 @@ public class UserProvider extends KeyPro
   }
 
   @Override
-  public List<KeyVersion> getKeyVersions(String name) throws IOException {
+  public synchronized List<KeyVersion> getKeyVersions(String name) throws IOException {
       List<KeyVersion> list = new ArrayList<KeyVersion>();
       Metadata km = getMetadata(name);
       if (km != null) {

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java Fri Apr 18 16:32:35 2014
@@ -22,7 +22,6 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.EnumSet;
 import java.util.NoSuchElementException;
 import java.util.StringTokenizer;
 
@@ -75,6 +74,8 @@ public class DF extends Shell {
       return this.filesystem;
     } else {
       run();
+      verifyExitCode();
+      parseOutput();
       return filesystem;
     }
   }
@@ -114,14 +115,7 @@ public class DF extends Shell {
       this.mount = dirFile.getCanonicalPath().substring(0, 2);
     } else {
       run();
-      // Skip parsing if df was not successful
-      if (getExitCode() != 0) {
-        StringBuffer sb = new StringBuffer("df could not be run successfully: ");
-        for (String line: output) {
-          sb.append(line);
-        }
-        throw new IOException(sb.toString());
-      }
+      verifyExitCode();
       parseOutput();
     }
 
@@ -204,6 +198,17 @@ public class DF extends Shell {
     }
   }
 
+  private void verifyExitCode() throws IOException {
+    if (getExitCode() != 0) {
+      StringBuilder sb =
+          new StringBuilder("df could not be run successfully: ");
+      for (String line : output) {
+        sb.append(line);
+      }
+      throw new IOException(sb.toString());
+    }
+  }
+
   public static void main(String[] args) throws Exception {
     String path = ".";
     if (args.length > 0)

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java Fri Apr 18 16:32:35 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.util.Options;
@@ -836,21 +837,24 @@ public class MapFile {
 
     Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getLocal(conf);
-    MapFile.Reader reader = new MapFile.Reader(fs, in, conf);
-    MapFile.Writer writer =
-      new MapFile.Writer(conf, fs, out,
-          reader.getKeyClass().asSubclass(WritableComparable.class),
-          reader.getValueClass());
-
-    WritableComparable key =
-      ReflectionUtils.newInstance(reader.getKeyClass().asSubclass(WritableComparable.class), conf);
-    Writable value =
-      ReflectionUtils.newInstance(reader.getValueClass().asSubclass(Writable.class), conf);
-
-    while (reader.next(key, value))               // copy all entries
-      writer.append(key, value);
-
-    writer.close();
+    MapFile.Reader reader = null;
+    MapFile.Writer writer = null;
+    try {
+      reader = new MapFile.Reader(fs, in, conf);
+      writer =
+        new MapFile.Writer(conf, fs, out,
+            reader.getKeyClass().asSubclass(WritableComparable.class),
+            reader.getValueClass());
+
+      WritableComparable key = ReflectionUtils.newInstance(reader.getKeyClass()
+        .asSubclass(WritableComparable.class), conf);
+      Writable value = ReflectionUtils.newInstance(reader.getValueClass()
+        .asSubclass(Writable.class), conf);
+
+      while (reader.next(key, value))               // copy all entries
+        writer.append(key, value);
+    } finally {
+      IOUtils.cleanup(LOG, writer, reader);
+    }
   }
-
 }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java Fri Apr 18 16:32:35 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.ipc;
 
 import java.util.Arrays;
 import java.util.UUID;
+import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -183,6 +184,8 @@ public class RetryCache {
   private final long expirationTime;
   private String cacheName;
 
+  private final ReentrantLock lock = new ReentrantLock();
+
   /**
    * Constructor
    * @param cacheName name to identify the cache by
@@ -206,6 +209,13 @@ public class RetryCache {
         || Arrays.equals(Server.getClientId(), RpcConstants.DUMMY_CLIENT_ID);
   }
 
+  public void lock() {
+    this.lock.lock();
+  }
+
+  public void unlock() {
+    this.lock.unlock();
+  }
 
   private void incrCacheClearedCounter() {
     retryCacheMetrics.incrCacheCleared();
@@ -247,7 +257,8 @@ public class RetryCache {
    */
   private CacheEntry waitForCompletion(CacheEntry newEntry) {
     CacheEntry mapEntry = null;
-    synchronized (this) {
+    lock.lock();
+    try {
       mapEntry = set.get(newEntry);
       // If an entry in the cache does not exist, add a new one
       if (mapEntry == null) {
@@ -262,6 +273,8 @@ public class RetryCache {
       } else {
         retryCacheMetrics.incrCacheHit();
       }
+    } finally {
+      lock.unlock();
     }
     // Entry already exists in cache. Wait for completion and return its state
     Preconditions.checkNotNull(mapEntry,
@@ -292,8 +305,11 @@ public class RetryCache {
   public void addCacheEntry(byte[] clientId, int callId) {
     CacheEntry newEntry = new CacheEntry(clientId, callId, System.nanoTime()
         + expirationTime, true);
-    synchronized(this) {
+    lock.lock();
+    try {
       set.put(newEntry);
+    } finally {
+      lock.unlock();
     }
     retryCacheMetrics.incrCacheUpdated();
   }
@@ -303,8 +319,11 @@ public class RetryCache {
     // since the entry is loaded from editlog, we can assume it succeeded.    
     CacheEntry newEntry = new CacheEntryWithPayload(clientId, callId, payload,
         System.nanoTime() + expirationTime, true);
-    synchronized(this) {
+    lock.lock();
+    try {
       set.put(newEntry);
+    } finally {
+      lock.unlock();
     }
     retryCacheMetrics.incrCacheUpdated();
   }

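The new public lock()/unlock() pair lets callers serialize compound
operations against the cache externally; a sketch (retryCache being any
RetryCache instance):

    retryCache.lock();
    try {
      // several lookups/updates happen atomically with respect to
      // waitForCompletion() and addCacheEntry(), which take the same lock
    } finally {
      retryCache.unlock();
    }
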
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri Apr 18 16:32:35 2014
@@ -1983,7 +1983,7 @@ public abstract class Server {
         // authentication
         if (user != null && user.getRealUser() != null
             && (authMethod != AuthMethod.TOKEN)) {
-          ProxyUsers.authorize(user, this.getHostAddress(), conf);
+          ProxyUsers.authorize(user, this.getHostAddress());
         }
         authorize(user, protocolName, getHostInetAddress());
         if (LOG.isDebugEnabled()) {
@@ -2107,16 +2107,15 @@ public abstract class Server {
             if (e instanceof UndeclaredThrowableException) {
               e = e.getCause();
             }
-            String logMsg = Thread.currentThread().getName() + ", call " + call + ": error: " + e;
-            if (e instanceof RuntimeException || e instanceof Error) {
+            String logMsg = Thread.currentThread().getName() + ", call " + call;
+            if (exceptionsHandler.isTerse(e.getClass())) {
+              // Don't log the whole stack trace. Way too noisy!
+              LOG.info(logMsg + ": " + e);
+            } else if (e instanceof RuntimeException || e instanceof Error) {
               // These exception types indicate something is probably wrong
               // on the server side, as opposed to just a normal exceptional
               // result.
               LOG.warn(logMsg, e);
-            } else if (exceptionsHandler.isTerse(e.getClass())) {
-             // Don't log the whole stack trace of these exceptions.
-              // Way too noisy!
-              LOG.info(logMsg);
             } else {
               LOG.info(logMsg, e);
             }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSink.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSink.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSink.java Fri Apr 18 16:32:35 2014
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.metrics2;
 
+import java.io.Closeable;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
@@ -26,7 +28,9 @@ import org.apache.hadoop.classification.
  * Implementations of this interface consume the {@link MetricsRecord} generated
  * from {@link MetricsSource}. It registers with {@link MetricsSystem} which
  * periodically pushes the {@link MetricsRecord} to the sink using
- * {@link #putMetrics(MetricsRecord)} method.
+ * {@link #putMetrics(MetricsRecord)} method.  If the implementing class also
+ * implements {@link Closeable}, then the MetricsSystem will close the sink when
+ * it is stopped.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java Fri Apr 18 16:32:35 2014
@@ -85,7 +85,7 @@ class MetricsConfig extends SubsetConfig
   private ClassLoader pluginLoader;
 
   MetricsConfig(Configuration c, String prefix) {
-    super(c, prefix.toLowerCase(Locale.US), ".");
+    super(c, prefix, ".");
   }
 
   static MetricsConfig create(String prefix) {

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java Fri Apr 18 16:32:35 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metrics2.impl;
 
+import java.io.Closeable;
 import java.util.Random;
 import java.util.concurrent.*;
 
@@ -25,6 +26,7 @@ import static com.google.common.base.Pre
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
 import org.apache.hadoop.metrics2.lib.MetricsRegistry;
 import org.apache.hadoop.metrics2.lib.MutableCounterInt;
@@ -198,6 +200,9 @@ class MetricsSinkAdapter implements Sink
     } catch (InterruptedException e) {
       LOG.warn("Stop interrupted", e);
     }
+    if (sink instanceof Closeable) {
+      IOUtils.cleanup(LOG, (Closeable)sink);
+    }
   }
 
   String name() {

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/FileSink.java Fri Apr 18 16:32:35 2014
@@ -18,8 +18,10 @@
 
 package org.apache.hadoop.metrics2.sink;
 
+import java.io.Closeable;
 import java.io.File;
 import java.io.FileWriter;
+import java.io.IOException;
 import java.io.PrintWriter;
 
 import org.apache.commons.configuration.SubsetConfiguration;
@@ -36,7 +38,7 @@ import org.apache.hadoop.metrics2.Metric
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class FileSink implements MetricsSink {
+public class FileSink implements MetricsSink, Closeable {
   private static final String FILENAME_KEY = "filename";
   private PrintWriter writer;
 
@@ -81,4 +83,9 @@ public class FileSink implements Metrics
   public void flush() {
     writer.flush();
   }
+
+  @Override
+  public void close() throws IOException {
+    writer.close();
+  }
 }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java Fri Apr 18 16:32:35 2014
@@ -31,7 +31,6 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java Fri Apr 18 16:32:35 2014
@@ -251,7 +251,6 @@ public class LdapGroupsMapping
     return groups;
   }
 
-  @SuppressWarnings("deprecation")
   DirContext getDirContext() throws NamingException {
     if (ctx == null) {
       // Set up the initial environment for LDAP connectivity

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java Fri Apr 18 16:32:35 2014
@@ -39,9 +39,6 @@ import org.apache.commons.logging.LogFac
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Unstable
 public class NetgroupCache {
-
-  private static final Log LOG = LogFactory.getLog(NetgroupCache.class);
-
   private static boolean netgroupToUsersMapUpdated = true;
   private static Map<String, Set<String>> netgroupToUsersMap =
     new ConcurrentHashMap<String, Set<String>>();

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java Fri Apr 18 16:32:35 2014
@@ -30,7 +30,6 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.TreeMap;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -57,7 +56,6 @@ import org.apache.hadoop.ipc.StandbyExce
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.util.StringUtils;
 
 /**
  * A utility class for dealing with SASL on RPC server
@@ -67,10 +65,7 @@ import org.apache.hadoop.util.StringUtil
 public class SaslRpcServer {
   public static final Log LOG = LogFactory.getLog(SaslRpcServer.class);
   public static final String SASL_DEFAULT_REALM = "default";
-  public static final Map<String, String> SASL_PROPS = 
-      new TreeMap<String, String>();
   private static SaslServerFactory saslFactory;
-  private static SaslPropertiesResolver resolver;
 
   public static enum QualityOfProtection {
     AUTHENTICATION("auth"),

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Fri Apr 18 16:32:35 2014
@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.security;
 
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN;
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT;
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import java.io.File;
 import java.io.IOException;
@@ -30,6 +31,7 @@ import java.security.Principal;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -45,9 +47,9 @@ import javax.security.auth.kerberos.Kerb
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.kerberos.KerberosTicket;
 import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
 import javax.security.auth.login.LoginContext;
 import javax.security.auth.login.LoginException;
-import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
 import javax.security.auth.spi.LoginModule;
 
 import org.apache.commons.logging.Log;
@@ -68,7 +70,6 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
-import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -990,7 +991,9 @@ public class UserGroupInformation {
     // register most recent relogin attempt
     user.setLastLogin(now);
     try {
-      LOG.info("Initiating logout for " + getUserName());
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Initiating logout for " + getUserName());
+      }
       synchronized (UserGroupInformation.class) {
         // clear up the kerberos state. But the tokens are not cleared! As per
         // the Java kerberos login module code, only the kerberos credentials
@@ -1001,7 +1004,9 @@ public class UserGroupInformation {
         login = newLoginContext(
             HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, getSubject(),
             new HadoopConfiguration());
-        LOG.info("Initiating re-login for " + keytabPrincipal);
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Initiating re-login for " + keytabPrincipal);
+        }
         start = Time.now();
         login.login();
         metrics.loginSuccess.add(Time.now() - start);
@@ -1042,7 +1047,9 @@ public class UserGroupInformation {
     // register most recent relogin attempt
     user.setLastLogin(now);
     try {
-      LOG.info("Initiating logout for " + getUserName());
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Initiating logout for " + getUserName());
+      }
       //clear up the kerberos state. But the tokens are not cleared! As per 
       //the Java kerberos login module code, only the kerberos credentials
       //are cleared
@@ -1052,7 +1059,9 @@ public class UserGroupInformation {
       login = 
         newLoginContext(HadoopConfiguration.USER_KERBEROS_CONFIG_NAME, 
             getSubject(), new HadoopConfiguration());
-      LOG.info("Initiating re-login for " + getUserName());
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Initiating re-login for " + getUserName());
+      }
       login.login();
       setLogin(login);
     } catch (LoginException le) {
@@ -1407,7 +1416,7 @@ public class UserGroupInformation {
   public synchronized
   Collection<Token<? extends TokenIdentifier>> getTokens() {
     return Collections.unmodifiableCollection(
-        getCredentialsInternal().getAllTokens());
+        new ArrayList<Token<?>>(getCredentialsInternal().getAllTokens()));
   }
 
   /**
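
Note: besides demoting the relogin messages to debug level, this change makes
getTokens() return an unmodifiable copy rather than an unmodifiable view, so
callers are insulated from concurrent updates to the UGI's credentials. An
illustrative fragment (the iteration scenario is hypothetical):

  UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
  // Snapshot semantics: even if another thread adds a token to this UGI
  // mid-loop, iterating the copy cannot throw
  // ConcurrentModificationException.
  for (Token<? extends TokenIdentifier> t : ugi.getTokens()) {
    System.out.println(t.getKind());
  }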

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java Fri Apr 18 16:32:35 2014
@@ -19,10 +19,12 @@
 package org.apache.hadoop.security.authorize;
 
 import java.net.InetAddress;
+import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -41,12 +43,16 @@ public class ProxyUsers {
   public static final String CONF_GROUPS = ".groups";
   public static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
   public static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
+  public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers";
+  
   private static boolean init = false;
   // list of groups and hosts per proxyuser
   private static Map<String, Collection<String>> proxyGroups = 
     new HashMap<String, Collection<String>>();
   private static Map<String, Collection<String>> proxyHosts = 
     new HashMap<String, Collection<String>>();
+  private static Collection<String> proxyServers =
+    new HashSet<String>();
 
   /**
    * reread the conf and get new values for "hadoop.proxyuser.*.groups/hosts"
@@ -62,15 +68,16 @@ public class ProxyUsers {
    */
   public static synchronized void refreshSuperUserGroupsConfiguration(Configuration conf) {
     
-    // remove alle existing stuff
+    // remove all existing stuff
     proxyGroups.clear();
     proxyHosts.clear();
+    proxyServers.clear();
 
     // get all the new keys for groups
     String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS;
     Map<String,String> allMatchKeys = conf.getValByRegex(regex);
     for(Entry<String, String> entry : allMatchKeys.entrySet()) {
-      Collection<String> groups = StringUtils.getStringCollection(entry.getValue());
+      Collection<String> groups = StringUtils.getTrimmedStringCollection(entry.getValue());
       proxyGroups.put(entry.getKey(), groups );
       //cache the groups. This is needed for NetGroups
       Groups.getUserToGroupsMappingService(conf).cacheGroupsAdd(
@@ -82,12 +89,26 @@ public class ProxyUsers {
     allMatchKeys = conf.getValByRegex(regex);
     for(Entry<String, String> entry : allMatchKeys.entrySet()) {
       proxyHosts.put(entry.getKey(),
-          StringUtils.getStringCollection(entry.getValue()));
+          StringUtils.getTrimmedStringCollection(entry.getValue()));
     }
     
+    // trusted proxy servers such as http proxies
+    for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
+      InetSocketAddress addr = new InetSocketAddress(host, 0);
+      if (!addr.isUnresolved()) {
+        proxyServers.add(addr.getAddress().getHostAddress());
+      }
+    }
     init = true;
   }
 
+  public static synchronized boolean isProxyServer(String remoteAddr) { 
+    if(!init) {
+      refreshSuperUserGroupsConfiguration(); 
+    }
+    return proxyServers.contains(remoteAddr);
+  }
+
   /**
    * Returns configuration key for effective user groups allowed for a superuser
    * 
@@ -113,11 +134,10 @@ public class ProxyUsers {
    * 
    * @param user ugi of the effective or proxy user which contains a real user
    * @param remoteAddress the ip address of client
-   * @param newConf configuration
    * @throws AuthorizationException
    */
   public static synchronized void authorize(UserGroupInformation user, 
-      String remoteAddress, Configuration newConf) throws AuthorizationException {
+      String remoteAddress) throws AuthorizationException {
 
     if(!init) {
       refreshSuperUserGroupsConfiguration(); 
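
Note: the new hadoop.proxyservers key is resolved once per refresh; each
configured host is turned into an IP address up front, so isProxyServer() is
a plain set lookup against the caller's address. Roughly (hostnames and the
address are illustrative):

  Configuration conf = new Configuration();
  conf.set("hadoop.proxyservers", "gw1.example.com,gw2.example.com");
  ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
  // remoteAddr as seen by the server, e.g. taken from the RPC connection
  boolean trusted = ProxyUsers.isProxyServer("10.0.0.12");

Because resolution happens at refresh time, a DNS change for a proxy server
is not picked up until the configuration is refreshed again.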

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java Fri Apr 18 16:32:35 2014
@@ -127,10 +127,14 @@ public class SSLFactory implements Conne
   }
 
   private HostnameVerifier getHostnameVerifier(Configuration conf)
+      throws GeneralSecurityException, IOException {
+    return getHostnameVerifier(conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").
+        trim().toUpperCase());
+  }
+
+  public static HostnameVerifier getHostnameVerifier(String verifier)
     throws GeneralSecurityException, IOException {
     HostnameVerifier hostnameVerifier;
-    String verifier =
-      conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").trim().toUpperCase();
     if (verifier.equals("DEFAULT")) {
       hostnameVerifier = SSLHostnameVerifier.DEFAULT;
     } else if (verifier.equals("DEFAULT_AND_LOCALHOST")) {
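
Note: extracting the lookup into a public static overload lets callers that
have no Configuration at hand map a verifier name straight to an instance.
Since the trim()/toUpperCase() normalization stays in the Configuration-based
overload, direct callers should pass the canonical upper-case name, e.g.
(assuming "STRICT" remains one of the names recognized further down):

  javax.net.ssl.HostnameVerifier hv = SSLFactory.getHostnameVerifier("STRICT");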

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java Fri Apr 18 16:32:35 2014
@@ -31,9 +31,6 @@
 
 package org.apache.hadoop.security.ssl;
 
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.security.cert.Certificate;
@@ -44,6 +41,7 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Set;
 import java.util.StringTokenizer;
 import java.util.TreeSet;
 
@@ -52,6 +50,9 @@ import javax.net.ssl.SSLPeerUnverifiedEx
 import javax.net.ssl.SSLSession;
 import javax.net.ssl.SSLSocket;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  ************************************************************************
  * Copied from the not-yet-commons-ssl project at
@@ -224,7 +225,6 @@ public interface SSLHostnameVerifier ext
             public final String toString() { return "ALLOW_ALL"; }
         };
 
-    @SuppressWarnings("unchecked")
     abstract class AbstractVerifier implements SSLHostnameVerifier {
 
         /**
@@ -378,7 +378,7 @@ public interface SSLHostnameVerifier ext
             // STRICT implementations of the HostnameVerifier only use the
             // first CN provided.  All other CNs are ignored.
             // (Firefox, wget, curl, Sun Java 1.4, 5, 6 all work this way).
-            TreeSet names = new TreeSet();
+            final Set<String> names = new TreeSet<String>();
             if (cns != null && cns.length > 0 && cns[0] != null) {
                 names.add(cns[0]);
                 if (ie6) {
@@ -404,10 +404,9 @@ public interface SSLHostnameVerifier ext
 
             boolean match = false;
             out:
-            for (Iterator it = names.iterator(); it.hasNext();) {
+            for (Iterator<String> it = names.iterator(); it.hasNext();) {
                 // Don't trim the CN, though!
-                String cn = (String) it.next();
-                cn = cn.toLowerCase();
+                final String cn = it.next().toLowerCase();
                 // Store CN in StringBuffer in case we need to report an error.
                 buf.append(" <");
                 buf.append(cn);
@@ -508,10 +507,9 @@ public interface SSLHostnameVerifier ext
         }
     }
 
-    @SuppressWarnings("unchecked")
     static class Certificates {
       public static String[] getCNs(X509Certificate cert) {
-        LinkedList cnList = new LinkedList();
+        final List<String> cnList = new LinkedList<String>();
         /*
           Sebastian Hauer's original StrictSSLProtocolSocketFactory used
           getName() and had the following comment:
@@ -568,8 +566,8 @@ public interface SSLHostnameVerifier ext
        * @return Array of SubjectALT DNS names stored in the certificate.
        */
       public static String[] getDNSSubjectAlts(X509Certificate cert) {
-          LinkedList subjectAltList = new LinkedList();
-          Collection c = null;
+          final List<String> subjectAltList = new LinkedList<String>();
+          Collection<List<?>> c = null;
           try {
               c = cert.getSubjectAlternativeNames();
           }
@@ -578,9 +576,9 @@ public interface SSLHostnameVerifier ext
               cpe.printStackTrace();
           }
           if (c != null) {
-              Iterator it = c.iterator();
+              Iterator<List<?>> it = c.iterator();
               while (it.hasNext()) {
-                  List list = (List) it.next();
+                  List<?> list = it.next();
                   int type = ((Integer) list.get(0)).intValue();
                   // If type is 2, then we've got a dNSName
                   if (type == 2) {
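
Note: the generics cleanup pins down the shape the JDK actually returns:
getSubjectAlternativeNames() yields a Collection<List<?>> in which element 0
is the Integer type code and element 1 the value. Outside this class the same
extraction looks like (illustrative; getSubjectAlternativeNames() throws
CertificateParsingException, handled elsewhere):

  Collection<List<?>> sans = cert.getSubjectAlternativeNames();
  if (sans != null) {
    for (List<?> san : sans) {
      if (((Integer) san.get(0)).intValue() == 2) { // 2 == dNSName
        String dns = (String) san.get(1);           // the DNS SubjectAlt name
      }
    }
  }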

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java Fri Apr 18 16:32:35 2014
@@ -162,7 +162,7 @@ public class Token<T extends TokenIdenti
 
   /**
    * Set the token kind. This is only intended to be used by services that
-   * wrap another service's token, such as HFTP wrapping HDFS.
+   * wrap another service's token.
    * @param newKind
    */
   @InterfaceAudience.Private

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java Fri Apr 18 16:32:35 2014
@@ -28,9 +28,11 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Date;
 import java.util.Iterator;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Set;
 import java.util.StringTokenizer;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -351,12 +353,15 @@ public class StringUtils {
 
   /**
    * Splits a comma separated value <code>String</code>, trimming leading and trailing whitespace on each value.
+   * Duplicate and empty values are removed.
    * @param str a comma separated <String> with values
    * @return a <code>Collection</code> of <code>String</code> values
    */
   public static Collection<String> getTrimmedStringCollection(String str){
-    return new ArrayList<String>(
+    Set<String> set = new LinkedHashSet<String>(
       Arrays.asList(getTrimmedStrings(str)));
+    set.remove("");
+    return set;
   }
   
   /**
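
Note: switching from ArrayList to LinkedHashSet changes observable behavior,
as the new javadoc line says: insertion order is kept, but duplicates and
empty segments disappear. Illustrative input and output:

  // " a ,b,,a "  ->  ["a", "b"]   (trimmed, empties dropped, dups collapsed)
  Collection<String> vals = StringUtils.getTrimmedStringCollection(" a ,b,,a ");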

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj Fri Apr 18 16:32:35 2014
@@ -58,6 +58,7 @@
     <SnappyInclude Condition="Exists('$(CustomSnappyInclude)') And '$(SnappyInclude)' == ''">$(CustomSnappyInclude)</SnappyInclude>
     <SnappyEnabled Condition="'$(SnappyLib)' != '' And '$(SnappyInclude)' != ''">true</SnappyEnabled>
     <IncludePath Condition="'$(SnappyEnabled)' == 'true'">$(SnappyInclude);$(IncludePath)</IncludePath>
+    <IncludePath Condition="Exists('$(ZLIB_HOME)')">$(ZLIB_HOME);$(IncludePath)</IncludePath>
   </PropertyGroup>
   <Target Name="CheckRequireSnappy">
     <Error
@@ -92,6 +93,8 @@
     <ClCompile Include="src\org\apache\hadoop\io\compress\snappy\SnappyDecompressor.c" Condition="'$(SnappyEnabled)' == 'true'">
       <AdditionalOptions>/D HADOOP_SNAPPY_LIBRARY=L\"snappy.dll\"</AdditionalOptions>
     </ClCompile>
+    <ClCompile Include="src\org\apache\hadoop\io\compress\zlib\ZlibCompressor.c" Condition="Exists('$(ZLIB_HOME)')" />
+    <ClCompile Include="src\org\apache\hadoop\io\compress\zlib\ZlibDecompressor.c" Condition="Exists('$(ZLIB_HOME)')" />
     <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4.c" />
     <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4hc.c" />
     <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Compressor.c" />
@@ -109,6 +112,9 @@
     <ClInclude Include="..\src\org\apache\hadoop\util\crc32c_tables.h" />
     <ClInclude Include="..\src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h" />
     <ClInclude Include="src\org\apache\hadoop\io\compress\snappy\org_apache_hadoop_io_compress_snappy.h" />
+    <ClInclude Include="src\org\apache\hadoop\io\compress\zlib\org_apache_hadoop_io_compress_zlib_ZlibCompressor.h" />
+    <ClInclude Include="src\org\apache\hadoop\io\compress\zlib\org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h" />
+    <ClInclude Include="src\org\apache\hadoop\io\compress\zlib\org_apache_hadoop_io_compress_zlib.h" />
     <ClInclude Include="src\org\apache\hadoop\io\nativeio\file_descriptor.h" />
     <ClInclude Include="src\org\apache\hadoop\util\bulk_crc32.h" />
     <ClInclude Include="src\org\apache\hadoop\util\crc32c_tables.h" />

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Fri Apr 18 16:32:35 2014
@@ -47,6 +47,7 @@ static int (*dlsym_deflateEnd)(z_streamp
 #endif
 
 #ifdef WINDOWS
+#include "winutils.h"
 #include <Strsafe.h>
 typedef int (__cdecl *__dlsym_deflateInit2_) (z_streamp, int, int, int, int, int, const char *, int);
 typedef int (__cdecl *__dlsym_deflate) (z_streamp, int);
@@ -379,7 +380,16 @@ Java_org_apache_hadoop_io_compress_zlib_
     }
   }
 #endif
-  return (*env)->NewStringUTF(env, HADOOP_ZLIB_LIBRARY);
+
+#ifdef WINDOWS
+  LPWSTR filename = NULL;
+  GetLibraryName(dlsym_deflateInit2_, &filename);
+  if (filename != NULL) {
+    return (*env)->NewString(env, filename, (jsize) wcslen(filename));
+  } else {
+    return (*env)->NewStringUTF(env, "Unavailable");
+  }
+#endif
 }
 
 /**

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c Fri Apr 18 16:32:35 2014
@@ -178,6 +178,11 @@ int hadoop_user_info_getgroups(struct ha
       return ret;
     }
     return 0;
+  } else if (ret != -1) {
+    // Any return code that is not -1 is considered as error.
+    // Since the user lookup was successful, there should be at least one
+    // group for this user.
+    return EIO;
   }
   ngids = realloc(uinfo->gids, sizeof(uinfo->gids[0]) * ngroups);
   if (!ngids) {

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Fri Apr 18 16:32:35 2014
@@ -452,6 +452,11 @@
   <description>The AbstractFileSystem for file: uris.</description>
 </property>
 
+<property>
+  <name>fs.AbstractFileSystem.har.impl</name>
+  <value>org.apache.hadoop.fs.HarFs</value>
+  <description>The AbstractFileSystem for har: uris.</description>
+</property> 
 
 <property>
   <name>fs.AbstractFileSystem.hdfs.impl</name>
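
Note: with fs.AbstractFileSystem.har.impl mapped to HarFs, FileContext can
resolve har: URIs directly. A hypothetical listing (the archive path is
invented, and conf is an ordinary Configuration):

  FileContext fc = FileContext.getFileContext(conf);
  FileStatus[] parts =
      fc.util().listStatus(new Path("har:///user/alice/archive.har"));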

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java Fri Apr 18 16:32:35 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.crypto.key;
 
+import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 
 import org.apache.hadoop.fs.Path;
@@ -24,16 +25,21 @@ import org.junit.Test;
 
 import java.io.IOException;
 import java.net.URI;
+import java.security.NoSuchAlgorithmException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.List;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertArrayEquals;
 
 public class TestKeyProvider {
 
+  private static final String CIPHER = "AES";
+
   @Test
   public void testBuildVersionName() throws Exception {
     assertEquals("/a/b@3", KeyProvider.buildVersionName("/a/b", 3));
@@ -62,23 +68,47 @@ public class TestKeyProvider {
 
   @Test
   public void testMetadata() throws Exception {
+    //Metadata without description
     DateFormat format = new SimpleDateFormat("y/m/d");
     Date date = format.parse("2013/12/25");
-    KeyProvider.Metadata meta = new KeyProvider.Metadata("myCipher", 100,
+    KeyProvider.Metadata meta = new KeyProvider.Metadata("myCipher", 100, null,
         date, 123);
     assertEquals("myCipher", meta.getCipher());
     assertEquals(100, meta.getBitLength());
+    assertNull(meta.getDescription());
     assertEquals(date, meta.getCreated());
     assertEquals(123, meta.getVersions());
     KeyProvider.Metadata second = new KeyProvider.Metadata(meta.serialize());
     assertEquals(meta.getCipher(), second.getCipher());
     assertEquals(meta.getBitLength(), second.getBitLength());
+    assertNull(second.getDescription());
     assertEquals(meta.getCreated(), second.getCreated());
     assertEquals(meta.getVersions(), second.getVersions());
     int newVersion = second.addVersion();
     assertEquals(123, newVersion);
     assertEquals(124, second.getVersions());
     assertEquals(123, meta.getVersions());
+
+    //Metadata with description
+    format = new SimpleDateFormat("y/m/d");
+    date = format.parse("2013/12/25");
+    meta = new KeyProvider.Metadata("myCipher", 100,
+        "description", date, 123);
+    assertEquals("myCipher", meta.getCipher());
+    assertEquals(100, meta.getBitLength());
+    assertEquals("description", meta.getDescription());
+    assertEquals(date, meta.getCreated());
+    assertEquals(123, meta.getVersions());
+    second = new KeyProvider.Metadata(meta.serialize());
+    assertEquals(meta.getCipher(), second.getCipher());
+    assertEquals(meta.getBitLength(), second.getBitLength());
+    assertEquals(meta.getDescription(), second.getDescription());
+    assertEquals(meta.getCreated(), second.getCreated());
+    assertEquals(meta.getVersions(), second.getVersions());
+    newVersion = second.addVersion();
+    assertEquals(123, newVersion);
+    assertEquals(124, second.getVersions());
+    assertEquals(123, meta.getVersions());
   }
 
   @Test
@@ -90,9 +120,11 @@ public class TestKeyProvider {
     assertEquals("myCipher", options.getCipher());
     assertEquals(512, options.getBitLength());
     options.setCipher("yourCipher");
+    options.setDescription("description");
     options.setBitLength(128);
     assertEquals("yourCipher", options.getCipher());
     assertEquals(128, options.getBitLength());
+    assertEquals("description", options.getDescription());
     options = KeyProvider.options(new Configuration());
     assertEquals(KeyProvider.DEFAULT_CIPHER, options.getCipher());
     assertEquals(KeyProvider.DEFAULT_BITLENGTH, options.getBitLength());
@@ -109,4 +141,82 @@ public class TestKeyProvider {
     assertEquals(new Path("user:///"),
         KeyProvider.unnestUri(new URI("outer://user/")));
   }
+
+  private static class MyKeyProvider extends KeyProvider {
+    private String algorithm;
+    private int size;
+    private byte[] material;
+
+    @Override
+    public KeyVersion getKeyVersion(String versionName)
+        throws IOException {
+      return null;
+    }
+
+    @Override
+    public List<String> getKeys() throws IOException {
+      return null;
+    }
+
+    @Override
+    public List<KeyVersion> getKeyVersions(String name)
+        throws IOException {
+      return null;
+    }
+
+    @Override
+    public Metadata getMetadata(String name) throws IOException {
+      return new Metadata(CIPHER, 128, "description", new Date(), 0);
+    }
+
+    @Override
+    public KeyVersion createKey(String name, byte[] material,
+        Options options) throws IOException {
+      this.material = material;
+      return null;
+    }
+
+    @Override
+    public void deleteKey(String name) throws IOException {
+
+    }
+
+    @Override
+    public KeyVersion rollNewVersion(String name, byte[] material)
+        throws IOException {
+      this.material = material;
+      return null;
+    }
+
+    @Override
+    public void flush() throws IOException {
+
+    }
+
+    @Override
+    protected byte[] generateKey(int size, String algorithm)
+        throws NoSuchAlgorithmException {
+      this.size = size;
+      this.algorithm = algorithm;
+      return super.generateKey(size, algorithm);
+    }
+  }
+
+  @Test
+  public void testMaterialGeneration() throws Exception {
+    MyKeyProvider kp = new MyKeyProvider();
+    KeyProvider.Options options = new KeyProvider.Options(new Configuration());
+    options.setCipher(CIPHER);
+    options.setBitLength(128);
+    kp.createKey("hello", options);
+    Assert.assertEquals(128, kp.size);
+    Assert.assertEquals(CIPHER, kp.algorithm);
+    Assert.assertNotNull(kp.material);
+
+    kp = new MyKeyProvider();
+    kp.rollNewVersion("hello");
+    Assert.assertEquals(128, kp.size);
+    Assert.assertEquals(CIPHER, kp.algorithm);
+    Assert.assertNotNull(kp.material);
+  }
 }
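
Note: MyKeyProvider only records the size and algorithm and then delegates to
super.generateKey, which is what lets the test assert non-null material
without supplying any. The base implementation presumably reduces to the
standard JCE pattern, roughly (a sketch, not the verbatim Hadoop code):

  import javax.crypto.KeyGenerator;
  import java.security.NoSuchAlgorithmException;

  static byte[] generate(int size, String algorithm)
      throws NoSuchAlgorithmException {
    KeyGenerator kg = KeyGenerator.getInstance(algorithm); // e.g. "AES"
    kg.init(size);                                         // e.g. 128 bits
    return kg.generateKey().getEncoded();
  }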