Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2015/06/16 20:41:22 UTC

[03/50] [abbrv] hadoop git commit: HADOOP-12059. S3Credentials should support use of CredentialProvider. Contributed by Sean Busbey.

HADOOP-12059. S3Credentials should support use of CredentialProvider. Contributed by Sean Busbey.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2dbc40e6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2dbc40e6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2dbc40e6

Branch: refs/heads/HDFS-7240
Commit: 2dbc40e6086026ef02747223982aa68f2d328ade
Parents: 3e000a9
Author: Andrew Wang <wa...@apache.org>
Authored: Fri Jun 5 13:11:01 2015 -0700
Committer: Andrew Wang <wa...@apache.org>
Committed: Fri Jun 5 13:11:01 2015 -0700

----------------------------------------------------------------------
 .gitignore                                      |   1 +
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../apache/hadoop/security/ProviderUtils.java   |  30 ++++++
 .../alias/AbstractJavaKeyStoreProvider.java     |  10 +-
 .../alias/LocalJavaKeyStoreProvider.java        |  25 ++++-
 .../alias/TestCredentialProviderFactory.java    |  17 ++-
 .../org/apache/hadoop/fs/s3/S3Credentials.java  |  10 +-
 .../apache/hadoop/fs/s3/TestS3Credentials.java  | 107 ++++++++++++++++++-
 8 files changed, 195 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
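
For context on what the change enables: with this patch, S3Credentials resolves fs.s3.awsSecretAccessKey through Configuration.getPassword(), so the secret can live in a credential provider keystore rather than in plaintext configuration. A minimal client-side sketch, assuming a JCEKS keystore at the hypothetical path /home/user/s3.jceks that already holds the secret under the alias fs.s3.awsSecretAccessKey:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.s3.S3Credentials;

    public class S3CredentialProviderSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Point Hadoop at a credential provider instead of putting the
        // secret key in core-site.xml (keystore path is hypothetical).
        conf.set("hadoop.security.credential.provider.path",
            "jceks://file/home/user/s3.jceks");
        // The access key id can stay in plain configuration.
        conf.set("fs.s3.awsAccessKeyId", "AKASOMEACCESSKEY");

        // initialize() now resolves fs.s3.awsSecretAccessKey via
        // Configuration.getPassword(), which consults the provider first
        // and only falls back to the plain configuration value.
        S3Credentials creds = new S3Credentials();
        creds.initialize(new URI("s3://example-bucket"), conf);
        System.out.println("access key = " + creds.getAccessKey());
      }
    }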


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dbc40e6/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 779f507..cde198e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,5 +22,6 @@ hadoop-common-project/hadoop-common/src/test/resources/contract-test-options.xml
 hadoop-tools/hadoop-openstack/src/test/resources/contract-test-options.xml
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/tla/yarnregistry.toolbox
 yarnregistry.pdf
+hadoop-tools/hadoop-aws/src/test/resources/auth-keys.xml
 hadoop-tools/hadoop-aws/src/test/resources/contract-test-options.xml
 patchprocess/

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dbc40e6/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 5f4bdb8..51579da 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -634,6 +634,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12037. Fix wrong classname in example configuration of hadoop-auth
     documentation. (Masatake Iwasaki via wang)
 
+    HADOOP-12059. S3Credentials should support use of CredentialProvider.
+    (Sean Busbey via wang)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dbc40e6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
index 97d656d..b764506 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
@@ -19,8 +19,11 @@
 package org.apache.hadoop.security;
 
 import java.net.URI;
+import java.net.URISyntaxException;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
+import org.apache.hadoop.security.alias.LocalJavaKeyStoreProvider;
 
 public class ProviderUtils {
   /**
@@ -49,4 +52,31 @@ public class ProviderUtils {
     }
     return new Path(result.toString());
   }
+
+  /**
+   * Mangle given local java keystore file URI to allow use as a
+   * LocalJavaKeyStoreProvider.
+   * @param localFile absolute URI with file scheme and no authority component.
+   *                  i.e. return of File.toURI,
+   *                  e.g. file:///home/larry/creds.jceks
+   * @return URI of the form localjceks://file/home/larry/creds.jceks
+   * @throws IllegalArgumentException if localFile isn't a file URI or if it
+   *                                  has an authority component.
+   * @throws URISyntaxException if the wrapping process violates RFC 2396
+   */
+  public static URI nestURIForLocalJavaKeyStoreProvider(final URI localFile)
+      throws URISyntaxException {
+    if (!("file".equals(localFile.getScheme()))) {
+      throw new IllegalArgumentException("passed URI had a scheme other than " +
+          "file.");
+    }
+    if (localFile.getAuthority() != null) {
+      throw new IllegalArgumentException("passed URI must not have an " +
+          "authority component. For non-local keystores, please use " +
+          JavaKeyStoreProvider.class.getName());
+    }
+    return new URI(LocalJavaKeyStoreProvider.SCHEME_NAME,
+        "//file" + localFile.getSchemeSpecificPart(), localFile.getFragment());
+  }
+
 }
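
A rough usage sketch of the new helper, using the path from the javadoc example above; the nested URI follows the localjceks://file/... form that LocalJavaKeyStoreProvider registers under:

    import java.io.File;
    import java.net.URI;

    import org.apache.hadoop.security.ProviderUtils;

    public class NestUriSketch {
      public static void main(String[] args) throws Exception {
        // File.toURI() gives an absolute file: URI with no authority,
        // e.g. file:///home/larry/creds.jceks
        URI local = new File("/home/larry/creds.jceks").toURI();
        URI nested = ProviderUtils.nestURIForLocalJavaKeyStoreProvider(local);
        // Per the javadoc above: localjceks://file/home/larry/creds.jceks
        System.out.println(nested);
      }
    }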

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dbc40e6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
index 9251044..76b8cd5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.security.alias;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -60,6 +62,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
  */
 @InterfaceAudience.Private
 public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
+  public static final Log LOG = LogFactory.getLog(
+      AbstractJavaKeyStoreProvider.class);
   public static final String CREDENTIAL_PASSWORD_NAME =
       "HADOOP_CREDSTORE_PASSWORD";
   public static final String KEYSTORE_PASSWORD_FILE_KEY =
@@ -197,6 +201,9 @@ public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
   protected void initFileSystem(URI keystoreUri, Configuration conf)
       throws IOException {
     path = ProviderUtils.unnestUri(keystoreUri);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("backing jks path initialized to " + path);
+    }
   }
 
   @Override
@@ -318,9 +325,10 @@ public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
     writeLock.lock();
     try {
       if (!changed) {
+        LOG.debug("Keystore hasn't changed, returning.");
         return;
       }
-      // write out the keystore
+      LOG.debug("Writing out keystore.");
       try (OutputStream out = getOutputStreamForKeystore()) {
         keyStore.store(out, password);
       } catch (KeyStoreException e) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dbc40e6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
index 3840979..61744c7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
@@ -65,13 +65,17 @@ public final class LocalJavaKeyStoreProvider extends
 
   @Override
   protected OutputStream getOutputStreamForKeystore() throws IOException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("using '" + file + "' for output stream.");
+    }
     FileOutputStream out = new FileOutputStream(file);
     return out;
   }
 
   @Override
   protected boolean keystoreExists() throws IOException {
-    return file.exists();
+    /* The keystore loader doesn't handle zero length files. */
+    return file.exists() && (file.length() > 0);
   }
 
   @Override
@@ -122,6 +126,22 @@ public final class LocalJavaKeyStoreProvider extends
     super.initFileSystem(uri, conf);
     try {
       file = new File(new URI(getPath().toString()));
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("initialized local file as '" + file + "'.");
+        if (file.exists()) {
+          LOG.debug("the local file exists and is size " + file.length());
+          if (LOG.isTraceEnabled()) {
+            if (file.canRead()) {
+              LOG.trace("we can read the local file.");
+            }
+            if (file.canWrite()) {
+              LOG.trace("we can write the local file.");
+            }
+          }
+        } else {
+          LOG.debug("the local file does not exist.");
+        }
+      }
     } catch (URISyntaxException e) {
       throw new IOException(e);
     }
@@ -130,6 +150,9 @@ public final class LocalJavaKeyStoreProvider extends
   @Override
   public void flush() throws IOException {
     super.flush();
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Reseting permissions to '" + permissions + "'");
+    }
     if (!Shell.WINDOWS) {
       Files.setPosixFilePermissions(Paths.get(file.getCanonicalPath()),
           permissions);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dbc40e6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
index 16cb0be..73cf3f4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
@@ -23,6 +23,8 @@ import java.net.URI;
 import java.util.List;
 import java.util.Random;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -32,14 +34,27 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.ProviderUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+
+import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TestName;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 public class TestCredentialProviderFactory {
-  
+  public static final Log LOG = LogFactory.getLog(TestCredentialProviderFactory.class);
+
+  @Rule
+  public final TestName test = new TestName();
+
+  @Before
+  public void announce() {
+    LOG.info("Running test " + test.getMethodName());
+  }
+
   private static char[] chars = { 'a', 'b', 'c', 'd', 'e', 'f', 'g',
   'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w',
   'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K',

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dbc40e6/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
index 6b78ad7..fdacc3f 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs.s3;
 
+import java.io.IOException;
 import java.net.URI;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -39,8 +40,10 @@ public class S3Credentials {
   /**
    * @throws IllegalArgumentException if credentials for S3 cannot be
    * determined.
+   * @throws IOException if credential providers are misconfigured and we have
+   *                     to talk to them.
    */
-  public void initialize(URI uri, Configuration conf) {
+  public void initialize(URI uri, Configuration conf) throws IOException {
     if (uri.getHost() == null) {
       throw new IllegalArgumentException("Invalid hostname in URI " + uri);
     }
@@ -64,7 +67,10 @@ public class S3Credentials {
       accessKey = conf.getTrimmed(accessKeyProperty);
     }
     if (secretAccessKey == null) {
-      secretAccessKey = conf.getTrimmed(secretAccessKeyProperty);
+      final char[] pass = conf.getPassword(secretAccessKeyProperty);
+      if (pass != null) {
+        secretAccessKey = (new String(pass)).trim();
+      }
     }
     if (accessKey == null && secretAccessKey == null) {
       throw new IllegalArgumentException("AWS " +
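
The switch from conf.getTrimmed() to conf.getPassword() above is what enables the provider lookup: getPassword() consults any configured credential providers first and falls back to the plain configuration value, so existing plaintext setups keep working. A small sketch of that fallback, with the property name as in the patch:

    import org.apache.hadoop.conf.Configuration;

    public class GetPasswordFallbackSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // No hadoop.security.credential.provider.path is set here, so
        // getPassword() falls back to the plain configuration value.
        conf.set("fs.s3.awsSecretAccessKey", "  plaintext-secret  ");
        char[] pass = conf.getPassword("fs.s3.awsSecretAccessKey");
        // S3Credentials trims the result, mirroring the old getTrimmed() call.
        String secret = (pass == null) ? null : new String(pass).trim();
        System.out.println(secret);
      }
    }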

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dbc40e6/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3Credentials.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3Credentials.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3Credentials.java
index bcbf0dc..28e1f4b 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3Credentials.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3/TestS3Credentials.java
@@ -17,13 +17,40 @@
  */
 package org.apache.hadoop.fs.s3;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.ProviderUtils;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
+
+import java.io.File;
 import java.net.URI;
 
-import junit.framework.TestCase;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.rules.TestName;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
-import org.apache.hadoop.conf.Configuration;
+public class TestS3Credentials {
+  public static final Log LOG = LogFactory.getLog(TestS3Credentials.class);
+
+  @Rule
+  public final TestName test = new TestName();
+
+  @Before
+  public void announce() {
+    LOG.info("Running test " + test.getMethodName());
+  }
+
+  private static final String EXAMPLE_ID = "AKASOMEACCESSKEY";
+  private static final String EXAMPLE_KEY =
+      "RGV0cm9pdCBSZ/WQgY2xl/YW5lZCB1cAEXAMPLE";
 
-public class TestS3Credentials extends TestCase {
+  @Test
   public void testInvalidHostnameWithUnderscores() throws Exception {
     S3Credentials s3Credentials = new S3Credentials();
     try {
@@ -33,4 +60,78 @@ public class TestS3Credentials extends TestCase {
       assertEquals("Invalid hostname in URI s3://a:b@c_d", e.getMessage());
     }
   }
+
+  @Test
+  public void testPlaintextConfigPassword() throws Exception {
+    S3Credentials s3Credentials = new S3Credentials();
+    Configuration conf = new Configuration();
+    conf.set("fs.s3.awsAccessKeyId", EXAMPLE_ID);
+    conf.set("fs.s3.awsSecretAccessKey", EXAMPLE_KEY);
+    s3Credentials.initialize(new URI("s3://foobar"), conf);
+    assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
+        s3Credentials.getAccessKey());
+    assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
+        s3Credentials.getSecretAccessKey());
+  }
+
+  @Test
+  public void testPlaintextConfigPasswordWithWhitespace() throws Exception {
+    S3Credentials s3Credentials = new S3Credentials();
+    Configuration conf = new Configuration();
+    conf.set("fs.s3.awsAccessKeyId", "\r\n " + EXAMPLE_ID +
+        " \r\n");
+    conf.set("fs.s3.awsSecretAccessKey", "\r\n " + EXAMPLE_KEY +
+        " \r\n");
+    s3Credentials.initialize(new URI("s3://foobar"), conf);
+    assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
+        s3Credentials.getAccessKey());
+    assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
+        s3Credentials.getSecretAccessKey());
+  }
+
+  @Rule
+  public final TemporaryFolder tempDir = new TemporaryFolder();
+
+  @Test
+  public void testCredentialProvider() throws Exception {
+    // set up conf to have a cred provider
+    final Configuration conf = new Configuration();
+    final File file = tempDir.newFile("test.jks");
+    final URI jks = ProviderUtils.nestURIForLocalJavaKeyStoreProvider(
+        file.toURI());
+    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
+        jks.toString());
+
+    // add our creds to the provider
+    final CredentialProvider provider =
+        CredentialProviderFactory.getProviders(conf).get(0);
+    provider.createCredentialEntry("fs.s3.awsSecretAccessKey",
+        EXAMPLE_KEY.toCharArray());
+    provider.flush();
+
+    // make sure S3Creds can retrieve things.
+    S3Credentials s3Credentials = new S3Credentials();
+    conf.set("fs.s3.awsAccessKeyId", EXAMPLE_ID);
+    s3Credentials.initialize(new URI("s3://foobar"), conf);
+    assertEquals("Could not retrieve proper access key", EXAMPLE_ID,
+        s3Credentials.getAccessKey());
+    assertEquals("Could not retrieve proper secret", EXAMPLE_KEY,
+        s3Credentials.getSecretAccessKey());
+  }
+
+  @Test(expected=IllegalArgumentException.class)
+  public void noSecretShouldThrow() throws Exception {
+    S3Credentials s3Credentials = new S3Credentials();
+    Configuration conf = new Configuration();
+    conf.set("fs.s3.awsAccessKeyId", EXAMPLE_ID);
+    s3Credentials.initialize(new URI("s3://foobar"), conf);
+  }
+
+  @Test(expected=IllegalArgumentException.class)
+  public void noAccessIdShouldThrow() throws Exception {
+    S3Credentials s3Credentials = new S3Credentials();
+    Configuration conf = new Configuration();
+    conf.set("fs.s3.awsSecretAccessKey", EXAMPLE_KEY);
+    s3Credentials.initialize(new URI("s3://foobar"), conf);
+  }
 }