You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2016/03/04 02:01:47 UTC
[08/50] [abbrv] hadoop git commit: HADOOP-12846. Credential Provider
Recursive Dependencies. Contributed by Larry McCay.
HADOOP-12846. Credential Provider Recursive Dependencies. Contributed by Larry McCay.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7634d404
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7634d404
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7634d404
Branch: refs/heads/HDFS-1312
Commit: 7634d404b750eafa135a37fa275325d0398255fb
Parents: f969277
Author: Chris Nauroth <cn...@apache.org>
Authored: Sun Feb 28 11:22:55 2016 -0800
Committer: Chris Nauroth <cn...@apache.org>
Committed: Sun Feb 28 11:22:55 2016 -0800
----------------------------------------------------------------------
hadoop-common-project/hadoop-common/CHANGES.txt | 3 +
.../apache/hadoop/security/ProviderUtils.java | 107 +++++++++++++++++--
.../hadoop/fs/azure/SimpleKeyProvider.java | 5 +-
.../fs/azure/TestWasbUriAndConfiguration.java | 35 ++++++
4 files changed, 143 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7634d404/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9f95221..4739564 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -1761,6 +1761,9 @@ Release 2.8.0 - UNRELEASED
HADOOP-12849. TestSymlinkLocalFSFileSystem fails intermittently.
(Mingliang Liu via cnauroth)
+ HADOOP-12846. Credential Provider Recursive Dependencies.
+ (Larry McCay via cnauroth)
+
Release 2.7.3 - UNRELEASED
INCOMPATIBLE CHANGES
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7634d404/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
index b764506..ae08fba 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
@@ -18,14 +18,34 @@
package org.apache.hadoop.security;
+import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.apache.hadoop.security.alias.LocalJavaKeyStoreProvider;
-public class ProviderUtils {
+/**
+ * Utility methods for both key and credential provider APIs.
+ *
+ */
+public final class ProviderUtils {
+ private static final Log LOG = LogFactory.getLog(ProviderUtils.class);
+
+ /**
+ * Hidden ctor to ensure that this utility class isn't
+ * instantiated explicitly.
+ */
+ private ProviderUtils() {
+ // hide ctor for checkstyle compliance
+ }
+
/**
* Convert a nested URI to decode the underlying path. The translation takes
* the authority and parses it into the underlying scheme and authority.
@@ -35,11 +55,15 @@ public class ProviderUtils {
* @return the unnested path
*/
public static Path unnestUri(URI nestedUri) {
- String[] parts = nestedUri.getAuthority().split("@", 2);
- StringBuilder result = new StringBuilder(parts[0]);
- result.append("://");
- if (parts.length == 2) {
- result.append(parts[1]);
+ StringBuilder result = new StringBuilder();
+ String authority = nestedUri.getAuthority();
+ if (authority != null) {
+ String[] parts = nestedUri.getAuthority().split("@", 2);
+ result.append(parts[0]);
+ result.append("://");
+ if (parts.length == 2) {
+ result.append(parts[1]);
+ }
}
result.append(nestedUri.getPath());
if (nestedUri.getQuery() != null) {
@@ -79,4 +103,75 @@ public class ProviderUtils {
"//file" + localFile.getSchemeSpecificPart(), localFile.getFragment());
}
+ /**
+ * There are certain integrations of the credential provider API in
+ * which a recursive dependency between the provider and the Hadoop
+ * filesystem abstraction causes a problem. These integration points
+ * need to leverage this utility method to remove problematic provider
+ * types from the existing provider path within the configuration.
+ *
+ * @param config the existing configuration with provider path
+ * @param fileSystemClass the class with which providers must be compatible
+ * @return Configuration clone with new provider path
+ */
+ public static Configuration excludeIncompatibleCredentialProviders(
+ Configuration config, Class<? extends FileSystem> fileSystemClass)
+ throws IOException {
+
+ String providerPath = config.get(
+ CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH);
+
+ if (providerPath == null) {
+ return config;
+ }
+ StringBuffer newProviderPath = new StringBuffer();
+ String[] providers = providerPath.split(",");
+ Path path = null;
+ for (String provider: providers) {
+ try {
+ path = unnestUri(new URI(provider));
+ Class<? extends FileSystem> clazz = null;
+ try {
+ String scheme = path.toUri().getScheme();
+ clazz = FileSystem.getFileSystemClass(scheme, config);
+ } catch (IOException ioe) {
+ // not all providers are filesystem based
+ // for instance user:/// will not be able to
+ // have a filesystem class associated with it.
+ if (newProviderPath.length() > 0) {
+ newProviderPath.append(",");
+ }
+ newProviderPath.append(provider);
+ }
+ if (clazz != null) {
+ if (fileSystemClass.isAssignableFrom(clazz)) {
+ LOG.debug("Filesystem based provider" +
+ " excluded from provider path due to recursive dependency: "
+ + provider);
+ } else {
+ if (newProviderPath.length() > 0) {
+ newProviderPath.append(",");
+ }
+ newProviderPath.append(provider);
+ }
+ }
+ } catch (URISyntaxException e) {
+ LOG.warn("Credential Provider URI is invalid." + provider);
+ }
+ }
+
+ String effectivePath = newProviderPath.toString();
+ if (effectivePath.equals(providerPath)) {
+ return config;
+ }
+
+ Configuration conf = new Configuration(config);
+ if (effectivePath.equals("")) {
+ conf.unset(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH);
+ } else {
+ conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
+ effectivePath);
+ }
+ return conf;
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7634d404/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java
index 28e307e..5596f7e 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/SimpleKeyProvider.java
@@ -24,6 +24,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.ProviderUtils;
/**
* Key provider that simply returns the storage account key from the
@@ -41,7 +42,9 @@ public class SimpleKeyProvider implements KeyProvider {
throws KeyProviderException {
String key = null;
try {
- char[] keyChars = conf.getPassword(getStorageAccountKeyName(accountName));
+ Configuration c = ProviderUtils.excludeIncompatibleCredentialProviders(
+ conf, NativeAzureFileSystem.class);
+ char[] keyChars = c.getPassword(getStorageAccountKeyName(accountName));
if (keyChars != null) {
key = new String(keyChars);
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7634d404/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java
index 06a5b62..cd9d1d4 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestWasbUriAndConfiguration.java
@@ -461,4 +461,39 @@ public class TestWasbUriAndConfiguration {
FileSystem.closeAll();
}
}
+
+ @Test
+ public void testCredentialProviderPathExclusions() throws Exception {
+ String providerPath =
+ "user:///,jceks://wasb/user/hrt_qa/sqoopdbpasswd.jceks," +
+ "jceks://hdfs@nn1.example.com/my/path/test.jceks";
+ Configuration config = new Configuration();
+ config.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
+ providerPath);
+ String newPath = "user:///,jceks://hdfs@nn1.example.com/my/path/test.jceks";
+
+ excludeAndTestExpectations(config, newPath);
+ }
+
+ @Test
+ public void testExcludeAllProviderTypesFromConfig() throws Exception {
+ String providerPath =
+ "jceks://wasb/tmp/test.jceks," +
+ "jceks://wasb@/my/path/test.jceks";
+ Configuration config = new Configuration();
+ config.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
+ providerPath);
+ String newPath = null;
+
+ excludeAndTestExpectations(config, newPath);
+ }
+
+ void excludeAndTestExpectations(Configuration config, String newPath)
+ throws Exception {
+ Configuration conf = ProviderUtils.excludeIncompatibleCredentialProviders(
+ config, NativeAzureFileSystem.class);
+ String effectivePath = conf.get(
+ CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, null);
+ assertEquals(newPath, effectivePath);
+ }
}