Posted to common-commits@hadoop.apache.org by as...@apache.org on 2016/02/11 08:59:54 UTC

[20/50] hadoop git commit: HADOOP-12752. Improve diagnostics/use of envvar/sysprop credential propagation. Contributed by Steve Loughran.

HADOOP-12752. Improve diagnostics/use of envvar/sysprop credential propagation. Contributed by Steve Loughran.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cf326157
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cf326157
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cf326157

Branch: refs/heads/yarn-2877
Commit: cf3261570ae139c177225af165557038a9280a5d
Parents: d37eb82
Author: cnauroth <cn...@apache.org>
Authored: Mon Feb 8 09:36:09 2016 -0800
Committer: cnauroth <cn...@apache.org>
Committed: Mon Feb 8 09:36:09 2016 -0800

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 .../hadoop/security/UserGroupInformation.java   | 22 ++++++++++++++++----
 2 files changed, 21 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf326157/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index d99936f..9130a8d 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -1108,6 +1108,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12759. RollingFileSystemSink should eagerly rotate directories.
     (Daniel Templeton via wang)
 
+    HADOOP-12752. Improve diagnostics/use of envvar/sysprop credential
+    propagation (Steve Loughran via cnauroth)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf326157/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
index f62b2b5..047e645 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
@@ -24,6 +24,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_TOKEN_FI
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.lang.reflect.UndeclaredThrowableException;
 import java.security.AccessControlContext;
@@ -54,8 +55,6 @@ import javax.security.auth.login.LoginContext;
 import javax.security.auth.login.LoginException;
 import javax.security.auth.spi.LoginModule;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -75,6 +74,8 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * User and group information for Hadoop.
@@ -85,7 +86,9 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "HBase", "Hive", "Oozie"})
 @InterfaceStability.Evolving
 public class UserGroupInformation {
-  private static final Log LOG =  LogFactory.getLog(UserGroupInformation.class);
+  private static final Logger LOG = LoggerFactory.getLogger(
+      UserGroupInformation.class);
+
   /**
    * Percentage of the ticket window to use before we renew ticket.
    */
@@ -848,8 +851,19 @@ public class UserGroupInformation {
         // Load the token storage file and put all of the tokens into the
         // user. Don't use the FileSystem API for reading since it has a lock
         // cycle (HADOOP-9212).
+        File source = new File(fileLocation);
+        LOG.debug("Reading credentials from location set in {}: {}",
+            HADOOP_TOKEN_FILE_LOCATION,
+            source.getCanonicalPath());
+        if (!source.isFile()) {
+          throw new FileNotFoundException("Source file "
+              + source.getCanonicalPath() + " from "
+              + HADOOP_TOKEN_FILE_LOCATION
+              + " not found");
+        }
         Credentials cred = Credentials.readTokenStorageFile(
-            new File(fileLocation), conf);
+            source, conf);
+        LOG.debug("Loaded {} tokens", cred.numberOfTokens());
         loginUser.addCredentials(cred);
       }
       loginUser.spawnAutoRenewalThreadForUserCreds();
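
For reference, below is a minimal sketch (not part of the patch) of how a client-side
pre-check can mirror the diagnostics this change adds: it reads the token storage file
path from the HADOOP_TOKEN_FILE_LOCATION environment variable, as in the hunk above,
and uses the public Credentials.readTokenStorageFile API to count the tokens it holds.
The class name TokenFileCheck is illustrative only; the sysprop propagation path
mentioned in the commit title is not covered by this sketch.

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;

public class TokenFileCheck {
  public static void main(String[] args) throws IOException {
    // Same environment variable UserGroupInformation consults when loading
    // the login user's credentials.
    String fileLocation = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
    if (fileLocation == null) {
      System.out.println("HADOOP_TOKEN_FILE_LOCATION is not set");
      return;
    }
    File source = new File(fileLocation);
    if (!source.isFile()) {
      // Same failure mode the patch now surfaces inside UserGroupInformation.
      throw new FileNotFoundException("Token file " + source.getCanonicalPath()
          + " from HADOOP_TOKEN_FILE_LOCATION not found");
    }
    // Read the token storage file directly, without going through the
    // FileSystem API, and report how many tokens it contains.
    Credentials cred = Credentials.readTokenStorageFile(source, new Configuration());
    System.out.println("Loaded " + cred.numberOfTokens() + " tokens from "
        + source.getCanonicalPath());
  }
}

Pointing HADOOP_TOKEN_FILE_LOCATION at a missing file reproduces the
FileNotFoundException path that the patch adds; pointing it at a valid token
storage file exercises the same read path that UserGroupInformation now logs at
debug level.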