Posted to commits@spark.apache.org by ma...@apache.org on 2013/11/09 22:48:05 UTC

[2/4] git commit: Remove the runAsUser call as it breaks secure HDFS access

Remove the runAsUser call as it breaks secure HDFS access


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/f95cb04e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/f95cb04e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/f95cb04e

Branch: refs/heads/master
Commit: f95cb04e4009f25f6b466173fcab24bd861acb24
Parents: 5f9ed51
Author: tgravescs <tg...@yahoo.com>
Authored: Fri Nov 8 10:07:15 2013 -0600
Committer: tgravescs <tg...@yahoo.com>
Committed: Fri Nov 8 10:07:15 2013 -0600

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/executor/Executor.scala | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/f95cb04e/core/src/main/scala/org/apache/spark/executor/Executor.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index 5c9bb9d..0a4f10c 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -179,7 +179,10 @@ private[spark] class Executor(
       }
     }
 
-    override def run(): Unit = SparkHadoopUtil.get.runAsUser(sparkUser) { () =>
+    // the runAsUser breaks secure HDFS access. It needs to add the credentials
+    // for the user if running as a user. Comment out for now. 
+    //override def run(): Unit = SparkHadoopUtil.get.runAsUser(sparkUser) { () =>
+    override def run(): Unit = {
       val startTime = System.currentTimeMillis()
       SparkEnv.set(env)
       Thread.currentThread.setContextClassLoader(replClassLoader)
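
----------------------------------------------------------------------

For context: the removed runAsUser wrapper switched execution to a UGI for
sparkUser without carrying over the login user's Hadoop credentials, so the
delegation tokens needed for secure HDFS were not visible inside doAs. Below
is a minimal Scala sketch of the missing piece, assuming Hadoop's
UserGroupInformation API; the object and method names are illustrative only
and are not Spark code or the eventual fix.

    import java.security.PrivilegedExceptionAction
    import org.apache.hadoop.security.UserGroupInformation

    object RunAsUserSketch {
      // Run `body` as `user`, but first copy the login user's credentials
      // (e.g. HDFS delegation tokens) onto the new UGI. createRemoteUser
      // alone yields a UGI with no tokens, which is why secure HDFS access
      // fails inside doAs without this copy.
      def runAsUserWithCredentials[T](user: String)(body: () => T): T = {
        val loginUser = UserGroupInformation.getCurrentUser
        val ugi = UserGroupInformation.createRemoteUser(user)
        ugi.addCredentials(loginUser.getCredentials)
        ugi.doAs(new PrivilegedExceptionAction[T] {
          override def run(): T = body()
        })
      }
    }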