Posted to commits@spark.apache.org by va...@apache.org on 2018/02/22 20:07:59 UTC

spark git commit: [SPARK-23476][CORE] Generate secret in local mode when authentication on

Repository: spark
Updated Branches:
  refs/heads/master 87293c746 -> c5abb3c2d


[SPARK-23476][CORE] Generate secret in local mode when authentication on

## What changes were proposed in this pull request?

If Spark is run with "spark.authenticate=true", it will fail to start in local mode.

This PR generates a secret in local mode when authentication is on.
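
For context, a minimal sketch of the scenario (the app name is hypothetical; the behavior is as described above and in the SecurityManager change below):

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Local-mode configuration with authentication turned on. Before this change,
// SecurityManager.initializeAuth() required spark.authenticate.secret for every
// master other than "yarn", so starting a local context like this failed.
val conf = new SparkConf()
  .setMaster("local[*]")
  .setAppName("auth-local-sketch") // hypothetical app name
  .set("spark.authenticate", "true")

// With this patch a secret is generated automatically for local masters, so the
// context can start without spark.authenticate.secret being set.
val sc = new SparkContext(conf)
sc.stop()
```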

## How was this patch tested?

Modified existing unit test.
Manually started spark-shell.

Author: Gabor Somogyi <ga...@gmail.com>

Closes #20652 from gaborgsomogyi/SPARK-23476.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c5abb3c2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c5abb3c2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c5abb3c2

Branch: refs/heads/master
Commit: c5abb3c2d16f601d507bee3c53663d4e117eb8b5
Parents: 87293c7
Author: Gabor Somogyi <ga...@gmail.com>
Authored: Thu Feb 22 12:07:51 2018 -0800
Committer: Marcelo Vanzin <va...@cloudera.com>
Committed: Thu Feb 22 12:07:51 2018 -0800

----------------------------------------------------------------------
 .../org/apache/spark/SecurityManager.scala      | 16 +++++--
 .../org/apache/spark/SecurityManagerSuite.scala | 50 +++++++++++++-------
 docs/security.md                                |  2 +-
 3 files changed, 46 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c5abb3c2/core/src/main/scala/org/apache/spark/SecurityManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index 5b15a1c..2519d26 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -520,19 +520,25 @@ private[spark] class SecurityManager(
    *
    * If authentication is disabled, do nothing.
    *
-   * In YARN mode, generate a new secret and store it in the current user's credentials.
+   * In YARN and local mode, generate a new secret and store it in the current user's credentials.
    *
    * In other modes, assert that the auth secret is set in the configuration.
    */
   def initializeAuth(): Unit = {
+    import SparkMasterRegex._
+
     if (!sparkConf.get(NETWORK_AUTH_ENABLED)) {
       return
     }
 
-    if (sparkConf.get(SparkLauncher.SPARK_MASTER, null) != "yarn") {
-      require(sparkConf.contains(SPARK_AUTH_SECRET_CONF),
-        s"A secret key must be specified via the $SPARK_AUTH_SECRET_CONF config.")
-      return
+    val master = sparkConf.get(SparkLauncher.SPARK_MASTER, "")
+    master match {
+      case "yarn" | "local" | LOCAL_N_REGEX(_) | LOCAL_N_FAILURES_REGEX(_, _) =>
+        // Secret generation allowed here
+      case _ =>
+        require(sparkConf.contains(SPARK_AUTH_SECRET_CONF),
+          s"A secret key must be specified via the $SPARK_AUTH_SECRET_CONF config.")
+        return
     }
 
     val rnd = new SecureRandom()
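
As a rough stand-alone illustration of the new match arm above: the real patterns live in Spark's SparkMasterRegex object; the regexes below are approximations used only to show which master strings are intended to allow automatic secret generation.

```scala
import scala.util.matching.Regex

// Approximate forms of the patterns referenced in the match above (the actual
// definitions are in org.apache.spark.SparkMasterRegex and may differ slightly).
val LOCAL_N_REGEX: Regex = """local\[([0-9]+|\*)\]""".r
val LOCAL_N_FAILURES_REGEX: Regex = """local\[([0-9]+|\*)\s*,\s*([0-9]+)\]""".r

def allowsSecretGeneration(master: String): Boolean = master match {
  case "yarn" | "local" | LOCAL_N_REGEX(_) | LOCAL_N_FAILURES_REGEX(_, _) => true
  case _ => false // e.g. standalone masters still need spark.authenticate.secret
}

assert(allowsSecretGeneration("local[*]"))
assert(allowsSecretGeneration("local[1, 2]"))
assert(!allowsSecretGeneration("local-cluster[2, 1, 1024]"))
```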

http://git-wip-us.apache.org/repos/asf/spark/blob/c5abb3c2/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala b/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
index cf59265..106ece7 100644
--- a/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
@@ -440,23 +440,41 @@ class SecurityManagerSuite extends SparkFunSuite with ResetSystemProperties {
     assert(keyFromEnv === new SecurityManager(conf2).getSecretKey())
   }
 
-  test("secret key generation in yarn mode") {
-    val conf = new SparkConf()
-      .set(NETWORK_AUTH_ENABLED, true)
-      .set(SparkLauncher.SPARK_MASTER, "yarn")
-    val mgr = new SecurityManager(conf)
-
-    UserGroupInformation.createUserForTesting("authTest", Array()).doAs(
-      new PrivilegedExceptionAction[Unit]() {
-        override def run(): Unit = {
-          mgr.initializeAuth()
-          val creds = UserGroupInformation.getCurrentUser().getCredentials()
-          val secret = creds.getSecretKey(SecurityManager.SECRET_LOOKUP_KEY)
-          assert(secret != null)
-          assert(new String(secret, UTF_8) === mgr.getSecretKey())
+  test("secret key generation") {
+    Seq(
+      ("yarn", true),
+      ("local", true),
+      ("local[*]", true),
+      ("local[1, 2]", true),
+      ("local-cluster[2, 1, 1024]", false),
+      ("invalid", false)
+    ).foreach { case (master, shouldGenerateSecret) =>
+      val conf = new SparkConf()
+        .set(NETWORK_AUTH_ENABLED, true)
+        .set(SparkLauncher.SPARK_MASTER, master)
+      val mgr = new SecurityManager(conf)
+
+      UserGroupInformation.createUserForTesting("authTest", Array()).doAs(
+        new PrivilegedExceptionAction[Unit]() {
+          override def run(): Unit = {
+            if (shouldGenerateSecret) {
+              mgr.initializeAuth()
+              val creds = UserGroupInformation.getCurrentUser().getCredentials()
+              val secret = creds.getSecretKey(SecurityManager.SECRET_LOOKUP_KEY)
+              assert(secret != null)
+              assert(new String(secret, UTF_8) === mgr.getSecretKey())
+            } else {
+              intercept[IllegalArgumentException] {
+                mgr.initializeAuth()
+              }
+              intercept[IllegalArgumentException] {
+                mgr.getSecretKey()
+              }
+            }
+          }
         }
-      }
-    )
+      )
+    }
   }
 
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/c5abb3c2/docs/security.md
----------------------------------------------------------------------
diff --git a/docs/security.md b/docs/security.md
index bebc28d..0f384b4 100644
--- a/docs/security.md
+++ b/docs/security.md
@@ -6,7 +6,7 @@ title: Security
 
 Spark currently supports authentication via a shared secret. Authentication can be configured to be on via the `spark.authenticate` configuration parameter. This parameter controls whether the Spark communication protocols do authentication using the shared secret. This authentication is a basic handshake to make sure both sides have the same shared secret and are allowed to communicate. If the shared secret is not identical they will not be allowed to communicate. The shared secret is created as follows:
 
-* For Spark on [YARN](running-on-yarn.html) deployments, configuring `spark.authenticate` to `true` will automatically handle generating and distributing the shared secret. Each application will use a unique shared secret.
+* For Spark on [YARN](running-on-yarn.html) and local deployments, configuring `spark.authenticate` to `true` will automatically handle generating and distributing the shared secret. Each application will use a unique shared secret.
 * For other types of Spark deployments, the Spark parameter `spark.authenticate.secret` should be configured on each of the nodes. This secret will be used by all the Master/Workers and applications.
 
 ## Web UI
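
As a hedged illustration of the second bullet in the security.md excerpt above (the master URL and secret value are placeholders):

```scala
import org.apache.spark.SparkConf

// Non-YARN, non-local deployment: the shared secret is not generated automatically,
// so it must be supplied explicitly and match across the Master/Workers and the app.
val standaloneConf = new SparkConf()
  .setMaster("spark://standalone-master:7077") // hypothetical standalone master URL
  .set("spark.authenticate", "true")
  .set("spark.authenticate.secret", "replace-with-shared-secret") // placeholder value
```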


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org