Posted to commits@spark.apache.org by ma...@apache.org on 2014/01/08 06:30:42 UTC

[5/6] git commit: Address review comments

Address review comments


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/2c421749
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/2c421749
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/2c421749

Branch: refs/heads/master
Commit: 2c421749eae1e3945ca34ce006addd98a0c1a00b
Parents: 044c8ad
Author: Matei Zaharia <ma...@databricks.com>
Authored: Tue Jan 7 19:30:23 2014 -0500
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Tue Jan 7 19:30:23 2014 -0500

----------------------------------------------------------------------
 .../scala/org/apache/spark/deploy/ApplicationDescription.scala   | 2 +-
 .../main/scala/org/apache/spark/deploy/client/TestClient.scala   | 2 +-
 .../scala/org/apache/spark/deploy/master/ApplicationInfo.scala   | 2 +-
 core/src/main/scala/org/apache/spark/deploy/master/Master.scala  | 3 +++
 .../spark/scheduler/cluster/SparkDeploySchedulerBackend.scala    | 2 +-
 .../test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala   | 2 +-
 .../org/apache/spark/deploy/worker/ExecutorRunnerTest.scala      | 2 +-
 docs/configuration.md                                            | 4 ++--
 8 files changed, 11 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/2c421749/core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala b/core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
index 19d393a..e38459b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
@@ -19,7 +19,7 @@ package org.apache.spark.deploy
 
 private[spark] class ApplicationDescription(
     val name: String,
-    val maxCores: Int, /* Integer.MAX_VALUE denotes an unlimited number of cores */
+    val maxCores: Option[Int],
     val memoryPerSlave: Int,
     val command: Command,
     val sparkHome: String,
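
The change above replaces the Int.MaxValue sentinel with Option[Int], so "no explicit
core limit" becomes a distinct value (None) rather than a magic number. A minimal sketch
of the two cases, using a simplified stand-in class rather than the full
ApplicationDescription constructor shown in the diff:

    // Simplified stand-in for illustration only; the real class also takes a
    // Command, a Spark home and a UI URL, as in the hunk above.
    case class AppDesc(name: String, maxCores: Option[Int], memoryPerSlave: Int)

    val capped   = AppDesc("capped-app", Some(8), 512)   // explicit limit of 8 cores
    val uncapped = AppDesc("uncapped-app", None, 512)    // defer to the master's default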

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/2c421749/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
index ef649fd..28ebbdc 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
@@ -48,7 +48,7 @@ private[spark] object TestClient {
     val (actorSystem, port) = AkkaUtils.createActorSystem("spark", Utils.localIpAddress, 0,
       conf = new SparkConf)
     val desc = new ApplicationDescription(
-      "TestClient", 1, 512, Command("spark.deploy.client.TestExecutor", Seq(), Map()),
+      "TestClient", Some(1), 512, Command("spark.deploy.client.TestExecutor", Seq(), Map()),
       "dummy-spark-home", "ignored")
     val listener = new TestListener
     val client = new Client(actorSystem, Array(url), desc, listener, new SparkConf)

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/2c421749/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
index 1321d92..3e26379 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
@@ -82,7 +82,7 @@ private[spark] class ApplicationInfo(
     }
   }
 
-  private val myMaxCores = if (desc.maxCores == Int.MaxValue) defaultCores else desc.maxCores
+  private val myMaxCores = desc.maxCores.getOrElse(defaultCores)
 
   def coresLeft: Int = myMaxCores - coresGranted
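
With maxCores as an Option, falling back to the master-wide default reduces to a single
getOrElse, replacing the old comparison against Int.MaxValue. A small sketch of the
resolution in isolation (the values are illustrative, not taken from the commit):

    val defaultCores = Int.MaxValue                       // master-side fallback
    def resolve(maxCores: Option[Int]): Int = maxCores.getOrElse(defaultCores)

    resolve(Some(8))   // => 8, the application's own cap
    resolve(None)      // => Int.MaxValue, i.e. effectively unlimited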
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/2c421749/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index ee01fb1..6617b71 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -92,6 +92,9 @@ private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Act
 
   // Default maxCores for applications that don't specify it (i.e. pass Int.MaxValue)
   val defaultCores = conf.getInt("spark.deploy.defaultCores", Int.MaxValue)
+  if (defaultCores < 1) {
+    throw new SparkException("spark.deploy.defaultCores must be positive")
+  }
 
   override def preStart() {
     logInfo("Starting Spark master at " + masterUrl)

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/2c421749/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index 9858717..73fc374 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -38,7 +38,7 @@ private[spark] class SparkDeploySchedulerBackend(
   var stopping = false
   var shutdownCallback : (SparkDeploySchedulerBackend) => Unit = _
 
-  val maxCores = conf.get("spark.cores.max",  Int.MaxValue.toString).toInt
+  val maxCores = conf.getOption("spark.cores.max").map(_.toInt)
 
   override def start() {
     super.start()
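
On the reading side, conf.getOption keeps an unset spark.cores.max as None instead of
materializing Int.MaxValue, which is what lets the standalone master substitute
spark.deploy.defaultCores later. A hedged sketch of the two cases, assuming a fresh
SparkConf with no other settings:

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
    conf.getOption("spark.cores.max").map(_.toInt)   // => None: no explicit cap

    conf.set("spark.cores.max", "4")
    conf.getOption("spark.cores.max").map(_.toInt)   // => Some(4): explicit cap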

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/2c421749/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index 810ebf4..331fa3a 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -70,7 +70,7 @@ class JsonProtocolSuite extends FunSuite {
 
   def createAppDesc() : ApplicationDescription = {
     val cmd = new Command("mainClass", List("arg1", "arg2"), Map())
-    new ApplicationDescription("name", 4, 1234, cmd, "sparkHome", "appUiUrl")
+    new ApplicationDescription("name", Some(4), 1234, cmd, "sparkHome", "appUiUrl")
   }
   def createAppInfo() : ApplicationInfo = {
     new ApplicationInfo(

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/2c421749/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
index 7e5aaa3..be93074 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
@@ -27,7 +27,7 @@ class ExecutorRunnerTest extends FunSuite {
   test("command includes appId") {
     def f(s:String) = new File(s)
     val sparkHome = sys.env.get("SPARK_HOME").orElse(sys.props.get("spark.home")).get
-    val appDesc = new ApplicationDescription("app name", 8, 500, Command("foo", Seq(),Map()),
+    val appDesc = new ApplicationDescription("app name", Some(8), 500, Command("foo", Seq(),Map()),
       sparkHome, "appUiUrl")
     val appId = "12345-worker321-9876"
     val er = new ExecutorRunner(appId, 1, appDesc, 8, 500, null, "blah", "worker321", f(sparkHome),

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/2c421749/docs/configuration.md
----------------------------------------------------------------------
diff --git a/docs/configuration.md b/docs/configuration.md
index 52ed59b..1d6c3d1 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -418,7 +418,7 @@ Apart from these, the following properties are also available, and may be useful
     Whether the standalone cluster manager should spread applications out across nodes or try
     to consolidate them onto as few nodes as possible. Spreading out is usually better for
     data locality in HDFS, but consolidating is more efficient for compute-intensive workloads. <br/>
-    <b>Note:</b> this setting needs to be configured in the cluster master, not in individual
+    <b>Note:</b> this setting needs to be configured in the standalone cluster master, not in individual
     applications; you can set it through <code>SPARK_JAVA_OPTS</code> in <code>spark-env.sh</code>.
   </td>
 </tr>
@@ -431,7 +431,7 @@ Apart from these, the following properties are also available, and may be useful
     cores unless they configure <code>spark.cores.max</code> themselves.
     Set this lower on a shared cluster to prevent users from grabbing
     the whole cluster by default. <br/>
-    <b>Note:</b> this setting needs to be configured in the cluster master, not in individual
+    <b>Note:</b> this setting needs to be configured in the standalone cluster master, not in individual
     applications; you can set it through <code>SPARK_JAVA_OPTS</code> in <code>spark-env.sh</code>.
   </td>
 </tr>
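
Per the notes above, both spark.deploy.spreadOut and spark.deploy.defaultCores are
master-side settings. An illustrative (not prescriptive) spark-env.sh entry on the
standalone master could look like:

    # Caps applications that do not set spark.cores.max themselves; 16 is an example value.
    SPARK_JAVA_OPTS="-Dspark.deploy.defaultCores=16"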