Posted to commits@spark.apache.org by ad...@apache.org on 2014/03/20 01:57:48 UTC
git commit: Revert "SPARK-1099: Spark's local mode should probably respect spark.cores.max by default"
Repository: spark
Updated Branches:
refs/heads/master 16789317a -> ffe272d97
Revert "SPARK-1099:Spark's local mode should probably respect spark.cores.max by default"
This reverts commit 16789317a34c1974f7b35960f06a7b51d8e0f29f. Jenkins was not run for this PR.
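For context: the reverted change made plain "local" mode size its backend from spark.cores.max, capped at the number of available processors. With the revert applied, "local" again means exactly one worker thread, and a larger core count must be requested through the master string. A minimal sketch, assuming a throwaway app name:

    import org.apache.spark.SparkContext

    // After the revert, plain "local" schedules on exactly one core:
    val sc = new SparkContext("local", "example")

    // To use more cores locally, put the count in the master string
    // instead of setting spark.cores.max, e.g. "local[4]", or mirror
    // the reverted behavior explicitly:
    //   val n = Runtime.getRuntime.availableProcessors()
    //   val sc = new SparkContext("local[" + n + "]", "example")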
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ffe272d9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ffe272d9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ffe272d9
Branch: refs/heads/master
Commit: ffe272d97c22955fe7744b1c0132cd9877b6df96
Parents: 1678931
Author: Aaron Davidson <aa...@databricks.com>
Authored: Wed Mar 19 17:56:48 2014 -0700
Committer: Aaron Davidson <aa...@databricks.com>
Committed: Wed Mar 19 17:56:48 2014 -0700
----------------------------------------------------------------------
.../scala/org/apache/spark/SparkContext.scala | 5 +----
.../test/scala/org/apache/spark/FileSuite.scala | 4 ++--
.../SparkContextSchedulerCreationSuite.scala | 19 +++----------------
3 files changed, 6 insertions(+), 22 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/ffe272d9/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 8f74607..a1003b7 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1262,10 +1262,7 @@ object SparkContext extends Logging {
     master match {
       case "local" =>
         val scheduler = new TaskSchedulerImpl(sc, MAX_LOCAL_TASK_FAILURES, isLocal = true)
-        // Use user specified in config, up to all available cores
-        val realCores = Runtime.getRuntime.availableProcessors()
-        val toUseCores = math.min(sc.conf.getInt("spark.cores.max", realCores), realCores)
-        val backend = new LocalBackend(scheduler, toUseCores)
+        val backend = new LocalBackend(scheduler, 1)
         scheduler.initialize(backend)
         scheduler
 
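The restored branch hard-codes one core for plain "local". For comparison, the neighboring "local[N]" branch of the same match expression parses the thread count out of the master string. A standalone sketch of that parsing (paraphrased, not part of this diff; the regex mirrors the LOCAL_N_REGEX convention in SparkContext):

    // Matches master strings like "local[5]" and captures the thread count.
    val LOCAL_N_REGEX = """local\[([0-9]+)\]""".r

    "local[5]" match {
      case LOCAL_N_REGEX(threads) =>
        // This is where SparkContext would build a LocalBackend with
        // threads.toInt cores, mirroring the "local" branch above.
        println("local[N] master requesting " + threads.toInt + " cores")
      case _ =>
        println("not a local[N] master")
    }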
http://git-wip-us.apache.org/repos/asf/spark/blob/ffe272d9/core/src/test/scala/org/apache/spark/FileSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index b4a5881..01af940 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -34,7 +34,7 @@ import org.apache.spark.SparkContext._
 class FileSuite extends FunSuite with LocalSparkContext {
 
   test("text files") {
-    sc = new SparkContext("local[1]", "test")
+    sc = new SparkContext("local", "test")
     val tempDir = Files.createTempDir()
     val outputDir = new File(tempDir, "output").getAbsolutePath
     val nums = sc.makeRDD(1 to 4)
@@ -176,7 +176,7 @@ class FileSuite extends FunSuite with LocalSparkContext {
 
   test("write SequenceFile using new Hadoop API") {
     import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat
-    sc = new SparkContext("local[1]", "test")
+    sc = new SparkContext("local", "test")
     val tempDir = Files.createTempDir()
     val outputDir = new File(tempDir, "output").getAbsolutePath
     val nums = sc.makeRDD(1 to 3).map(x => (new IntWritable(x), new Text("a" * x)))
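Since the revert makes "local" back a single-core LocalBackend again, "local" and "local[1]" are interchangeable here, so the tests above lose nothing by dropping the explicit thread count:

    // Equivalent after the revert; both run exactly one worker thread:
    sc = new SparkContext("local", "test")
    // sc = new SparkContext("local[1]", "test")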
http://git-wip-us.apache.org/repos/asf/spark/blob/ffe272d9/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
index 9dd42be..b543471 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
@@ -27,10 +27,10 @@ import org.apache.spark.scheduler.local.LocalBackend
 class SparkContextSchedulerCreationSuite
   extends FunSuite with PrivateMethodTester with LocalSparkContext with Logging {
 
-  def createTaskScheduler(master: String, conf: SparkConf = new SparkConf()): TaskSchedulerImpl = {
+  def createTaskScheduler(master: String): TaskSchedulerImpl = {
     // Create local SparkContext to setup a SparkEnv. We don't actually want to start() the
     // real schedulers, so we don't want to create a full SparkContext with the desired scheduler.
-    sc = new SparkContext("local", "test", conf)
+    sc = new SparkContext("local", "test")
     val createTaskSchedulerMethod = PrivateMethod[TaskScheduler]('createTaskScheduler)
     val sched = SparkContext invokePrivate createTaskSchedulerMethod(sc, master)
     sched.asInstanceOf[TaskSchedulerImpl]
@@ -44,26 +44,13 @@ class SparkContextSchedulerCreationSuite
   }
 
   test("local") {
-    var conf = new SparkConf()
-    conf.set("spark.cores.max", "1")
-    val sched = createTaskScheduler("local", conf)
+    val sched = createTaskScheduler("local")
     sched.backend match {
       case s: LocalBackend => assert(s.totalCores === 1)
       case _ => fail()
     }
   }
 
-  test("local-cores-exceed") {
-    val cores = Runtime.getRuntime.availableProcessors() + 1
-    var conf = new SparkConf()
-    conf.set("spark.cores.max", cores.toString)
-    val sched = createTaskScheduler("local", conf)
-    sched.backend match {
-      case s: LocalBackend => assert(s.totalCores === Runtime.getRuntime.availableProcessors())
-      case _ => fail()
-    }
-  }
-
   test("local-n") {
     val sched = createTaskScheduler("local[5]")
     assert(sched.maxTaskFailures === 1)
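As background on the helper at the top of this suite: it reaches the private SparkContext.createTaskScheduler method through ScalaTest's PrivateMethodTester, which captures a method name as a symbol and invokes it reflectively. A standalone sketch of the pattern (Box, secret, and BoxSuite are illustrative names, not Spark code):

    import org.scalatest.{FunSuite, PrivateMethodTester}

    object Box { private def secret(x: Int): Int = x * 2 }

    class BoxSuite extends FunSuite with PrivateMethodTester {
      test("invoke a private method by symbol") {
        // PrivateMethod captures the method name; invokePrivate reflects
        // on the target object to call it with the given arguments.
        val secret = PrivateMethod[Int]('secret)
        assert((Box invokePrivate secret(21)) === 42)
      }
    }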