Posted to commits@spark.apache.org by pw...@apache.org on 2013/12/25 01:35:35 UTC

[04/20] git commit: Cleaned up imports and fixed test bug

Cleaned up imports and fixed test bug


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/fb64828b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/fb64828b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/fb64828b

Branch: refs/heads/master
Commit: fb64828b0b573f3a77938592f168af7aa3a2b6c5
Parents: a124658
Author: Kay Ousterhout <ka...@gmail.com>
Authored: Thu Oct 31 23:42:56 2013 -0700
Committer: Kay Ousterhout <ka...@gmail.com>
Committed: Thu Oct 31 23:42:56 2013 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/scheduler/TaskScheduler.scala    | 3 +--
 .../scala/org/apache/spark/scheduler/TaskSetManager.scala   | 1 -
 .../org/apache/spark/scheduler/DAGSchedulerSuite.scala      | 9 +++++----
 3 files changed, 6 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/fb64828b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
index 3f694dd..b4ec695 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
@@ -27,7 +27,6 @@ import scala.collection.mutable.HashSet
 
 import org.apache.spark._
 import org.apache.spark.TaskState.TaskState
-import org.apache.spark.scheduler._
 import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 
 /**
@@ -449,7 +448,7 @@ private[spark] class TaskScheduler(val sc: SparkContext, isLocal: Boolean = fals
 }
 
 
-object TaskScheduler {
+private[spark] object TaskScheduler {
   /**
    * Used to balance containers across hosts.
    *

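The hunk above drops a wildcard self-import (TaskScheduler.scala already lives in org.apache.spark.scheduler, so "import org.apache.spark.scheduler._" brings nothing new into scope) and marks the companion object private[spark] to match the visibility of the class itself. A minimal Scala sketch of that visibility pattern, using a hypothetical package and names rather than Spark source:

    // Illustrative only: class and companion object both scoped to the
    // enclosing "spark" package, so helper members stay out of the public API.
    package demo.spark.scheduler

    private[spark] class ExampleScheduler {
      def defaultParallelism: Int = ExampleScheduler.MinSlots
    }

    private[spark] object ExampleScheduler {
      // Same visibility as the class: usable anywhere under demo.spark,
      // invisible to code outside that package.
      val MinSlots: Int = 2
    }
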
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/fb64828b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
index 13271b1..90b6519 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
@@ -28,7 +28,6 @@ import scala.math.min
 import org.apache.spark.{ExceptionFailure, FetchFailed, Logging, Resubmitted, SparkEnv,
   Success, TaskEndReason, TaskKilled, TaskResultLost, TaskState}
 import org.apache.spark.TaskState.TaskState
-import org.apache.spark.scheduler._
 import org.apache.spark.util.{SystemClock, Clock}
 
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/fb64828b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index 394a1bb..5b5a217 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -36,14 +36,15 @@ import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
 /**
  * TaskScheduler that records the task sets that the DAGScheduler requested executed.
  */
-class TaskSetRecordingTaskScheduler(sc: SparkContext) extends TaskScheduler(sc) {
+class TaskSetRecordingTaskScheduler(sc: SparkContext,
+  mapOutputTrackerMaster: MapOutputTrackerMaster) extends TaskScheduler(sc) {
   /** Set of TaskSets the DAGScheduler has requested executed. */
   val taskSets = scala.collection.mutable.Buffer[TaskSet]()
   override def start() = {}
   override def stop() = {}
   override def submitTasks(taskSet: TaskSet) = {
     // normally done by TaskSetManager
-    taskSet.tasks.foreach(_.epoch = mapOutputTracker.getEpoch)
+    taskSet.tasks.foreach(_.epoch = mapOutputTrackerMaster.getEpoch)
     taskSets += taskSet
   }
   override def cancelTasks(stageId: Int) {}
@@ -97,11 +98,11 @@ class DAGSchedulerSuite extends FunSuite with BeforeAndAfter with LocalSparkCont
 
   before {
     sc = new SparkContext("local", "DAGSchedulerSuite")
-    taskScheduler = new TaskSetRecordingTaskScheduler(sc)
+    mapOutputTracker = new MapOutputTrackerMaster()
+    taskScheduler = new TaskSetRecordingTaskScheduler(sc, mapOutputTracker)
     taskScheduler.taskSets.clear()
     cacheLocations.clear()
     results.clear()
-    mapOutputTracker = new MapOutputTrackerMaster()
     scheduler = new DAGScheduler(taskScheduler, mapOutputTracker, blockManagerMaster, null) {
       override def runLocally(job: ActiveJob) {
         // don't bother with the thread while unit testing
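
The DAGSchedulerSuite change is the test-bug fix from the commit message: the MapOutputTrackerMaster is now created before the recording scheduler and passed in through its constructor, so submitTasks stamps each task's epoch from the same tracker instance that the DAGScheduler under test receives, rather than from a tracker resolved some other way. A small self-contained Scala sketch of that constructor-injection pattern (hypothetical names, not Spark source):

    // Illustrative only: build the shared tracker first, then hand the same
    // instance to the recording test double, so it reads the same epoch the
    // scheduler under test will see.
    class EpochTracker {
      private var epoch = 0L
      def getEpoch: Long = epoch
      def incrementEpoch(): Unit = { epoch += 1 }
    }

    class RecordingScheduler(tracker: EpochTracker) {
      val submittedEpochs = scala.collection.mutable.Buffer[Long]()
      def submitTasks(): Unit = {
        // Mirrors taskSet.tasks.foreach(_.epoch = mapOutputTrackerMaster.getEpoch)
        submittedEpochs += tracker.getEpoch
      }
    }

    object Demo extends App {
      val tracker = new EpochTracker()                // created first, as in the test's before block
      val scheduler = new RecordingScheduler(tracker) // injected explicitly
      tracker.incrementEpoch()
      scheduler.submitTasks()
      assert(scheduler.submittedEpochs == Seq(1L))    // sees the shared tracker's epoch
    }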