Posted to commits@spark.apache.org by rx...@apache.org on 2014/01/13 07:36:10 UTC

[2/3] git commit: Removing mentions of spark.hostPort, mostly in tests

Removing mentions of spark.hostPort, mostly in tests


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/0bb33076
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/0bb33076
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/0bb33076

Branch: refs/heads/master
Commit: 0bb33076e2c12947edc91ff61f98e4b72d881ec3
Parents: 0d4886c
Author: Patrick Wendell <pw...@gmail.com>
Authored: Sun Jan 12 16:53:58 2014 -0800
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Sun Jan 12 16:53:58 2014 -0800

----------------------------------------------------------------------
 bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala     | 1 -
 .../org/apache/spark/executor/CoarseGrainedExecutorBackend.scala | 1 -
 .../spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala  | 2 +-
 core/src/test/scala/org/apache/spark/LocalSparkContext.scala     | 1 -
 core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala | 1 -
 .../test/scala/org/apache/spark/storage/BlockManagerSuite.scala  | 4 ----
 repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala        | 2 --
 .../src/main/scala/org/apache/spark/streaming/Checkpoint.scala   | 2 +-
 .../org/apache/spark/streaming/util/MasterFailureTest.scala      | 2 --
 .../org/apache/spark/streaming/LocalJavaStreamingContext.java    | 2 --
 .../test/scala/org/apache/spark/streaming/CheckpointSuite.scala  | 1 -
 .../test/scala/org/apache/spark/streaming/TestSuiteBase.scala    | 1 -
 12 files changed, 2 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
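
For readers scanning the hunks below: every test-suite change follows the same pattern, dropping the now-unused spark.hostPort property from teardown while keeping the spark.driver.port cleanup. A minimal sketch of the teardown that remains after this change (the helper name stopAndClear is illustrative, not from the repo; the body mirrors the cleanup shown in the hunks):

    import org.apache.spark.SparkContext

    // Illustrative sketch only: stop the context, then clear the driver port property.
    // spark.hostPort no longer needs to be cleared because it is no longer set anywhere.
    def stopAndClear(sc: SparkContext) {
      if (sc != null) {
        sc.stop()
      }
      // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
      System.clearProperty("spark.driver.port")
    }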


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
----------------------------------------------------------------------
diff --git a/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
index 7b954a4..9c37fad 100644
--- a/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
+++ b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
@@ -38,7 +38,6 @@ class BagelSuite extends FunSuite with Assertions with BeforeAndAfter with Timeo
     }
     // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
     System.clearProperty("spark.driver.port")
-    System.clearProperty("spark.hostPort")
   }
 
   test("halting by voting") {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index f9e43e0..45b43b4 100644
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -103,7 +103,6 @@ private[spark] object CoarseGrainedExecutorBackend {
       indestructible = true, conf = new SparkConf)
     // set it
     val sparkHostPort = hostname + ":" + boundPort
-//    conf.set("spark.hostPort",  sparkHostPort)
     actorSystem.actorOf(
       Props(classOf[CoarseGrainedExecutorBackend], driverUrl, executorId, sparkHostPort, cores),
       name = "Executor")

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
index 8d596a7..0208388 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
@@ -165,7 +165,7 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, actorSystem: A
   override def start() {
     val properties = new ArrayBuffer[(String, String)]
     for ((key, value) <- scheduler.sc.conf.getAll) {
-      if (key.startsWith("spark.") && !key.equals("spark.hostPort")) {
+      if (key.startsWith("spark.")) {
         properties += ((key, value))
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
index 8dd5786..3ac7061 100644
--- a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -53,7 +53,6 @@ object LocalSparkContext {
     }
     // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
     System.clearProperty("spark.driver.port")
-    System.clearProperty("spark.hostPort")
   }
 
   /** Runs `f` by passing in `sc` and ensures that `sc` is stopped. */

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index afc1bef..930c252 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -99,7 +99,6 @@ class MapOutputTrackerSuite extends FunSuite with LocalSparkContext {
     val hostname = "localhost"
     val (actorSystem, boundPort) = AkkaUtils.createActorSystem("spark", hostname, 0, conf = conf)
     System.setProperty("spark.driver.port", boundPort.toString)    // Will be cleared by LocalSparkContext
-    System.setProperty("spark.hostPort", hostname + ":" + boundPort)
 
     val masterTracker = new MapOutputTrackerMaster(conf)
     masterTracker.trackerActor = actorSystem.actorOf(

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index f60ce27..18aa587 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -53,7 +53,6 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
     val (actorSystem, boundPort) = AkkaUtils.createActorSystem("test", "localhost", 0, conf = conf)
     this.actorSystem = actorSystem
     conf.set("spark.driver.port", boundPort.toString)
-    conf.set("spark.hostPort", "localhost:" + boundPort)
 
     master = new BlockManagerMaster(
       actorSystem.actorOf(Props(new BlockManagerMasterActor(true, conf))), conf)
@@ -65,13 +64,10 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
     conf.set("spark.storage.disableBlockManagerHeartBeat", "true")
     val initialize = PrivateMethod[Unit]('initialize)
     SizeEstimator invokePrivate initialize()
-    // Set some value ...
-    conf.set("spark.hostPort", Utils.localHostName() + ":" + 1111)
   }
 
   after {
     System.clearProperty("spark.driver.port")
-    System.clearProperty("spark.hostPort")
 
     if (store != null) {
       store.stop()

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
----------------------------------------------------------------------
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index daaa2a0..8aad273 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -35,7 +35,6 @@ class ReplSuite extends FunSuite {
     }
     // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
     System.clearProperty("spark.driver.port")
-    System.clearProperty("spark.hostPort")
     return out.toString
   }
 
@@ -75,7 +74,6 @@ class ReplSuite extends FunSuite {
 
     interp.sparkContext.stop()
     System.clearProperty("spark.driver.port")
-    System.clearProperty("spark.hostPort")
   }
 
   test("simple foreach with accumulator") {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala b/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
index 1249ef4..f586bae 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
@@ -46,7 +46,7 @@ class Checkpoint(@transient ssc: StreamingContext, val checkpointTime: Time)
 
   // These should be unset when a checkpoint is deserialized,
   // otherwise the SparkContext won't initialize correctly.
-  sparkConf.remove("spark.hostPort").remove("spark.driver.host").remove("spark.driver.port")
+  sparkConf.remove("spark.driver.host").remove("spark.driver.port")
 
   def validate() {
     assert(master != null, "Checkpoint.master is null")

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala
index 162b19d..592e847 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala
@@ -186,7 +186,6 @@ object MasterFailureTest extends Logging {
 
     // Setup the streaming computation with the given operation
     System.clearProperty("spark.driver.port")
-    System.clearProperty("spark.hostPort")
     val ssc = new StreamingContext("local[4]", "MasterFailureTest", batchDuration, null, Nil, Map())
     ssc.checkpoint(checkpointDir.toString)
     val inputStream = ssc.textFileStream(testDir.toString)
@@ -233,7 +232,6 @@ object MasterFailureTest extends Logging {
         // (iii) Its not timed out yet
         System.clearProperty("spark.streaming.clock")
         System.clearProperty("spark.driver.port")
-        System.clearProperty("spark.hostPort")
         ssc.start()
         val startTime = System.currentTimeMillis()
         while (!killed && !isLastOutputGenerated && !isTimedOut) {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/streaming/src/test/java/org/apache/spark/streaming/LocalJavaStreamingContext.java
----------------------------------------------------------------------
diff --git a/streaming/src/test/java/org/apache/spark/streaming/LocalJavaStreamingContext.java b/streaming/src/test/java/org/apache/spark/streaming/LocalJavaStreamingContext.java
index 34bee56..849bbf1 100644
--- a/streaming/src/test/java/org/apache/spark/streaming/LocalJavaStreamingContext.java
+++ b/streaming/src/test/java/org/apache/spark/streaming/LocalJavaStreamingContext.java
@@ -28,7 +28,6 @@ public abstract class LocalJavaStreamingContext {
     @Before
     public void setUp() {
         System.clearProperty("spark.driver.port");
-        System.clearProperty("spark.hostPort");
         System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock");
         ssc = new JavaStreamingContext("local[2]", "test", new Duration(1000));
         ssc.checkpoint("checkpoint");
@@ -41,6 +40,5 @@ public abstract class LocalJavaStreamingContext {
 
         // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
         System.clearProperty("spark.driver.port");
-        System.clearProperty("spark.hostPort");
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
----------------------------------------------------------------------
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
index 6499de9..65e7e5d 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
@@ -336,7 +336,6 @@ class CheckpointSuite extends TestSuiteBase {
     )
     ssc = new StreamingContext(checkpointDir)
     System.clearProperty("spark.driver.port")
-    System.clearProperty("spark.hostPort")
     ssc.start()
     val outputNew = advanceTimeWithRealDelay[V](ssc, nextNumBatches)
     // the first element will be re-processed data of the last batch before restart

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0bb33076/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
----------------------------------------------------------------------
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
index b20d02f..1979a0c 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
@@ -156,7 +156,6 @@ trait TestSuiteBase extends FunSuite with BeforeAndAfter with Logging {
   def afterFunction() {
     // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
     System.clearProperty("spark.driver.port")
-    System.clearProperty("spark.hostPort")
   }
 
   before(beforeFunction)