You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by an...@apache.org on 2016/02/18 21:14:34 UTC
spark git commit: [SPARK-13371][CORE][STRING]
TaskSetManager.dequeueSpeculativeTask compares Option and String directly.
Repository: spark
Updated Branches:
refs/heads/master 892b2dd6d -> 78562535f
[SPARK-13371][CORE][STRING] TaskSetManager.dequeueSpeculativeTask compares Option and String directly.
## What changes were proposed in this pull request?
Fix some comparisons between unequal types that cause IJ warnings and, in at least one case, a likely bug (TaskSetManager)
## How was this patch tested?
Running Jenkins tests
Author: Sean Owen <so...@cloudera.com>
Closes #11253 from srowen/SPARK-13371.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/78562535
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/78562535
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/78562535
Branch: refs/heads/master
Commit: 78562535feb6e214520b29e0bbdd4b1302f01e93
Parents: 892b2dd
Author: Sean Owen <so...@cloudera.com>
Authored: Thu Feb 18 12:14:30 2016 -0800
Committer: Andrew Or <an...@databricks.com>
Committed: Thu Feb 18 12:14:30 2016 -0800
----------------------------------------------------------------------
.../scala/org/apache/spark/deploy/FaultToleranceTest.scala | 2 +-
.../main/scala/org/apache/spark/scheduler/TaskSetManager.scala | 2 +-
core/src/test/scala/org/apache/spark/CheckpointSuite.scala | 4 ++--
core/src/test/scala/org/apache/spark/PartitioningSuite.scala | 4 ++--
core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala | 6 +++++-
5 files changed, 11 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/78562535/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
index 15d220d..434aadd 100644
--- a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
@@ -252,7 +252,7 @@ private object FaultToleranceTest extends App with Logging {
val f = Future {
try {
val res = sc.parallelize(0 until 10).collect()
- assertTrue(res.toList == (0 until 10))
+ assertTrue(res.toList == (0 until 10).toList)
true
} catch {
case e: Exception =>
http://git-wip-us.apache.org/repos/asf/spark/blob/78562535/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
index 05cfa52..2b0eab7 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
@@ -338,7 +338,7 @@ private[spark] class TaskSetManager(
if (TaskLocality.isAllowed(locality, TaskLocality.RACK_LOCAL)) {
for (rack <- sched.getRackForHost(host)) {
for (index <- speculatableTasks if canRunOnHost(index)) {
- val racks = tasks(index).preferredLocations.map(_.host).map(sched.getRackForHost)
+ val racks = tasks(index).preferredLocations.map(_.host).flatMap(sched.getRackForHost)
if (racks.contains(rack)) {
speculatableTasks -= index
return Some((index, TaskLocality.RACK_LOCAL))
http://git-wip-us.apache.org/repos/asf/spark/blob/78562535/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index ce35856..9f94e36 100644
--- a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -54,7 +54,7 @@ trait RDDCheckpointTester { self: SparkFunSuite =>
// Generate the final RDD using given RDD operation
val baseRDD = generateFatRDD()
val operatedRDD = op(baseRDD)
- val parentRDD = operatedRDD.dependencies.headOption.orNull
+ val parentDependency = operatedRDD.dependencies.headOption.orNull
val rddType = operatedRDD.getClass.getSimpleName
val numPartitions = operatedRDD.partitions.length
@@ -82,7 +82,7 @@ trait RDDCheckpointTester { self: SparkFunSuite =>
}
// Test whether dependencies have been changed from its earlier parent RDD
- assert(operatedRDD.dependencies.head.rdd != parentRDD)
+ assert(operatedRDD.dependencies.head != parentDependency)
// Test whether the partitions have been changed from its earlier partitions
assert(operatedRDD.partitions.toList != partitionsBeforeCheckpoint.toList)
http://git-wip-us.apache.org/repos/asf/spark/blob/78562535/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index aa80287..3d31c78 100644
--- a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -163,8 +163,8 @@ class PartitioningSuite extends SparkFunSuite with SharedSparkContext with Priva
val hashP2 = new HashPartitioner(2)
assert(rangeP2 === rangeP2)
assert(hashP2 === hashP2)
- assert(hashP2 != rangeP2)
- assert(rangeP2 != hashP2)
+ assert(hashP2 !== rangeP2)
+ assert(rangeP2 !== hashP2)
}
test("partitioner preservation") {
http://git-wip-us.apache.org/repos/asf/spark/blob/78562535/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
index aa22f3b..b0a35fe 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -289,7 +289,11 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
JInt(stageId) <- stage \ "stageId"
JInt(attemptId) <- stage \ "attemptId"
} {
- val exp = if (attemptId == 0 && stageId == 1) StageStatus.FAILED else StageStatus.COMPLETE
+ val exp = if (attemptId.toInt == 0 && stageId.toInt == 1) {
+ StageStatus.FAILED
+ } else {
+ StageStatus.COMPLETE
+ }
status should be (exp.name())
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org