You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by an...@apache.org on 2015/06/07 06:09:58 UTC
spark git commit: [SPARK-8136] [YARN] Fix flakiness in
YarnClusterSuite.
Repository: spark
Updated Branches:
refs/heads/master 18c4fcebb -> ed2cc3ee8
[SPARK-8136] [YARN] Fix flakiness in YarnClusterSuite.
Instead of actually downloading the logs, just verify that the log link is a valid URL
and is in the expected format.
Author: Hari Shreedharan <hs...@apache.org>
Closes #6680 from harishreedharan/simplify-am-log-tests and squashes the following commits:
3183aeb [Hari Shreedharan] Remove check for hostname which can fail on machines with several hostnames. Removed some unused imports.
50d69a7 [Hari Shreedharan] [SPARK-8136][YARN] Fix flakiness in YarnClusterSuite.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ed2cc3ee
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ed2cc3ee
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ed2cc3ee
Branch: refs/heads/master
Commit: ed2cc3ee890694ca0c1fa0bbc7186c8b80da3fab
Parents: 18c4fce
Author: Hari Shreedharan <hs...@apache.org>
Authored: Sat Jun 6 21:09:56 2015 -0700
Committer: Andrew Or <an...@databricks.com>
Committed: Sat Jun 6 21:09:56 2015 -0700
----------------------------------------------------------------------
.../apache/spark/deploy/yarn/YarnClusterSuite.scala | 16 +++++++++-------
1 file changed, 9 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/ed2cc3ee/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
----------------------------------------------------------------------
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index bc42e12..93d587d 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -18,12 +18,12 @@
package org.apache.spark.deploy.yarn
import java.io.{File, FileOutputStream, OutputStreamWriter}
+import java.net.URL
import java.util.Properties
import java.util.concurrent.TimeUnit
import scala.collection.JavaConversions._
import scala.collection.mutable
-import scala.io.Source
import com.google.common.base.Charsets.UTF_8
import com.google.common.io.ByteStreams
@@ -344,18 +344,20 @@ private object YarnClusterDriver extends Logging with Matchers {
assert(info.logUrlMap.nonEmpty)
}
- // If we are running in yarn-cluster mode, verify that driver logs are downloadable.
+ // If we are running in yarn-cluster mode, verify that the driver log links are present
+ // and are in the expected format.
if (conf.get("spark.master") == "yarn-cluster") {
assert(listener.driverLogs.nonEmpty)
val driverLogs = listener.driverLogs.get
assert(driverLogs.size === 2)
assert(driverLogs.containsKey("stderr"))
assert(driverLogs.containsKey("stdout"))
- val stderr = driverLogs("stderr") // YARN puts everything in stderr.
- val lines = Source.fromURL(stderr).getLines()
- // Look for a line that contains YarnClusterSchedulerBackend, since that is guaranteed in
- // cluster mode.
- assert(lines.exists(_.contains("YarnClusterSchedulerBackend")))
+ val urlStr = driverLogs("stderr")
+ // Ensure that this is a valid URL, else this will throw an exception
+ new URL(urlStr)
+ val containerId = YarnSparkHadoopUtil.get.getContainerId
+ val user = Utils.getCurrentUserName()
+ assert(urlStr.endsWith(s"/node/containerlogs/$containerId/$user/stderr?start=0"))
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org