You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Sean Owen (JIRA)" <ji...@apache.org> on 2015/05/15 14:53:00 UTC

[jira] [Resolved] (SPARK-5271) PySpark History Web UI issues

     [ https://issues.apache.org/jira/browse/SPARK-5271?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Sean Owen resolved SPARK-5271.
------------------------------
    Resolution: Not A Problem

> PySpark History Web UI issues
> -----------------------------
>
>                 Key: SPARK-5271
>                 URL: https://issues.apache.org/jira/browse/SPARK-5271
>             Project: Spark
>          Issue Type: Bug
>          Components: PySpark, Web UI
>    Affects Versions: 1.2.0
>         Environment: PySpark 1.2.0 in yarn-client mode
>            Reporter: Andrey Zimovnov
>
> After a successful run of a PySpark app via spark-submit in yarn-client mode on a Hadoop 2.4 cluster, the History UI shows the same error as in issue SPARK-3898.
> {code}
> App Name:<Not Started> Started:1970/01/01 07:59:59 Spark User:<Not Started>
> Last Updated:2014/10/10 14:50:39
> Exception message:
> 2014-10-10 14:51:14,284 - ERROR - org.apache.spark.Logging$class.logError(Logging.scala:96) - qtp1594785497-16851 -Exception in parsing Spark event log hdfs://wscluster/sparklogs/24.3g_15_5g_2c-1412923684977/EVENT_LOG_1
> org.json4s.package$MappingException: Did not find value which can be converted into int
> at org.json4s.reflect.package$.fail(package.scala:96)
> at org.json4s.Extraction$.convert(Extraction.scala:554)
> at org.json4s.Extraction$.extract(Extraction.scala:331)
> at org.json4s.Extraction$.extract(Extraction.scala:42)
> at org.json4s.ExtractableJsonAstNode.extract(ExtractableJsonAstNode.scala:21)
> at org.apache.spark.util.JsonProtocol$.blockManagerIdFromJson(JsonProtocol.scala:647)
> at org.apache.spark.util.JsonProtocol$.blockManagerAddedFromJson(JsonProtocol.scala:468)
> at org.apache.spark.util.JsonProtocol$.sparkEventFromJson(JsonProtocol.scala:404)
> at org.apache.spark.scheduler.ReplayListenerBus$$anonfun$replay$2$$anonfun$apply$1.apply(ReplayListenerBus.scala:71)
> at org.apache.spark.scheduler.ReplayListenerBus$$anonfun$replay$2$$anonfun$apply$1.apply(ReplayListenerBus.scala:69)
> at scala.collection.Iterator$class.foreach(Iterator.scala:727)
> at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
> at org.apache.spark.scheduler.ReplayListenerBus$$anonfun$replay$2.apply(ReplayListenerBus.scala:69)
> at org.apache.spark.scheduler.ReplayListenerBus$$anonfun$replay$2.apply(ReplayListenerBus.scala:55)
> at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
> at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:34)
> at org.apache.spark.scheduler.ReplayListenerBus.replay(ReplayListenerBus.scala:55)
> at org.apache.spark.deploy.history.FsHistoryProvider.org$apache$spark$deploy$history$FsHistoryProvider$$loadAppInfo(FsHistoryProvider.scala:181)
> at org.apache.spark.deploy.history.FsHistoryProvider.getAppUI(FsHistoryProvider.scala:99)
> at org.apache.spark.deploy.history.HistoryServer$$anon$3.load(HistoryServer.scala:55)
> at org.apache.spark.deploy.history.HistoryServer$$anon$3.load(HistoryServer.scala:53)
> at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
> at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
> at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> at org.apache.spark.deploy.history.HistoryServer$$anon$1.doGet(HistoryServer.scala:88)
> at javax.servlet.http.HttpServlet.service(HttpServlet.java:735)
> at javax.servlet.http.HttpServlet.service(HttpServlet.java:848)
> at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:684)
> at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501)
> at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428)
> at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020)
> at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135)
> at org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:255)
> at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116)
> at org.eclipse.jetty.server.Server.handle(Server.java:370)
> at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:494)
> at org.eclipse.jetty.server.AbstractHttpConnection.headerComplete(AbstractHttpConnection.java:971)
> at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.headerComplete(AbstractHttpConnection.java:1033)
> at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:644)
> at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:235)
> at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82)
> at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:667)
> at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52)
> at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608)
> at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543)
> at java.lang.Thread.run(Thread.java:744)
> 2014-10-10 14:51:14,287 - ERROR - org.apache.spark.Logging$class.logError(Logging.scala:75) - qtp1594785497-16851 -Malformed line: {"Event":"SparkListenerBlockManagerAdded","Block Manager ID":
> {"Executor ID":"<driver>","Host":"np03","Port":57192}
> ,"Maximum Memory":3333968363,"Timestamp":1412923683861}
> {code}



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org