Posted to issues@spark.apache.org by "Reynold Xin (JIRA)" <ji...@apache.org> on 2016/07/09 03:18:11 UTC

[jira] [Resolved] (SPARK-16376) [Spark web UI]: HTTP ERROR 500 when using REST API "/applications/[app-id]/jobs" if array "stageIds" is empty

     [ https://issues.apache.org/jira/browse/SPARK-16376?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Reynold Xin resolved SPARK-16376.
---------------------------------
       Resolution: Fixed
         Assignee: Sean Owen
    Fix Version/s: 2.0.0

> [Spark web UI]: HTTP ERROR 500 when using REST API "/applications/[app-id]/jobs" if array "stageIds" is empty
> ------------------------------------------------------------------------------------------------------------
>
>                 Key: SPARK-16376
>                 URL: https://issues.apache.org/jira/browse/SPARK-16376
>             Project: Spark
>          Issue Type: Bug
>          Components: Web UI
>    Affects Versions: 2.0.0
>            Reporter: marymwu
>            Assignee: Sean Owen
>            Priority: Minor
>             Fix For: 2.0.0
>
>
> [Spark web UI]: HTTP ERROR 500 when using REST API "/applications/[app-id]/jobs" if the array "stageIds" is empty.
> See the attachment for reference.
> HTTP ERROR 500
> Problem accessing /api/v1/applications/application_1466239933301_175531/jobs. Reason:
>     Server Error
> Caused by:
> java.lang.UnsupportedOperationException: empty.max
> 	at scala.collection.TraversableOnce$class.max(TraversableOnce.scala:216)
> 	at scala.collection.AbstractTraversable.max(Traversable.scala:105)
> 	at org.apache.spark.status.api.v1.AllJobsResource$.convertJobData(AllJobsResource.scala:71)
> 	at org.apache.spark.status.api.v1.AllJobsResource$$anonfun$2$$anonfun$apply$2.apply(AllJobsResource.scala:46)
> 	at org.apache.spark.status.api.v1.AllJobsResource$$anonfun$2$$anonfun$apply$2.apply(AllJobsResource.scala:44)
> 	at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
> 	at scala.collection.immutable.List.foreach(List.scala:318)
> 	at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
> 	at org.apache.spark.status.api.v1.AllJobsResource$$anonfun$2.apply(AllJobsResource.scala:44)
> 	at org.apache.spark.status.api.v1.AllJobsResource$$anonfun$2.apply(AllJobsResource.scala:43)
> 	at scala.collection.TraversableLike$WithFilter$$anonfun$flatMap$2.apply(TraversableLike.scala:753)
> 	at scala.collection.immutable.List.foreach(List.scala:318)
> 	at scala.collection.TraversableLike$WithFilter.flatMap(TraversableLike.scala:752)
> 	at org.apache.spark.status.api.v1.AllJobsResource.jobsList(AllJobsResource.scala:43)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:606)
> 	at com.sun.jersey.spi.container.JavaMethodInvokerFactory$1.invoke(JavaMethodInvokerFactory.java:60)
> 	at com.sun.jersey.server.impl.model.method.dispatch.AbstractResourceMethodDispatchProvider$TypeOutInvoker._dispatch(AbstractResourceMethodDispatchProvider.java:185)
> 	at com.sun.jersey.server.impl.model.method.dispatch.ResourceJavaMethodDispatcher.dispatch(ResourceJavaMethodDispatcher.java:75)
> 	at com.sun.jersey.server.impl.uri.rules.HttpMethodRule.accept(HttpMethodRule.java:288)
> 	at com.sun.jersey.server.impl.uri.rules.SubLocatorRule.accept(SubLocatorRule.java:134)
> 	at com.sun.jersey.server.impl.uri.rules.RightHandPathRule.accept(RightHandPathRule.java:147)
> 	at com.sun.jersey.server.impl.uri.rules.ResourceClassRule.accept(ResourceClassRule.java:108)
> 	at com.sun.jersey.server.impl.uri.rules.RightHandPathRule.accept(RightHandPathRule.java:147)
> 	at com.sun.jersey.server.impl.uri.rules.RootResourceClassesRule.accept(RootResourceClassesRule.java:84)
> 	at com.sun.jersey.server.impl.application.WebApplicationImpl._handleRequest(WebApplicationImpl.java:1469)
> 	at com.sun.jersey.server.impl.application.WebApplicationImpl._handleRequest(WebApplicationImpl.java:1400)
> 	at com.sun.jersey.server.impl.application.WebApplicationImpl.handleRequest(WebApplicationImpl.java:1349)
> 	at com.sun.jersey.server.impl.application.WebApplicationImpl.handleRequest(WebApplicationImpl.java:1339)
> 	at com.sun.jersey.spi.container.servlet.WebComponent.service(WebComponent.java:416)
> 	at com.sun.jersey.spi.container.servlet.ServletContainer.service(ServletContainer.java:537)
> 	at com.sun.jersey.spi.container.servlet.ServletContainer.service(ServletContainer.java:699)
> 	at javax.servlet.http.HttpServlet.service(HttpServlet.java:848)
> 	at org.spark-project.jetty.servlet.ServletHolder.handle(ServletHolder.java:684)
> 	at org.spark-project.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1496)
> 	at org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter.doFilter(AmIpFilter.java:164)
> 	at org.spark-project.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1467)
> 	at org.spark-project.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:499)
> 	at org.spark-project.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086)
> 	at org.spark-project.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428)
> 	at org.spark-project.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020)
> 	at org.spark-project.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135)
> 	at org.spark-project.jetty.server.handler.GzipHandler.handle(GzipHandler.java:264)
> 	at org.spark-project.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:255)
> 	at org.spark-project.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116)
> 	at org.spark-project.jetty.server.Server.handle(Server.java:370)
> 	at org.spark-project.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:494)
> 	at org.spark-project.jetty.server.AbstractHttpConnection.headerComplete(AbstractHttpConnection.java:971)
> 	at org.spark-project.jetty.server.AbstractHttpConnection$RequestHandler.headerComplete(AbstractHttpConnection.java:1033)
> 	at org.spark-project.jetty.http.HttpParser.parseNext(HttpParser.java:644)
> 	at org.spark-project.jetty.http.HttpParser.parseAvailable(HttpParser.java:235)
> 	at org.spark-project.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82)
> 	at org.spark-project.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:667)
> 	at org.spark-project.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52)
> 	at org.spark-project.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608)
> 	at org.spark-project.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543)
> 	at java.lang.Thread.run(Thread.java:745)
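
For context, the 500 comes from Scala's TraversableOnce.max, which throws UnsupportedOperationException("empty.max") on an empty collection; AllJobsResource.convertJobData hits this when a job has no stage IDs. Below is a minimal standalone sketch of the failure mode and one possible guard; the names are illustrative only and are not the actual AllJobsResource code or the exact fix that was merged.

    object EmptyMaxGuard {
      def main(args: Array[String]): Unit = {
        val stageIds: Seq[Int] = Seq.empty   // a job whose "stageIds" array is empty

        // Calling stageIds.max here would throw
        // java.lang.UnsupportedOperationException: empty.max,
        // which is the exception surfaced as the HTTP 500 above.

        // Guarding with an emptiness check returns an Option instead of throwing:
        val lastStageId: Option[Int] =
          if (stageIds.isEmpty) None else Some(stageIds.max)

        println(lastStageId) // prints None for an empty sequence
      }
    }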



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org