Posted to issues@spark.apache.org by "Patrick Wendell (JIRA)" <ji...@apache.org> on 2014/05/06 21:01:21 UTC

[jira] [Resolved] (SPARK-1474) Spark on yarn assembly doesn't include org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter

     [ https://issues.apache.org/jira/browse/SPARK-1474?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Patrick Wendell resolved SPARK-1474.
------------------------------------

       Resolution: Fixed
    Fix Version/s: 1.0.0

Issue resolved by pull request 406
[https://github.com/apache/spark/pull/406]
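
The change itself is in the pull request linked above. For background only: the missing class, org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter, ships in Hadoop's hadoop-yarn-server-web-proxy module, so bundling it into a YARN build generally means declaring a dependency along these lines. This is an illustrative sbt sketch with a placeholder Hadoop version, not the actual change made in PR 406.

{code}
// Illustrative sbt fragment: hadoop-yarn-server-web-proxy is the Hadoop module
// that contains org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter.
// "hadoopVersion" below is a placeholder for whatever Hadoop version the build targets.
val hadoopVersion = "2.2.0" // placeholder

libraryDependencies += "org.apache.hadoop" % "hadoop-yarn-server-web-proxy" % hadoopVersion
{code}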

> Spark on yarn assembly doesn't include org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter
> -------------------------------------------------------------------------------------------------
>
>                 Key: SPARK-1474
>                 URL: https://issues.apache.org/jira/browse/SPARK-1474
>             Project: Spark
>          Issue Type: Bug
>            Reporter: Xuan Gong
>            Assignee: Thomas Graves
>             Fix For: 1.0.0
>
>
> Part of the error log:
> {code}
> 14/04/11 15:08:06 INFO JettyUtils: Adding filter: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter
> 14/04/11 15:08:06 WARN Holder: 
> java.lang.ClassNotFoundException: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter
> 	at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
> 	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
> 	at org.eclipse.jetty.util.Loader.loadClass(Loader.java:100)
> 	at org.eclipse.jetty.util.Loader.loadClass(Loader.java:79)
> 	at org.eclipse.jetty.servlet.Holder.doStart(Holder.java:107)
> 	at org.eclipse.jetty.servlet.FilterHolder.doStart(FilterHolder.java:90)
> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
> 	at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:768)
> 	at org.eclipse.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:265)
> 	at org.eclipse.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:717)
> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
> 	at org.eclipse.jetty.server.handler.HandlerCollection.doStart(HandlerCollection.java:229)
> 	at org.eclipse.jetty.server.handler.ContextHandlerCollection.doStart(ContextHandlerCollection.java:172)
> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
> 	at org.eclipse.jetty.server.handler.HandlerWrapper.doStart(HandlerWrapper.java:95)
> 	at org.eclipse.jetty.server.Server.doStart(Server.java:282)
> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
> 	at org.apache.spark.ui.JettyUtils$$anonfun$1.apply$mcV$sp(JettyUtils.scala:189)
> 	at org.apache.spark.ui.JettyUtils$$anonfun$1.apply(JettyUtils.scala:189)
> 	at org.apache.spark.ui.JettyUtils$$anonfun$1.apply(JettyUtils.scala:189)
> 	at scala.util.Try$.apply(Try.scala:161)
> 	at org.apache.spark.ui.JettyUtils$.connect$1(JettyUtils.scala:188)
> 	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:201)
> 	at org.apache.spark.ui.SparkUI.bind(SparkUI.scala:101)
> 	at org.apache.spark.SparkContext.<init>(SparkContext.scala:215)
> 	at org.apache.spark.SparkContext.<init>(SparkContext.scala:110)
> 	at org.apache.spark.SparkContext.<init>(SparkContext.scala:147)
> 	at org.apache.spark.examples.SparkPi$.main(SparkPi.scala:31)
> 	at org.apache.spark.examples.SparkPi.main(SparkPi.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2$$anonfun$run$1.apply$mcV$sp(ApplicationMaster.scala:184)
> 	at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:43)
> 	at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:42)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:394)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
> 	at org.apache.spark.deploy.SparkHadoopUtil.runAsUser(SparkHadoopUtil.scala:42)
> 	at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:178)
> 14/04/11 15:08:06 WARN AbstractLifeCycle: FAILED org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter-364e50ee: javax.servlet.UnavailableException: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter
> javax.servlet.UnavailableException: org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter
> 	at org.eclipse.jetty.servlet.Holder.doStart(Holder.java:114)
> 	at org.eclipse.jetty.servlet.FilterHolder.doStart(FilterHolder.java:90)
> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
> 	at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:768)
> 	at org.eclipse.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:265)
> 	at org.eclipse.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:717)
> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
> 	at org.eclipse.jetty.server.handler.HandlerCollection.doStart(HandlerCollection.java:229)
> 	at org.eclipse.jetty.server.handler.ContextHandlerCollection.doStart(ContextHandlerCollection.java:172)
> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
> 	at org.eclipse.jetty.server.handler.HandlerWrapper.doStart(HandlerWrapper.java:95)
> 	at org.eclipse.jetty.server.Server.doStart(Server.java:282)
> 	at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
> 	at org.apache.spark.ui.JettyUtils$$anonfun$1.apply$mcV$sp(JettyUtils.scala:189)
> 	at org.apache.spark.ui.JettyUtils$$anonfun$1.apply(JettyUtils.scala:189)
> 	at org.apache.spark.ui.JettyUtils$$anonfun$1.apply(JettyUtils.scala:189)
> 	at scala.util.Try$.apply(Try.scala:161)
> 	at org.apache.spark.ui.JettyUtils$.connect$1(JettyUtils.scala:188)
> 	at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:201)
> 	at org.apache.spark.ui.SparkUI.bind(SparkUI.scala:101)
> 	at org.apache.spark.SparkContext.<init>(SparkContext.scala:215)
> 	at org.apache.spark.SparkContext.<init>(SparkContext.scala:110)
> 	at org.apache.spark.SparkContext.<init>(SparkContext.scala:147)
> 	at org.apache.spark.examples.SparkPi$.main(SparkPi.scala:31)
> 	at org.apache.spark.examples.SparkPi.main(SparkPi.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2$$anonfun$run$1.apply$mcV$sp(ApplicationMaster.scala:184)
> 	at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:43)
> 	at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:42)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:394)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
> 	at org.apache.spark.deploy.SparkHadoopUtil.runAsUser(SparkHadoopUtil.scala:42)
> 	at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:178)
> {code}
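
For anyone hitting the same ClassNotFoundException, a quick way to confirm whether the filter class is actually present on the classpath the application master uses is a small standalone check like the one below. This is a hypothetical diagnostic, not part of Spark; run it against the same assembly jar the failing application was launched with.

{code}
// Hypothetical diagnostic: is AmIpFilter loadable from the current classpath?
object AmIpFilterCheck {
  def main(args: Array[String]): Unit = {
    val className = "org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter"
    try {
      val cls = Class.forName(className)
      // getCodeSource reports which jar the class was actually loaded from
      println(s"Found $className in ${cls.getProtectionDomain.getCodeSource.getLocation}")
    } catch {
      case _: ClassNotFoundException =>
        println(s"$className is NOT on the classpath")
    }
  }
}
{code}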



--
This message was sent by Atlassian JIRA
(v6.2#6252)