Posted to user@spark.apache.org by Wwh 吴 <ww...@hotmail.com> on 2015/07/19 05:08:25 UTC

Spark 1.4 application throws java.lang.NoClassDefFoundError: javax/servlet/FilterRegistration

Hi,
I have built a Spark application with IDEA. When I run SparkPI, IDEA throws this exception:
Exception in thread "main" java.lang.NoClassDefFoundError: javax/servlet/FilterRegistration
    at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:136)
    at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:129)
    at org.spark-project.jetty.servlet.ServletContextHandler.<init>(ServletContextHandler.java:98)
    at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:108)
    at org.apache.spark.ui.JettyUtils$.createServletHandler(JettyUtils.scala:99)
    at org.apache.spark.ui.WebUI.attachPage(WebUI.scala:78)
    at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
    at org.apache.spark.ui.WebUI$$anonfun$attachTab$1.apply(WebUI.scala:62)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
    at org.apache.spark.ui.WebUI.attachTab(WebUI.scala:62)
    at org.apache.spark.ui.SparkUI.initialize(SparkUI.scala:61)
    at org.apache.spark.ui.SparkUI.<init>(SparkUI.scala:74)
    at org.apache.spark.ui.SparkUI$.create(SparkUI.scala:190)
    at org.apache.spark.ui.SparkUI$.createLiveUI(SparkUI.scala:141)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:440)
    at org.learn.SparkPI$.main(SparkPI.scala:27)
    at org.learn.SparkPI.main(SparkPI.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:134)
Caused by: java.lang.ClassNotFoundException: javax.servlet.FilterRegistration
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
And the application SparkPI looks like this:

package org.learn

import scala.math.random
import org.apache.spark.{SparkConf, SparkContext}

object SparkPI {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Spark Pi")
    conf.setMaster("local")
    val spark = new SparkContext(conf)
    //spark.addJar("D:\\BigdataResearch\\SparkLeaning\\out\\artifacts\\sparkleaning_jar\\sparkleaning.jar")
    val slices = if (args.length > 0) args(0).toInt else 2
    val n = 100000 * slices
    // Count random points in the unit square that fall inside the unit circle
    val count = spark.parallelize(1 to n, slices).map { i =>
      val x = random * 2 - 1
      val y = random * 2 - 1
      if (x * x + y * y < 1) 1 else 0
    }.reduce(_ + _)
    println("Pi is roughly " + 4.0 * count / n)
    spark.stop()
  }
}
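(For reference, the estimate works because a uniform random point in the square [-1, 1] x [-1, 1] falls inside the unit circle with probability pi/4, so count / n approximates pi/4 and 4.0 * count / n approximates pi.)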

And the build.sbt looks like this:

name := "SparkLearning"
version := "1.0"
scalaVersion := "2.10.4"
libraryDependencies ++= Seq(
  "org.apache.hive" % "hive-jdbc" % "0.13.1",
  "org.apache.hadoop" % "hadoop-common" % "2.2.0" excludeAll ExclusionRule(organization = "javax.servlet"),
  "org.apache.hadoop" % "hadoop-client" % "2.2.0" excludeAll ExclusionRule(organization = "javax.servlet"),
  "org.scalatest" %% "scalatest" % "2.2.0",
  "org.apache.spark" %% "spark-core" % "1.4.0",
  "org.apache.spark" %% "spark-sql" % "1.4.0",
  "org.apache.spark" %% "spark-hive" % "1.4.0",
  "org.apache.spark" %% "spark-mllib" % "1.4.0",
  "org.apache.spark" %% "spark-streaming" % "1.4.0",
  "org.apache.spark" %% "spark-streaming-kafka" % "1.4.0",
  "org.eclipse.jetty" % "jetty-servlet" % "8.1.14.v20131031",
  "org.eclipse.jetty" % "jetty-http" % "8.1.14.v20131031",
  "org.eclipse.jetty" % "jetty-server" % "8.1.14.v20131031",
  "org.eclipse.jetty" % "jetty-util" % "8.1.14.v20131031",
  "org.eclipse.jetty" % "jetty-security" % "8.1.14.v20131031",
  "org.eclipse.jetty" % "jetty-plus" % "8.1.14.v20131031",
  "org.apache.kafka" %% "kafka" % "0.8.2.1",
  "net.sf.json-lib" % "json-lib" % "2.4" from "http://gradle.artifactoryonline.com/gradle/libs/net/sf/json-lib/json-lib/2.4/json-lib-2.4-jdk15.jar",
  "com.databricks" %% "spark-csv" % "1.0.3"
)

Please give me some suggestions!

RE: Spark 1.4 application throws java.lang.NoClassDefFoundError: javax/servlet/FilterRegistration

Posted by Wwh 吴 <ww...@hotmail.com>.
It is caused by a dependency conflict: spark-core 1.4 needs javax.servlet-api 3.0.1, and the ExclusionRule(organization = "javax.servlet") on the Hadoop dependencies removes the servlet API from the classpath. It can be resolved by adding the dependency explicitly to build.sbt:

  "javax.servlet" % "javax.servlet-api" % "3.0.1",
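For illustration, a minimal sketch of how the relevant part of build.sbt might look after the fix (only the lines involved in the conflict are shown; artifact names and versions are taken from the build.sbt above):

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "1.4.0",
  "org.apache.hadoop" % "hadoop-common" % "2.2.0" excludeAll ExclusionRule(organization = "javax.servlet"),
  "org.apache.hadoop" % "hadoop-client" % "2.2.0" excludeAll ExclusionRule(organization = "javax.servlet"),
  // Restores the servlet 3.0 API (javax.servlet.FilterRegistration and friends)
  // that the exclusion rules above would otherwise strip from the classpath.
  "javax.servlet" % "javax.servlet-api" % "3.0.1"
)

If you want to confirm which jar (if any) provides the servlet API at runtime, a small diagnostic like the following can help (ServletApiCheck is a hypothetical helper, not part of the original code):

// Prints the code source (jar) of FilterRegistration, or fails with
// ClassNotFoundException if the class is missing from the classpath.
object ServletApiCheck {
  def main(args: Array[String]): Unit = {
    val cls = Class.forName("javax.servlet.FilterRegistration")
    // getCodeSource is null for bootstrap classes; otherwise it names the jar
    println(cls.getProtectionDomain.getCodeSource)
  }
}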