You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "sangshenghong (JIRA)" <ji...@apache.org> on 2016/08/23 02:36:20 UTC

[jira] [Updated] (SPARK-17196) Cannot initialize SparkContext in a Kerberos environment

     [ https://issues.apache.org/jira/browse/SPARK-17196?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

sangshenghong updated SPARK-17196:
----------------------------------
    Description: 
When we submit an application, we get the following exception:
java.lang.ClassNotFoundException: org.spark_project.protobuf.GeneratedMessage
	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
	at java.security.AccessController.doPrivileged(Native Method)
	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
	at com.spss.utilities.classloading.dynamicclassloader.ChildFirstDynamicClassLoader.loadClass(ChildFirstDynamicClassLoader.java:108)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
	at java.lang.Class.forName0(Native Method)
	at java.lang.Class.forName(Class.java:274)
	at akka.actor.ReflectiveDynamicAccess$$anonfun$getClassFor$1.apply(DynamicAccess.scala:67)
	at akka.actor.ReflectiveDynamicAccess$$anonfun$getClassFor$1.apply(DynamicAccess.scala:66)
	at scala.util.Try$.apply(Try.scala:161)
	at akka.actor.ReflectiveDynamicAccess.getClassFor(DynamicAccess.scala:66)
	at akka.serialization.Serialization$$anonfun$6.apply(Serialization.scala:181)
	at akka.serialization.Serialization$$anonfun$6.apply(Serialization.scala:181)
	at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
	at scala.collection.immutable.HashMap$HashMap1.foreach(HashMap.scala:224)
	at scala.collection.immutable.HashMap$HashTrieMap.foreach(HashMap.scala:403)
	at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
	at akka.serialization.Serialization.<init>(Serialization.scala:181)
	at akka.serialization.SerializationExtension$.createExtension(SerializationExtension.scala:15)
	at akka.serialization.SerializationExtension$.createExtension(SerializationExtension.scala:12)
	at akka.actor.ActorSystemImpl.registerExtension(ActorSystem.scala:713)
	at akka.actor.ExtensionId$class.apply(Extension.scala:79)
	at akka.serialization.SerializationExtension$.apply(SerializationExtension.scala:12)
	at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:175)
	at akka.actor.ActorSystemImpl.liftedTree2$1(ActorSystem.scala:620)
	at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:617)
	at akka.actor.ActorSystemImpl._start(ActorSystem.scala:617)
	at akka.actor.ActorSystemImpl.start(ActorSystem.scala:634)
	at akka.actor.ActorSystem$.apply(ActorSystem.scala:142)
	at akka.actor.ActorSystem$.apply(ActorSystem.scala:119)
	at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
	at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
	at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:52)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:55)
	at org.apache.spark.rpc.akka.AkkaRpcEnvFactory.create(AkkaRpcEnv.scala:253)
	at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
	at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
	at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
	at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:75)


Also, I checked the Spark assembly jar file and could not find the package org.spark_project; I only found org\spark-project\. In version 1.3.1, the package "org.spark_project" does exist.

  was:
When we submit an application, we get the following exception:
java.lang.ClassNotFoundException: org.spark_project.protobuf.GeneratedMessage
	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
	at java.security.AccessController.doPrivileged(Native Method)
	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
	at com.spss.utilities.classloading.dynamicclassloader.ChildFirstDynamicClassLoader.loadClass(ChildFirstDynamicClassLoader.java:108)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
	at java.lang.Class.forName0(Native Method)
	at java.lang.Class.forName(Class.java:274)
	at akka.actor.ReflectiveDynamicAccess$$anonfun$getClassFor$1.apply(DynamicAccess.scala:67)
	at akka.actor.ReflectiveDynamicAccess$$anonfun$getClassFor$1.apply(DynamicAccess.scala:66)
	at scala.util.Try$.apply(Try.scala:161)
	at akka.actor.ReflectiveDynamicAccess.getClassFor(DynamicAccess.scala:66)
	at akka.serialization.Serialization$$anonfun$6.apply(Serialization.scala:181)
	at akka.serialization.Serialization$$anonfun$6.apply(Serialization.scala:181)
	at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
	at scala.collection.immutable.HashMap$HashMap1.foreach(HashMap.scala:224)
	at scala.collection.immutable.HashMap$HashTrieMap.foreach(HashMap.scala:403)
	at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
	at akka.serialization.Serialization.<init>(Serialization.scala:181)
	at akka.serialization.SerializationExtension$.createExtension(SerializationExtension.scala:15)
	at akka.serialization.SerializationExtension$.createExtension(SerializationExtension.scala:12)
	at akka.actor.ActorSystemImpl.registerExtension(ActorSystem.scala:713)
	at akka.actor.ExtensionId$class.apply(Extension.scala:79)
	at akka.serialization.SerializationExtension$.apply(SerializationExtension.scala:12)
	at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:175)
	at akka.actor.ActorSystemImpl.liftedTree2$1(ActorSystem.scala:620)
	at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:617)
	at akka.actor.ActorSystemImpl._start(ActorSystem.scala:617)
	at akka.actor.ActorSystemImpl.start(ActorSystem.scala:634)
	at akka.actor.ActorSystem$.apply(ActorSystem.scala:142)
	at akka.actor.ActorSystem$.apply(ActorSystem.scala:119)
	at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
	at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
	at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:52)
	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
	at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:55)
	at org.apache.spark.rpc.akka.AkkaRpcEnvFactory.create(AkkaRpcEnv.scala:253)
	at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
	at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
	at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
	at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:75)


> Cannot initialize SparkContext in a Kerberos environment
> --------------------------------------------------------
>
>                 Key: SPARK-17196
>                 URL: https://issues.apache.org/jira/browse/SPARK-17196
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>    Affects Versions: 1.5.2
>         Environment: HDP 2.3.4(Spark 1.5.2)+Kerberos
>            Reporter: sangshenghong
>
> When we submit an application, we get the following exception:
> java.lang.ClassNotFoundException: org.spark_project.protobuf.GeneratedMessage
> 	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
> 	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> 	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> 	at com.spss.utilities.classloading.dynamicclassloader.ChildFirstDynamicClassLoader.loadClass(ChildFirstDynamicClassLoader.java:108)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> 	at java.lang.Class.forName0(Native Method)
> 	at java.lang.Class.forName(Class.java:274)
> 	at akka.actor.ReflectiveDynamicAccess$$anonfun$getClassFor$1.apply(DynamicAccess.scala:67)
> 	at akka.actor.ReflectiveDynamicAccess$$anonfun$getClassFor$1.apply(DynamicAccess.scala:66)
> 	at scala.util.Try$.apply(Try.scala:161)
> 	at akka.actor.ReflectiveDynamicAccess.getClassFor(DynamicAccess.scala:66)
> 	at akka.serialization.Serialization$$anonfun$6.apply(Serialization.scala:181)
> 	at akka.serialization.Serialization$$anonfun$6.apply(Serialization.scala:181)
> 	at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
> 	at scala.collection.immutable.HashMap$HashMap1.foreach(HashMap.scala:224)
> 	at scala.collection.immutable.HashMap$HashTrieMap.foreach(HashMap.scala:403)
> 	at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
> 	at akka.serialization.Serialization.<init>(Serialization.scala:181)
> 	at akka.serialization.SerializationExtension$.createExtension(SerializationExtension.scala:15)
> 	at akka.serialization.SerializationExtension$.createExtension(SerializationExtension.scala:12)
> 	at akka.actor.ActorSystemImpl.registerExtension(ActorSystem.scala:713)
> 	at akka.actor.ExtensionId$class.apply(Extension.scala:79)
> 	at akka.serialization.SerializationExtension$.apply(SerializationExtension.scala:12)
> 	at akka.remote.RemoteActorRefProvider.init(RemoteActorRefProvider.scala:175)
> 	at akka.actor.ActorSystemImpl.liftedTree2$1(ActorSystem.scala:620)
> 	at akka.actor.ActorSystemImpl._start$lzycompute(ActorSystem.scala:617)
> 	at akka.actor.ActorSystemImpl._start(ActorSystem.scala:617)
> 	at akka.actor.ActorSystemImpl.start(ActorSystem.scala:634)
> 	at akka.actor.ActorSystem$.apply(ActorSystem.scala:142)
> 	at akka.actor.ActorSystem$.apply(ActorSystem.scala:119)
> 	at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
> 	at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
> 	at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:52)
> 	at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1920)
> 	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
> 	at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1911)
> 	at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:55)
> 	at org.apache.spark.rpc.akka.AkkaRpcEnvFactory.create(AkkaRpcEnv.scala:253)
> 	at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53)
> 	at org.apache.spark.SparkEnv$.create(SparkEnv.scala:254)
> 	at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:194)
> 	at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:277)
> 	at org.apache.spark.SparkContext.<init>(SparkContext.scala:450)
> 	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:61)
> 	at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:75)
> Also, I checked the Spark assembly jar file and could not find the package org.spark_project; I only found org\spark-project\. In version 1.3.1, the package "org.spark_project" does exist.



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org