Posted to user@spark.apache.org by charles_cai <16...@qq.com> on 2020/06/03 05:44:27 UTC

NoClassDefFoundError: scala/Product$class

Hi,

I run GATK MarkDuplicates in Spark mode and it throws a
*NoClassDefFoundError: scala/Product$class*. The GATK versions are 4.1.7 and
4.0.0; the environment is spark-3.0.0, scala-2.11.12.

*GATK commands:*

gatk MarkDuplicatesSpark \
-I hdfs://master2:9000/Drosophila/output/Drosophila.sorted.bam \
-O hdfs://master2:9000/Drosophila/output/Drosophila.sorted.markdup.bam \
-M hdfs://master2:9000/Drosophila/output/Drosophila.sorted.markdup_metrics.txt \
-- \
--spark-runner SPARK --spark-master spark://master2:7077

*error logs:*

Exception in thread "main" java.lang.NoClassDefFoundError: scala/Product$class
        at org.bdgenomics.adam.serialization.InputStreamWithDecoder.<init>(ADAMKryoRegistrator.scala:35)
        at org.bdgenomics.adam.serialization.AvroSerializer.<init>(ADAMKryoRegistrator.scala:45)
        at org.bdgenomics.adam.models.VariantContextSerializer.<init>(VariantContext.scala:94)
        at org.bdgenomics.adam.serialization.ADAMKryoRegistrator.registerClasses(ADAMKryoRegistrator.scala:179)
        at org.broadinstitute.hellbender.engine.spark.GATKRegistrator.registerClasses(GATKRegistrator.java:78)
        at org.apache.spark.serializer.KryoSerializer.$anonfun$newKryo$8(KryoSerializer.scala:170)
        at org.apache.spark.serializer.KryoSerializer.$anonfun$newKryo$8$adapted(KryoSerializer.scala:170)
        at scala.Option.foreach(Option.scala:407)
        at org.apache.spark.serializer.KryoSerializer.$anonfun$newKryo$5(KryoSerializer.scala:170)
        at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
        at org.apache.spark.util.Utils$.withContextClassLoader(Utils.scala:221)
        at org.apache.spark.serializer.KryoSerializer.newKryo(KryoSerializer.scala:161)
        at org.apache.spark.serializer.KryoSerializer$$anon$1.create(KryoSerializer.scala:102)
        at com.esotericsoftware.kryo.pool.KryoPoolQueueImpl.borrow(KryoPoolQueueImpl.java:48)
        at org.apache.spark.serializer.KryoSerializer$PoolWrapper.borrow(KryoSerializer.scala:109)
        at org.apache.spark.serializer.KryoSerializerInstance.borrowKryo(KryoSerializer.scala:336)
        at org.apache.spark.serializer.KryoSerializationStream.<init>(KryoSerializer.scala:256)
        at org.apache.spark.serializer.KryoSerializerInstance.serializeStream(KryoSerializer.scala:422)
        at org.apache.spark.broadcast.TorrentBroadcast$.blockifyObject(TorrentBroadcast.scala:309)
        at org.apache.spark.broadcast.TorrentBroadcast.writeBlocks(TorrentBroadcast.scala:137)
        at org.apache.spark.broadcast.TorrentBroadcast.<init>(TorrentBroadcast.scala:91)
        at org.apache.spark.broadcast.TorrentBroadcastFactory.newBroadcast(TorrentBroadcastFactory.scala:35)
        at org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:77)
        at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1494)
        at org.apache.spark.rdd.NewHadoopRDD.<init>(NewHadoopRDD.scala:80)
        at org.apache.spark.SparkContext.$anonfun$newAPIHadoopFile$2(SparkContext.scala:1235)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
        at org.apache.spark.SparkContext.withScope(SparkContext.scala:771)
        at org.apache.spark.SparkContext.newAPIHadoopFile(SparkContext.scala:1221)
        at org.apache.spark.api.java.JavaSparkContext.newAPIHadoopFile(JavaSparkContext.scala:484)
        at org.broadinstitute.hellbender.engine.spark.datasources.ReadsSparkSource.getParallelReads(ReadsSparkSource.java:112)
        at org.broadinstitute.hellbender.engine.spark.GATKSparkTool.getUnfilteredReads(GATKSparkTool.java:254)
        at org.broadinstitute.hellbender.engine.spark.GATKSparkTool.getReads(GATKSparkTool.java:220)
        at org.broadinstitute.hellbender.tools.spark.transforms.markduplicates.MarkDuplicatesSpark.runTool(MarkDuplicatesSpark.java:72)
        at org.broadinstitute.hellbender.engine.spark.GATKSparkTool.runPipeline(GATKSparkTool.java:387)
        at org.broadinstitute.hellbender.engine.spark.SparkCommandLineProgram.doWork(SparkCommandLineProgram.java:30)
        at org.broadinstitute.hellbender.cmdline.CommandLineProgram.runTool(CommandLineProgram.java:136)
        at org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMainPostParseArgs(CommandLineProgram.java:179)
        at org.broadinstitute.hellbender.cmdline.CommandLineProgram.instanceMain(CommandLineProgram.java:198)
        at org.broadinstitute.hellbender.Main.runCommandLineProgram(Main.java:152)
        at org.broadinstitute.hellbender.Main.mainEntry(Main.java:195)
        at org.broadinstitute.hellbender.Main.main(Main.java:275)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:928)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.ClassNotFoundException: scala.Product$class
        at java.lang.ClassLoader.findClass(ClassLoader.java:523)
        at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.java:35)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.java:40)
        at org.apache.spark.util.ChildFirstURLClassLoader.loadClass(ChildFirstURLClassLoader.java:48)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
        ... 55 more

Thanks
charles





Re: NoClassDefFoundError: scala/Product$class

Posted by Sean Owen <sr...@gmail.com>.
Spark 3 supports only Scala 2.12. This actually sounds like a third-party
library compiled for 2.11 or something.
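
One quick check, as a sketch (the fat-jar name assumes the standard GATK
4.1.7 release layout; adjust the path to your install): classes compiled
against Scala 2.11 reference scala/Product$class in their constant pool, a
symbol that no longer exists in Scala 2.12, so you can grep the bundled
bytecode for it:

# a match means the ADAM classes were compiled for Scala 2.11 and cannot
# load on Spark 3, which is built on Scala 2.12
unzip -p gatk-package-4.1.7.0-spark.jar 'org/bdgenomics/adam/*.class' \
    | strings | grep -m1 'scala/Product\$class'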

On Fri, Jun 5, 2020 at 11:11 PM charles_cai <16...@qq.com> wrote:

> Hi Pol,
>
> thanks for your suggestion. I am going to use Spark 3.0.0 for GPU
> acceleration, so I updated Scala to *version 2.12.11* and then to the latest
> *2.13*, but the error is still there. By the way, the Spark version is
> *spark-3.0.0-preview2-bin-without-hadoop*.
>
> Caused by: java.lang.ClassNotFoundException: scala.Product$class
>         at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
>
> Charles cai

Re: NoClassDefFoundError: scala/Product$class

Posted by charles_cai <16...@qq.com>.
org.bdgenomics.adam is one of the components of GATK, and I just downloaded
the release version from its GitHub website. However, when I build a new
Docker image with Spark 2.4.5 and Scala 2.12.4, it works well, which is what
confuses me.


root@master2:~# pyspark 
Python 2.7.17 (default, Apr 15 2020, 17:20:14) 
[GCC 7.5.0] on linux2
Type "help", "copyright", "credits" or "license" for more information.
20/06/08 01:44:16 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/  '_/
   /__ / .__/\_,_/_/ /_/\_\   version 2.4.5
      /_/

Using Python version 2.7.17 (default, Apr 15 2020 17:20:14)
SparkSession available as 'spark'.


root@master2:~# scala -version
Scala code runner version 2.12.4 -- Copyright 2002-2017, LAMP/EPFL and
Lightbend, Inc.
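
(Note: the `scala -version` output above is not what the job actually runs
on. spark-submit ignores the system Scala and uses the scala-library jar
bundled inside the Spark distribution. A sketch, assuming the standard Spark
tarball layout:

ls $SPARK_HOME/jars/scala-library-*.jar
# the spark-2.4.5 prebuilt distribution bundles scala-library-2.11.12.jar,
# which is why the 2.11-built ADAM classes load fine there; spark-3.0.0
# bundles a scala-library-2.12.x jar, where scala/Product$class is gone

That would explain why the same GATK release works on 2.4.5 but not 3.0.0.)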






Re: NoClassDefFoundError: scala/Product$class

Posted by James Moore <ja...@restphone.com>.
How are you depending on that org.bdgenomics.adam library? Maybe you're
pulling in the 2.11 version of it.
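
If the dependency comes in through your own build rather than the prebuilt
GATK jar, the resolved artifact name shows the Scala binary version directly.
A sketch, assuming a Maven build:

# print the resolved ADAM artifacts; the _2.11 / _2.12 suffix on the
# artifact name is the Scala binary version it was compiled against
mvn dependency:tree -Dincludes=org.bdgenomics.adam | grep adam
# e.g. adam-core-spark2_2.11 is a Scala 2.11 build and will not load on Spark 3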

Re: NoClassDefFoundError: scala/Product$class

Posted by charles_cai <16...@qq.com>.
Hi Pol, 

thanks for your suggestion. I am going to use Spark 3.0.0 for GPU
acceleration, so I updated Scala to *version 2.12.11* and then to the latest
*2.13*, but the error is still there. By the way, the Spark version is
*spark-3.0.0-preview2-bin-without-hadoop*.

Caused by: java.lang.ClassNotFoundException: scala.Product$class
        at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:351)

Charles cai





Re: NoClassDefFoundError: scala/Product$class

Posted by Pol Santamaria <po...@qbeast.io>.
Hi Charles,

I believe Spark 3.0 removed support for Scala 2.11, and that error is a
version compatibility issue. You should try Spark 2.4.5 with your current
setup (it works with Scala 2.11 by default).
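
A quick way to confirm which Scala your Spark build actually uses (this, not
the system-wide `scala -version`, is what the job runs on):

spark-submit --version
# the version banner includes a line like:
#   Using Scala version 2.11.12, Java HotSpot(TM) 64-Bit Server VM, ...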

Pol Santamaria

On Wed, Jun 3, 2020 at 7:44 AM charles_cai <16...@qq.com> wrote:

> Hi,
>
> I run GATK MarkDuplicates in Spark mode and it throws a
> *NoClassDefFoundError: scala/Product$class*. The GATK versions are 4.1.7 and
> 4.0.0; the environment is spark-3.0.0, scala-2.11.12.
>
> [GATK command and full stack trace snipped; identical to the original
> message at the top of this thread.]
>
> Thanks
> charles