Posted to users@zeppelin.apache.org by Nirav Patel <np...@xactlycorp.com> on 2016/11/17 22:24:26 UTC

zeppelin spark sql - ClassNotFoundException: $line70280873551.$read$

Recently I started getting the following error upon executing Spark SQL.


validInputDocs.createOrReplaceTempView("valInput")

%sql select count(*) from valInput // fails with ClassNotFoundException

But validInputDocs.show works just fine.

Any interpreter settings that may have affected the sqlContext or the classpath?
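
For context, the rough shape of the notebook is sketched below (the Doc case
class and the json path are stand-ins for my real code, not the actual names):

// Scala paragraph
import spark.implicits._
case class Doc(labelIdx: Int, label: String)                 // assumed schema
val inputDocsDs = spark.read.json("/data/docs.json").as[Doc] // assumed source
val validInputDocs = inputDocsDs.filter(doc => doc.labelIdx != -1)
validInputDocs.createOrReplaceTempView("valInput")

// %sql paragraph (the one that fails):
// select count(*) from valInput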


java.lang.ClassNotFoundException: $line70280873551.$read$
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:225)
at org.apache.spark.sql.catalyst.encoders.OuterScopes$$anonfun$getOuterScope$1.apply(OuterScopes.scala:62)
at org.apache.spark.sql.catalyst.expressions.objects.NewInstance$$anonfun$13.apply(objects.scala:238)
at org.apache.spark.sql.catalyst.expressions.objects.NewInstance$$anonfun$13.apply(objects.scala:238)
at scala.Option.map(Option.scala:146)
at org.apache.spark.sql.catalyst.expressions.objects.NewInstance.doGenCode(objects.scala:238)
at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
at org.apache.spark.sql.catalyst.expressions.objects.Invoke$$anonfun$6.apply(objects.scala:134)
at org.apache.spark.sql.catalyst.expressions.objects.Invoke$$anonfun$6.apply(objects.scala:134)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.immutable.List.foreach(List.scala:381)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.immutable.List.map(List.scala:285)
at org.apache.spark.sql.catalyst.expressions.objects.Invoke.doGenCode(objects.scala:134)
at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
at org.apache.spark.sql.execution.FilterExec.org$apache$spark$sql$execution$FilterExec$$genPredicate$1(basicPhysicalOperators.scala:127)
at org.apache.spark.sql.execution.FilterExec$$anonfun$12.apply(basicPhysicalOperators.scala:169)
at org.apache.spark.sql.execution.FilterExec$$anonfun$12.apply(basicPhysicalOperators.scala:153)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.immutable.List.foreach(List.scala:381)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at scala.collection.immutable.List.map(List.scala:285)
at org.apache.spark.sql.execution.FilterExec.doConsume(basicPhysicalOperators.scala:153)
at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
at org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:218)
at org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:244)
at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
at org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:218)
at org.apache.spark.sql.execution.FilterExec.doProduce(basicPhysicalOperators.scala:113)
at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
at org.apache.spark.sql.execution.FilterExec.produce(basicPhysicalOperators.scala:79)
at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:40)
at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:30)
at org.apache.spark.sql.execution.aggregate.HashAggregateExec.doProduceWithKeys(HashAggregateExec.scala:526)
at org.apache.spark.sql.execution.aggregate.HashAggregateExec.doProduce(HashAggregateExec.scala:145)
at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
at org.apache.spark.sql.execution.aggregate.HashAggregateExec.produce(HashAggregateExec.scala:37)
at org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:309)
at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:347)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
at org.apache.spark.sql.execution.exchange.ShuffleExchange.prepareShuffleDependency(ShuffleExchange.scala:86)
at org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:122)
at org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:113)
at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
at org.apache.spark.sql.execution.exchange.ShuffleExchange.doExecute(ShuffleExchange.scala:113)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:233)
at org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:138)
at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:361)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
at org.apache.spark.sql.execution.TakeOrderedAndProjectExec.executeCollect(limit.scala:128)
at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2183)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2532)
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2182)
at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2189)
at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1925)
at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1924)
at org.apache.spark.sql.Dataset.withTypedCallback(Dataset.scala:2562)
at org.apache.spark.sql.Dataset.head(Dataset.scala:1924)
at org.apache.spark.sql.Dataset.take(Dataset.scala:2139)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.zeppelin.spark.ZeppelinContext.showDF(ZeppelinContext.java:216)
at org.apache.zeppelin.spark.SparkSqlInterpreter.interpret(SparkSqlInterpreter.java:129)
at org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:94)
at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:341)
at org.apache.zeppelin.scheduler.Job.run(Job.java:176)
at org.apache.zeppelin.scheduler.ParallelScheduler$JobRunner.run(ParallelScheduler.java:162)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)


Re: zeppelin spark sql - ClassNotFoundException: $line70280873551.$read$

Posted by Nirav Patel <np...@xactlycorp.com>.
Actually, it started happening again today! I tried restarting the Spark
interpreter, but no luck.

%sql select count(*) from valInput // fails with ClassNotFoundException

java.lang.ClassNotFoundException: $line85378294951.$read$
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:225)
at org.apache.spark.sql.catalyst.encoders.OuterScopes$$anonfun$getOuterScope$1.apply(OuterScopes.scala:62)
at org.apache.spark.sql.catalyst.expressions.objects.NewInstance$$anonfun$13.apply(objects.scala:238)
at org.apache.spark.sql.catalyst.expressions.objects.NewInstance$$anonfun$13.apply(objects.scala:238)
at scala.Option.map(Option.scala:146)
at org.apache.spark.sql.catalyst.expressions.objects.NewInstance.doGenCode(objects.scala:238)
at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
at scala.Option.getOrElse(Option.scala:121)

On Thu, Nov 17, 2016 at 2:35 PM, Nirav Patel <np...@xactlycorp.com> wrote:

> Never mind. I had a logically incorrect transformation:
>
> val validInputDocs = inputDocsDs.filter(doc => {
>   (doc.labelIdx != -1 && doc.label != "test ") // Predicate should be
>   // !"test".equals(doc.label); I copied the incorrect one from a SQL
>   // statement I wrote earlier :)
> })
>
> Fixing the above seems to resolve the issue. It's interesting, though,
> as it's not syntactically incorrect.
>
>
> On Thu, Nov 17, 2016 at 2:24 PM, Nirav Patel <np...@xactlycorp.com>
> wrote:
>
>> Recently I started getting the following error upon executing Spark SQL.
>>
>>
>> validInputDocs.createOrReplaceTempView("valInput")
>>
>> %sql select count(*) from valInput // fails with ClassNotFoundException
>>
>> But validInputDocs.show works just fine.
>>
>> Any interpreter settings that may have affected the sqlContext or the
>> classpath?
>>
>>
>> java.lang.ClassNotFoundException: $line70280873551.$read$
>> at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
>> at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>> at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>> at java.lang.Class.forName0(Native Method)
>> at java.lang.Class.forName(Class.java:348)
>> at org.apache.spark.util.Utils$.classForName(Utils.scala:225)
>> [... rest of stack trace identical to the one in the original message above ...]
>>
>>
>


Re: zeppelin spark sql - ClassNotFoundException: $line70280873551.$read$

Posted by Nirav Patel <np...@xactlycorp.com>.
Never mind. I had a logically incorrect transformation:

val validInputDocs = inputDocsDs.filter(doc => {
  (doc.labelIdx != -1 && doc.label != "test ") // Predicate should be
  // !"test".equals(doc.label); I copied the incorrect one from a SQL
  // statement I wrote earlier :)
})

Fixing the above seems to resolve the issue. It's interesting, though,
as it's not syntactically incorrect.


On Thu, Nov 17, 2016 at 2:24 PM, Nirav Patel <np...@xactlycorp.com> wrote:

> Recently I started getting the following error upon executing Spark SQL.
>
>
> validInputDocs.createOrReplaceTempView("valInput")
>
> %sql select count(*) from valInput // fails with ClassNotFoundException
>
> But validInputDocs.show works just fine.
>
> Any interpreter settings that may have affected the sqlContext or the
> classpath?
>
>
> java.lang.ClassNotFoundException: $line70280873551.$read$
> at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
> at java.lang.Class.forName0(Native Method)
> at java.lang.Class.forName(Class.java:348)
> at org.apache.spark.util.Utils$.classForName(Utils.scala:225)
> [... rest of stack trace identical to the one in the original message above ...]
>
>
