Posted to issues@spark.apache.org by "Cheng Lian (JIRA)" <ji...@apache.org> on 2015/06/19 00:39:02 UTC

[jira] [Updated] (SPARK-8461) ClassNotFoundException when code generation is enabled

     [ https://issues.apache.org/jira/browse/SPARK-8461?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Cheng Lian updated SPARK-8461:
------------------------------
    Description: 
Build Spark with {{-Phive}}, then run the following Spark shell snippet:
{code}
sqlContext.range(0, 2).select(lit("a") as 'a).coalesce(1).write.mode("overwrite").json("file:///tmp/foo")
{code}
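For completeness, a full spark-shell session reproducing this would look roughly like the sketch below (the {{lit}} import is an assumption; spark-shell does not bring {{org.apache.spark.sql.functions._}} into scope by default):
{code}
// Sketch of a full spark-shell repro, assuming a build with -Phive.
// The lit import is omitted in the snippet above and is assumed here.
import org.apache.spark.sql.functions.lit

sqlContext.range(0, 2)                 // two-row DataFrame with a single "id" column
  .select(lit("a") as 'a)              // project a constant string column named "a"
  .coalesce(1)                         // single output partition
  .write
  .mode("overwrite")
  .json("file:///tmp/foo")             // the write triggers the generated mutable projection
{code}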
Exception thrown:
{noformat}
15/06/18 15:36:30 ERROR codegen.GenerateMutableProjection: failed to compile:

      import org.apache.spark.sql.catalyst.InternalRow;

      public SpecificProjection generate(org.apache.spark.sql.catalyst.expressions.Expression[] expr) {
        return new SpecificProjection(expr);
      }

      class SpecificProjection extends org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection {

        private org.apache.spark.sql.catalyst.expressions.Expression[] expressions = null;
        private org.apache.spark.sql.catalyst.expressions.MutableRow mutableRow = null;

        public SpecificProjection(org.apache.spark.sql.catalyst.expressions.Expression[] expr) {
          expressions = expr;
          mutableRow = new org.apache.spark.sql.catalyst.expressions.GenericMutableRow(1);
        }

        public org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection target(org.apache.spark.sql.catalyst.expressions.MutableRow row) {
          mutableRow = row;
          return this;
        }

        /* Provide immutable access to the last projected row. */
        public InternalRow currentValue() {
          return (InternalRow) mutableRow;
        }

        public Object apply(Object _i) {
          InternalRow i = (InternalRow) _i;

      /* expression: a */
      Object obj2 = expressions[0].eval(i);
      boolean isNull0 = obj2 == null;
      org.apache.spark.unsafe.types.UTF8String primitive1 = null;
      if (!isNull0) {
        primitive1 = (org.apache.spark.unsafe.types.UTF8String) obj2;
      }

          if(isNull0)
            mutableRow.setNullAt(0);
          else
            mutableRow.update(0, primitive1);


          return mutableRow;
        }
      }

org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
        at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
        at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
        at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
        at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
        at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
        at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
        at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
        at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
        at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
        at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
        at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
        at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
        at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
        at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
        at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
        at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
        at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
        at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
        at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
        at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
        at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
        at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
        at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
        at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
        at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
        at org.apache.spark.scheduler.Task.run(Task.scala:70)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ClassNotFoundException: Object
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:344)
        at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
        at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
        ... 80 more
Caused by: java.lang.ClassNotFoundException: Object
        at java.lang.ClassLoader.findClass(ClassLoader.java:530)
        at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
        ... 87 more
15/06/18 15:36:30 ERROR executor.Executor: Exception in task 0.0 in stage 4.0 (TID 18)
java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
        at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
        at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
        at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
        at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
        at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
        at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
        at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
        at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
        at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
        at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
        at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
        at org.apache.spark.scheduler.Task.run(Task.scala:70)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
        at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
        at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
        at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
        at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
        at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
        at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
        at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
        at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
        at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
        at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
        at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
        at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
        at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
        at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
        at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
        at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
        at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
        at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
        ... 38 more
Caused by: java.lang.ClassNotFoundException: Object
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:344)
        at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
        at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
        ... 80 more
Caused by: java.lang.ClassNotFoundException: Object
        at java.lang.ClassLoader.findClass(ClassLoader.java:530)
        at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
        ... 87 more
15/06/18 15:36:30 WARN scheduler.TaskSetManager: Lost task 0.0 in stage 4.0 (TID 18, localhost): java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
        at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
        at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
        at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
        at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
        at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
        at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
        at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
        at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
        at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
        at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
        at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
        at org.apache.spark.scheduler.Task.run(Task.scala:70)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
        at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
        at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
        at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
        at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
        at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
        at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
        at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
        at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
        at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
        at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
        at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
        at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
        at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
        at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
        at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
        at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
        at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
        at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
        ... 38 more
Caused by: java.lang.ClassNotFoundException: Object
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:344)
        at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
        at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
        ... 80 more
Caused by: java.lang.ClassNotFoundException: Object
        at java.lang.ClassLoader.findClass(ClassLoader.java:530)
        at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
        ... 87 more

15/06/18 15:36:30 ERROR scheduler.TaskSetManager: Task 0 in stage 4.0 failed 1 times; aborting job
15/06/18 15:36:30 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 4.0, whose tasks have all completed, from pool
15/06/18 15:36:30 INFO scheduler.TaskSchedulerImpl: Cancelling stage 4
15/06/18 15:36:30 INFO scheduler.DAGScheduler: ResultStage 4 (json at <console>:23) failed in 0.054 s
15/06/18 15:36:30 INFO scheduler.DAGScheduler: Job 4 failed: json at <console>:23, took 0.059715 s
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 4.0 failed 1 times, most recent failure: Lost task 0.0 in stage 4.0 (TID 18, localhost): java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
        at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
        at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
        at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
        at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
        at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
        at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
        at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
        at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
        at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
        at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
        at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
        at org.apache.spark.scheduler.Task.run(Task.scala:70)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
        at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
        at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
        at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
        at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
        at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
        at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
        at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
        at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
        at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
        at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
        at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
        at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
        at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
        at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
        at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
        at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
        at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
        at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
        ... 38 more
Caused by: java.lang.ClassNotFoundException: Object
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:344)
        at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
        at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
        ... 80 more
Caused by: java.lang.ClassNotFoundException: Object
        at java.lang.ClassLoader.findClass(ClassLoader.java:530)
        at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
        ... 87 more

Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1285)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1276)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1275)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1275)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:749)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1484)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1445)
        at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
{noformat}
Disabling code generation works around this issue.
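For reference, a minimal sketch of that workaround in spark-shell ({{spark.sql.codegen}} is assumed to be the relevant configuration key for this Spark version; it may differ across releases):
{code}
// Hedged workaround sketch: disable expression code generation before
// re-running the failing write. "spark.sql.codegen" is an assumed
// configuration key for this Spark version.
sqlContext.setConf("spark.sql.codegen", "false")

import org.apache.spark.sql.functions.lit
sqlContext.range(0, 2).select(lit("a") as 'a).coalesce(1)
  .write.mode("overwrite").json("file:///tmp/foo")
{code}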

        at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
        at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
        at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
        at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
        at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
        at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
        at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
        at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
        at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
        at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
        at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
        at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
        at org.apache.spark.scheduler.Task.run(Task.scala:70)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
        at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
        at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
        at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
        at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
        at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
        at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
        at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
        at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
        at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
        at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
        at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
        at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
        at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
        at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
        at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
        at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
        at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
        at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
        ... 38 more
Caused by: java.lang.ClassNotFoundException: Object
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:344)
        at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
        at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
        ... 80 more
Caused by: java.lang.ClassNotFoundException: Object
        at java.lang.ClassLoader.findClass(ClassLoader.java:530)
        at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
        ... 87 more

15/06/18 15:36:30 ERROR scheduler.TaskSetManager: Task 0 in stage 4.0 failed 1 times; aborting job
15/06/18 15:36:30 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 4.0, whose tasks have all completed, from pool
15/06/18 15:36:30 INFO scheduler.TaskSchedulerImpl: Cancelling stage 4
15/06/18 15:36:30 INFO scheduler.DAGScheduler: ResultStage 4 (json at <console>:23) failed in 0.054 s
15/06/18 15:36:30 INFO scheduler.DAGScheduler: Job 4 failed: json at <console>:23, took 0.059715 s
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 4.0 failed 1 times, most recent failure: Lost task 0.0 in stage 4.0 (TID 18, localhost): java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
        at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
        at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
        at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
        at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
        at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
        at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
        at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
        at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
        at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
        at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
        at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
        at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
        at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
        at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
        at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
        at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
        at org.apache.spark.scheduler.Task.run(Task.scala:70)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
        at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
        at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
        at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
        at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
        at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
        at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
        at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
        at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
        at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
        at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
        at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
        at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
        at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
        at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
        at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
        at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
        at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
        at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
        at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
        at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
        at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
        at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
        at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
        ... 38 more
Caused by: java.lang.ClassNotFoundException: Object
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:344)
        at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
        at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
        at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
        ... 80 more
Caused by: java.lang.ClassNotFoundException: Object
        at java.lang.ClassLoader.findClass(ClassLoader.java:530)
        at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
        at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
        ... 87 more

Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1285)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1276)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1275)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1275)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:749)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1484)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1445)
        at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
{noformat}
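
A possible temporary workaround, sketched below, is to turn code generation off for the session. This is only a sketch and not a fix: it assumes the {{spark.sql.codegen}} flag from earlier releases is still honored by this build, and it merely sidesteps the Janino compile step that fails above.
{code}
// Hypothetical workaround (assumes spark.sql.codegen is still honored in this build):
// disable Catalyst code generation so projections fall back to the interpreted path
// instead of compiling generated Java with Janino on the executors.
sqlContext.setConf("spark.sql.codegen", "false")
{code}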


> ClassNotFoundException when code generation is enabled
> ------------------------------------------------------
>
>                 Key: SPARK-8461
>                 URL: https://issues.apache.org/jira/browse/SPARK-8461
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 1.5.0
>            Reporter: Cheng Lian
>            Priority: Blocker
>
> Build Spark with {{-Phive}}, then run the following Spark shell snippet:
> {code}
> sqlContext.range(0, 2).select(lit("a") as 'a).coalesce(1).write.mode("overwrite").json("file:///tmp/foo")
> {code}
> Exception thrown:
> {noformat}
> 15/06/18 15:36:30 ERROR codegen.GenerateMutableProjection: failed to compile:
>       import org.apache.spark.sql.catalyst.InternalRow;
>       public SpecificProjection generate(org.apache.spark.sql.catalyst.expressions.Expression[] expr) {
>         return new SpecificProjection(expr);
>       }
>       class SpecificProjection extends org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection {
>         private org.apache.spark.sql.catalyst.expressions.Expression[] expressions = null;
>         private org.apache.spark.sql.catalyst.expressions.MutableRow mutableRow = null;
>         public SpecificProjection(org.apache.spark.sql.catalyst.expressions.Expression[] expr) {
>           expressions = expr;
>           mutableRow = new org.apache.spark.sql.catalyst.expressions.GenericMutableRow(1);
>         }
>         public org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection target(org.apache.spark.sql.catalyst.expressions.MutableRow row) {
>           mutableRow = row;
>           return this;
>         }
>         /* Provide immutable access to the last projected row. */
>         public InternalRow currentValue() {
>           return (InternalRow) mutableRow;
>         }
>         public Object apply(Object _i) {
>           InternalRow i = (InternalRow) _i;
>       /* expression: a */
>       Object obj2 = expressions[0].eval(i);
>       boolean isNull0 = obj2 == null;
>       org.apache.spark.unsafe.types.UTF8String primitive1 = null;
>       if (!isNull0) {
>         primitive1 = (org.apache.spark.unsafe.types.UTF8String) obj2;
>       }
>           if(isNull0)
>             mutableRow.setNullAt(0);
>           else
>             mutableRow.update(0, primitive1);
>           return mutableRow;
>         }
>       }
> org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
>         at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
>         at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
>         at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
>         at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
>         at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
>         at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
>         at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
>         at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
>         at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
>         at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
>         at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
>         at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
>         at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
>         at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
>         at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
>         at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
>         at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
>         at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
>         at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
>         at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
>         at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
>         at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
>         at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
>         at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
>         at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
>         at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
>         at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
>         at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
>         at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
>         at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
>         at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>         at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>         at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>         at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
>         at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
>         at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>         at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
>         at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
>         at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
>         at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
>         at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
>         at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
>         at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
>         at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
>         at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
>         at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
>         at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
>         at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
>         at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
>         at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
>         at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
>         at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
>         at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
>         at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
>         at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
>         at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
>         at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
>         at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
>         at org.apache.spark.scheduler.Task.run(Task.scala:70)
>         at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ClassNotFoundException: Object
>         at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>         at java.lang.Class.forName0(Native Method)
>         at java.lang.Class.forName(Class.java:344)
>         at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
>         at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
>         at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
>         ... 80 more
> Caused by: java.lang.ClassNotFoundException: Object
>         at java.lang.ClassLoader.findClass(ClassLoader.java:530)
>         at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>         at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>         at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
>         at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
>         ... 87 more
> 15/06/18 15:36:30 ERROR executor.Executor: Exception in task 0.0 in stage 4.0 (TID 18)
> java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
>         at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
>         at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
>         at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
>         at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
>         at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
>         at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
>         at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>         at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
>         at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
>         at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>         at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
>         at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
>         at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
>         at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
>         at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
>         at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
>         at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
>         at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
>         at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
>         at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
>         at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
>         at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
>         at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
>         at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
>         at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
>         at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
>         at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
>         at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
>         at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
>         at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
>         at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
>         at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
>         at org.apache.spark.scheduler.Task.run(Task.scala:70)
>         at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
>         at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
>         at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
>         at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
>         at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
>         at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
>         at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
>         at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
>         at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
>         at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
>         at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
>         at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
>         at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
>         at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
>         at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
>         at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
>         at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
>         at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
>         at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
>         at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
>         at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
>         at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
>         at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
>         at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
>         at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
>         at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
>         at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
>         at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
>         at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
>         at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
>         at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
>         at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>         at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>         ... 38 more
> Caused by: java.lang.ClassNotFoundException: Object
>         at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>         at java.lang.Class.forName0(Native Method)
>         at java.lang.Class.forName(Class.java:344)
>         at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
>         at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
>         at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
>         ... 80 more
> Caused by: java.lang.ClassNotFoundException: Object
>         at java.lang.ClassLoader.findClass(ClassLoader.java:530)
>         at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>         at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>         at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
>         at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
>         ... 87 more
> 15/06/18 15:36:30 WARN scheduler.TaskSetManager: Lost task 0.0 in stage 4.0 (TID 18, localhost): java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
>         at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
>         at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
>         at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
>         at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
>         at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
>         at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
>         at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>         at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
>         at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
>         at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>         at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
>         at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
>         at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
>         at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
>         at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
>         at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
>         at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
>         at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
>         at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
>         at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
>         at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
>         at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
>         at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
>         at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
>         at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
>         at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
>         at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
>         at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
>         at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
>         at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
>         at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
>         at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
>         at org.apache.spark.scheduler.Task.run(Task.scala:70)
>         at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
>         at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
>         at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
>         at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
>         at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
>         at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
>         at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
>         at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
>         at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
>         at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
>         at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
>         at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
>         at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
>         at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
>         at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
>         at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
>         at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
>         at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
>         at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
>         at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
>         at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
>         at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
>         at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
>         at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
>         at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
>         at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
>         at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
>         at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
>         at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
>         at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
>         at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
>         at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>         at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>         ... 38 more
> Caused by: java.lang.ClassNotFoundException: Object
>         at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>         at java.lang.Class.forName0(Native Method)
>         at java.lang.Class.forName(Class.java:344)
>         at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
>         at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
>         at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
>         ... 80 more
> Caused by: java.lang.ClassNotFoundException: Object
>         at java.lang.ClassLoader.findClass(ClassLoader.java:530)
>         at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>         at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>         at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
>         at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
>         ... 87 more
> 15/06/18 15:36:30 ERROR scheduler.TaskSetManager: Task 0 in stage 4.0 failed 1 times; aborting job
> 15/06/18 15:36:30 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 4.0, whose tasks have all completed, from pool
> 15/06/18 15:36:30 INFO scheduler.TaskSchedulerImpl: Cancelling stage 4
> 15/06/18 15:36:30 INFO scheduler.DAGScheduler: ResultStage 4 (json at <console>:23) failed in 0.054 s
> 15/06/18 15:36:30 INFO scheduler.DAGScheduler: Job 4 failed: json at <console>:23, took 0.059715 s
> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 4.0 failed 1 times, most recent failure: Lost task 0.0 in stage 4.0 (TID 18, localhost): java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
>         at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
>         at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
>         at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
>         at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
>         at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
>         at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
>         at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>         at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
>         at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
>         at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>         at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
>         at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
>         at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
>         at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
>         at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
>         at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
>         at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
>         at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
>         at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
>         at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
>         at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
>         at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
>         at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
>         at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
>         at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:93)
>         at org.apache.spark.rdd.CoalescedRDD$$anonfun$compute$1.apply(CoalescedRDD.scala:92)
>         at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
>         at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
>         at org.apache.spark.sql.DataFrame$$anonfun$toJSON$1$$anon$1.hasNext(DataFrame.scala:1471)
>         at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply$mcV$sp(PairRDDFunctions.scala:1108)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13$$anonfun$apply$6.apply(PairRDDFunctions.scala:1108)
>         at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1285)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1116)
>         at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1$$anonfun$13.apply(PairRDDFunctions.scala:1095)
>         at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
>         at org.apache.spark.scheduler.Task.run(Task.scala:70)
>         at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
>         at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
>         at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
>         at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
>         at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
>         at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
>         at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
>         at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
>         at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
>         at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
>         at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
>         at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
>         at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
>         at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
>         at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
>         at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
>         at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
>         at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
>         at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
>         at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
>         at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
>         at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
>         at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
>         at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
>         at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
>         at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
>         at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
>         at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
>         at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
>         at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
>         at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
>         at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
>         at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
>         at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
>         at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>         at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>         ... 38 more
> Caused by: java.lang.ClassNotFoundException: Object
>         at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:79)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>         at java.lang.Class.forName0(Native Method)
>         at java.lang.Class.forName(Class.java:344)
>         at org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
>         at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
>         at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
>         ... 80 more
> Caused by: java.lang.ClassNotFoundException: Object
>         at java.lang.ClassLoader.findClass(ClassLoader.java:530)
>         at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>         at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>         at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
>         at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:74)
>         ... 87 more
> Driver stacktrace:
>         at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1285)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1276)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1275)
>         at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
>         at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
>         at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1275)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749)
>         at scala.Option.foreach(Option.scala:236)
>         at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:749)
>         at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1484)
>         at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1445)
>         at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
> {noformat}
> Disabling code generation works around this issue.
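> For example, something like the following can be run in the shell before retrying the write (a minimal sketch; the exact flag name {{spark.sql.codegen}} is an assumption and may not be honored in every build):
> {code}
> // Assumed workaround sketch: turn code generation off before re-running the failing snippet.
> // The "spark.sql.codegen" key is an assumption; use whatever codegen flag your build exposes.
> sqlContext.setConf("spark.sql.codegen", "false")
> {code}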


