Posted to issues@spark.apache.org by "Josh Rosen (JIRA)" <ji...@apache.org> on 2015/07/29 08:55:06 UTC

[jira] [Commented] (SPARK-9127) Codegen of Random may fail to compile

    [ https://issues.apache.org/jira/browse/SPARK-9127?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14645571#comment-14645571 ] 

Josh Rosen commented on SPARK-9127:
-----------------------------------

Is this still a problem?

> Codegen of Random may fail to compile
> -------------------------------------
>
>                 Key: SPARK-9127
>                 URL: https://issues.apache.org/jira/browse/SPARK-9127
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>            Reporter: Davies Liu
>            Assignee: Wenchen Fan
>            Priority: Critical
>
> {code}
> ERROR GenerateMutableProjection: failed to compile:
>       public Object generate(org.apache.spark.sql.catalyst.expressions.Expression[] expr) {
>         return new SpecificProjection(expr);
>       }
>       class SpecificProjection extends org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection {
>         private org.apache.spark.sql.catalyst.expressions.Expression[] expressions = null;
>         private org.apache.spark.sql.catalyst.expressions.MutableRow mutableRow = null;
>         private org.apache.spark.util.random.XORShiftRandom rng4 = new org.apache.spark.util.random.XORShiftRandom(-5419823303878592871 + org.apache.spark.TaskContext.getPartitionId());
>         public SpecificProjection(org.apache.spark.sql.catalyst.expressions.Expression[] expr) {
>           expressions = expr;
>           mutableRow = new org.apache.spark.sql.catalyst.expressions.GenericMutableRow(2);
>         }
>         public org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection target(org.apache.spark.sql.catalyst.expressions.MutableRow row) {
>           mutableRow = row;
>           return this;
>         }
>         /* Provide immutable access to the last projected row. */
>         public InternalRow currentValue() {
>           return (InternalRow) mutableRow;
>         }
>         public Object apply(Object _i) {
>           InternalRow i = (InternalRow) _i;
>         boolean isNull0 = i.isNullAt(0);
>         long primitive1 = isNull0 ?
>             -1L : (i.getLong(0));
>           if(isNull0)
>             mutableRow.setNullAt(0);
>           else
>             mutableRow.setLong(0, primitive1);
>       final double primitive3 = rng4.nextDouble();
>           if(false)
>             mutableRow.setNullAt(1);
>           else
>             mutableRow.setDouble(1, primitive3);
>           return mutableRow;
>         }
>       }
> org.codehaus.commons.compiler.CompileException: Line 10, Column 117: Value of decimal integer literal '5419823303878592871' is out of range
> 	at org.codehaus.janino.UnitCompiler.compileException(UnitCompiler.java:10473)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:4696)
> 	at org.codehaus.janino.UnitCompiler.access$9200(UnitCompiler.java:185)
> 	at org.codehaus.janino.UnitCompiler$11.visitIntegerLiteral(UnitCompiler.java:4402)
> 	at org.codehaus.janino.Java$IntegerLiteral.accept(Java.java:4321)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4427)
> 	at org.codehaus.janino.UnitCompiler.getNegatedConstantValue2(UnitCompiler.java:4856)
> 	at org.codehaus.janino.UnitCompiler.getNegatedConstantValue2(UnitCompiler.java:4890)
> 	at org.codehaus.janino.UnitCompiler.access$10400(UnitCompiler.java:185)
> 	at org.codehaus.janino.UnitCompiler$12.visitIntegerLiteral(UnitCompiler.java:4823)
> 	at org.codehaus.janino.Java$IntegerLiteral.accept(Java.java:4321)
> 	at org.codehaus.janino.UnitCompiler.getNegatedConstantValue(UnitCompiler.java:4848)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:4451)
> 	at org.codehaus.janino.UnitCompiler.access$8800(UnitCompiler.java:185)
> 	at org.codehaus.janino.UnitCompiler$11.visitUnaryOperation(UnitCompiler.java:4393)
> 	at org.codehaus.janino.Java$UnaryOperation.accept(Java.java:3647)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4427)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:4498)
> 	at org.codehaus.janino.UnitCompiler.access$8900(UnitCompiler.java:185)
> 	at org.codehaus.janino.UnitCompiler$11.visitBinaryOperation(UnitCompiler.java:4394)
> 	at org.codehaus.janino.Java$BinaryOperation.accept(Java.java:3768)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4427)
> 	at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4360)
> 	at org.codehaus.janino.UnitCompiler.invokeConstructor(UnitCompiler.java:6681)
> 	at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:4126)
> 	at org.codehaus.janino.UnitCompiler.access$7600(UnitCompiler.java:185)
> 	at org.codehaus.janino.UnitCompiler$10.visitNewClassInstance(UnitCompiler.java:3275)
> 	at org.codehaus.janino.Java$NewClassInstance.accept(Java.java:4085)
> 	at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:3290)
> 	at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4368)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1659)
> 	at org.codehaus.janino.UnitCompiler.access$800(UnitCompiler.java:185)
> 	at org.codehaus.janino.UnitCompiler$4.visitFieldDeclaration(UnitCompiler.java:933)
> 	at org.codehaus.janino.Java$FieldDeclaration.accept(Java.java:1818)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:958)
> 	at org.codehaus.janino.UnitCompiler.initializeInstanceVariablesAndInvokeInstanceInitializers(UnitCompiler.java:6101)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2284)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:518)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
> 	at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
> 	at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
> 	at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> 	at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
> 	at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
> 	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
> 	at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> 	at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
> 	at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
> 	at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
> 	at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
> 	at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
> 	at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
> 	at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:268)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:292)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:289)
> 	at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> 	at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> 	at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> 	at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
> 	at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
> 	at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> 	at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:256)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:89)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:305)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:302)
> 	at org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:186)
> 	at org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:42)
> 	at org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:42)
> 	at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:45)
> 	at org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:44)
> 	at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> 	at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> 	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> 	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> 	at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> 	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> 	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> 	at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> 	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> 	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> 	at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> 	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
> 	at org.apache.spark.scheduler.Task.run(Task.scala:70)
> 	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> 	at java.lang.Thread.run(Thread.java:745)
> 15/07/16 21:56:16 ERROR Project: Failed to generate mutable projection, fallback to interpreted
> {code}
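
For readers following the stack trace: Janino rejects the field initializer on line 10 of the generated class because the seed {{-5419823303878592871}} is emitted as a bare decimal literal, and Java only accepts bare decimal integer literals within int range; anything larger needs an {{L}} suffix. Below is a minimal, illustrative Scala sketch of the kind of change this implies, with the seed rendered as a suffixed long literal before it is spliced into the generated Java source. This is not the actual Spark patch; {{seedLiteral}} and {{rngInitializer}} are hypothetical helpers used only to demonstrate the idea.

{code}
// Sketch only: render a Long seed as Java source text that Janino can parse.
object RandCodegenSketch {

  // A Long outside int range must carry an 'L' suffix in Java source,
  // e.g. "-5419823303878592871L" instead of "-5419823303878592871".
  def seedLiteral(seed: Long): String = s"${seed}L"

  // Build the field initializer that the mutable projection would embed.
  def rngInitializer(seed: Long): String =
    s"new org.apache.spark.util.random.XORShiftRandom(" +
      s"${seedLiteral(seed)} + org.apache.spark.TaskContext.getPartitionId())"

  def main(args: Array[String]): Unit = {
    // The literal from the reported failure, now suffixed so it compiles.
    println(rngInitializer(-5419823303878592871L))
  }
}
{code}

With the suffix emitted, the initializer in the quoted generated code would be valid Java; the reported CompileException suggests the Rand/Randn codegen path at the time omitted it.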



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org