You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Ashish (Jira)" <ji...@apache.org> on 2019/09/20 20:47:00 UTC
[jira] [Commented] (SPARK-19984) ERROR codegen.CodeGenerator:
failed to compile: org.codehaus.commons.compiler.CompileException: File
'generated.java'
[ https://issues.apache.org/jira/browse/SPARK-19984?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16934744#comment-16934744 ]
Ashish commented on SPARK-19984:
--------------------------------
I faced a similar error. I am using Spark 2.4. I am using a lot of data frames and was calling the dropDuplicates() function on almost all of them. Part of the log I got is below:
2019-09-20 17:10:14,130 [Driver] ERROR org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator - failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 686, Column 28: Redefinition of parameter "agg_expr_11"2019-09-20 17:10:14,130 [Driver] ERROR org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator - failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 686, Column 28: Redefinition of parameter "agg_expr_11"org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 686, Column 28: Redefinition of parameter "agg_expr_11" at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:11821) at org.codehaus.janino.UnitCompiler.buildLocalVariableMap(UnitCompiler.java:3174) at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3009) at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1336) at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1309) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:799) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:958) at org.codehaus.janino.UnitCompiler.access$700(UnitCompiler.java:212) at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:393) at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:385) at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1286) at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:385) at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1285) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:825) at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:411) at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:212) at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:390) at 
org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:385) at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1405) at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:385) at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:357) at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:234) at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:446) at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:313) at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:235) at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:204) at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1420) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1496) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1493) at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599) at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379) at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342) at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257) at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000) at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004) at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874) at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1368) at org.apache.spark.sql.execution.WholeStageCodegenExec.liftedTree1$1(WholeStageCodegenExec.scala:579) at 
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:578) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92) at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:128) at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119) at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52) at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:371) at org.apache.spark.sql.execution.SortExec.inputRDDs(SortExec.scala:121) at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605) at 
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) at org.apache.spark.sql.execution.InputAdapter.doExecute(WholeStageCodegenExec.scala:363) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) at org.apache.spark.sql.execution.joins.SortMergeJoinExec.inputRDDs(SortMergeJoinExec.scala:386) at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.prepareShuffleDependency(ShuffleExchangeExec.scala:92) at 
org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:128) at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec$$anonfun$doExecute$1.apply(ShuffleExchangeExec.scala:119) at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52) at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:119) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:371) at org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:150) at org.apache.spark.sql.execution.aggregate.HashAggregateExec.inputRDDs(HashAggregateExec.scala:150) at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:605) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:180) at 
org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:154) at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:104) at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:102) at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:122) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131) at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127) at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151) at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152) at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127) at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80) at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80) at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654) at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654) at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77) at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:654) at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:273) at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:267) at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:225) at org.apache.spark.sql.DataFrameWriter.parquet(DataFrameWriter.scala:547) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$4.run(ApplicationMaster.scala:721)2019-09-20 17:10:14,176 [Driver] INFO org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator - /* 001 */ public Object generate(Object[] references) \{/* 002 */ return new GeneratedIteratorForCodegenStage7(references);/* 003 */ }/* 004 *//* 005 */ final class GeneratedIteratorForCodegenStage7 extends org.apache.spark.sql.execution.BufferedRowIterator \{/* 006 */ private Object[] references;/* 007 */ private scala.collection.Iterator[] inputs;/* 008 */ private boolean agg_initAgg;/* 009 */ private org.apache.spark.unsafe.KVIterator agg_mapIter;/* 010 */ private org.apache.spark.sql.execution.UnsafeFixedWidthAggregationMap agg_hashMap;/* 011 */ private org.apache.spark.sql.execution.UnsafeKVExternalSorter agg_sorter;/* 012 */ private boolean agg_initAgg1;/* 013 */ private org.apache.spark.unsafe.KVIterator agg_mapIter1;/* 014 */ private org.apache.spark.sql.execution.UnsafeFixedWidthAggregationMap agg_hashMap1;/* 015 */ private org.apache.spark.sql.execution.UnsafeKVExternalSorter agg_sorter1;/* 016 */ private boolean agg_initAgg2;/* 017 */ private org.apache.spark.unsafe.KVIterator agg_mapIter2;/* 018 */ private org.apache.spark.sql.execution.UnsafeFixedWidthAggregationMap agg_hashMap2;/* 019 */ private org.apache.spark.sql.execution.UnsafeKVExternalSorter agg_sorter2;/* 020 */ private boolean agg_initAgg3;/* 021 */ private org.apache.spark.unsafe.KVIterator agg_mapIter3;/* 022 */ private org.apache.spark.sql.execution.UnsafeFixedWidthAggregationMap agg_hashMap3;/* 023 */ private org.apache.spark.sql.execution.UnsafeKVExternalSorter agg_sorter3;/* 024 */ private boolean agg_initAgg4;/* 025 */ private org.apache.spark.unsafe.KVIterator agg_mapIter4;/* 026 */ private org.apache.spark.sql.execution.UnsafeFixedWidthAggregationMap 
agg_hashMap4;/* 027 */ private org.apache.spark.sql.execution.UnsafeKVExternalSorter agg_sorter4;/* 028 */ private scala.collection.Iterator inputadapter_input;/* 029 */ private org.apache.spark.sql.execution.joins.UnsafeHashedRelation bhj_relation;/* 030 */ private org.apache.spark.sql.execution.joins.LongHashedRelation bhj_relation1;/* 031 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder[] agg_mutableStateArray1 = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder[18];/* 032 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] agg_mutableStateArray2 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[18];/* 033 */ private UnsafeRow[] agg_mutableStateArray = new UnsafeRow[18];/* 034 *//* 035 */ public GeneratedIteratorForCodegenStage7(Object[] references) {/* 036 */ this.references = references;/* 037 */ }/* 038 *//* 039 */ public void init(int index, scala.collection.Iterator[] inputs) \{/* 040 */ partitionIndex = index;/* 041 */ this.inputs = inputs;/* 042 */ wholestagecodegen_init_0();/* 043 */ wholestagecodegen_init_1();/* 044 */ wholestagecodegen_init_2();/* 045 */ wholestagecodegen_init_3();/* 046 */ wholestagecodegen_init_4();/* 047 */ wholestagecodegen_init_5();/* 048 */ wholestagecodegen_init_6();/* 049 *//* 050 */ }/* 051 *//* 052 */ private void wholestagecodegen_init_0() \{/* 053 */ agg_hashMap = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[0] /* plan */).createHashMap();/* 054 */ agg_hashMap1 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[4] /* plan */).createHashMap();/* 055 */ agg_hashMap2 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[8] /* plan */).createHashMap();/* 056 */ agg_hashMap3 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[12] /* plan */).createHashMap();/* 057 */ agg_hashMap4 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) 
references[16] /* plan */).createHashMap();/* 058 */ inputadapter_input = inputs[0];/* 059 */ agg_mutableStateArray[0] = new UnsafeRow(7);/* 060 */ agg_mutableStateArray1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_mutableStateArray[0], 160);/* 061 */ agg_mutableStateArray2[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_mutableStateArray1[0], 7);/* 062 */ agg_mutableStateArray[1] = new UnsafeRow(7);/* 063 */ agg_mutableStateArray1[1] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_mutableStateArray[1], 160);/* 064 *//* 065 */ }/* 066 *//* 067 */ private void agg_doAggregateWithKeysOutput1(UnsafeRow agg_keyTerm1, UnsafeRow agg_bufferTerm1)/* 068 */ throws java.io.IOException \{/* 069 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[25] /* numOutputRows */).add(1);/* 070 *//* 071 */ boolean agg_isNull105 = agg_keyTerm1.isNullAt(0);/* 072 */ UTF8String agg_value105 = agg_isNull105 ? null : (agg_keyTerm1.getUTF8String(0));/* 073 */ boolean agg_isNull106 = agg_keyTerm1.isNullAt(1);/* 074 */ int agg_value106 = agg_isNull106 ? -1 : (agg_keyTerm1.getInt(1));/* 075 */ boolean agg_isNull107 = agg_keyTerm1.isNullAt(2);/* 076 */ int agg_value107 = agg_isNull107 ? -1 : (agg_keyTerm1.getInt(2));/* 077 */ boolean agg_isNull108 = agg_keyTerm1.isNullAt(3);/* 078 */ long agg_value108 = agg_isNull108 ? -1L : (agg_keyTerm1.getLong(3));/* 079 */ boolean agg_isNull109 = agg_keyTerm1.isNullAt(4);/* 080 */ UTF8String agg_value109 = agg_isNull109 ? null : (agg_keyTerm1.getUTF8String(4));/* 081 */ boolean agg_isNull110 = agg_keyTerm1.isNullAt(5);/* 082 */ UTF8String agg_value110 = agg_isNull110 ? null : (agg_keyTerm1.getUTF8String(5));/* 083 */ boolean agg_isNull111 = agg_keyTerm1.isNullAt(6);/* 084 */ UTF8String agg_value111 = agg_isNull111 ? 
null : (agg_keyTerm1.getUTF8String(6));/* 085 */ boolean agg_isNull112 = agg_keyTerm1.isNullAt(7);/* 086 */ UTF8String agg_value112 = agg_isNull112 ? null : (agg_keyTerm1.getUTF8String(7));/* 087 */ boolean agg_isNull113 = agg_keyTerm1.isNullAt(8);/* 088 */ UTF8String agg_value113 = agg_isNull113 ? null : (agg_keyTerm1.getUTF8String(8));/* 089 */ boolean agg_isNull114 = agg_keyTerm1.isNullAt(9);/* 090 */ long agg_value114 = agg_isNull114 ? -1L : (agg_keyTerm1.getLong(9));/* 091 */ boolean agg_isNull115 = agg_keyTerm1.isNullAt(10);/* 092 */ int agg_value115 = agg_isNull115 ? -1 : (agg_keyTerm1.getInt(10));/* 093 */ boolean agg_isNull116 = agg_keyTerm1.isNullAt(11);/* 094 */ int agg_value116 = agg_isNull116 ? -1 : (agg_keyTerm1.getInt(11));/* 095 */ boolean agg_isNull117 = agg_keyTerm1.isNullAt(12);/* 096 */ double agg_value117 = agg_isNull117 ? -1.0 : (agg_keyTerm1.getDouble(12));/* 097 */ boolean agg_isNull118 = agg_keyTerm1.isNullAt(13);/* 098 */ int agg_value118 = agg_isNull118 ? -1 : (agg_keyTerm1.getInt(13));/* 099 */ boolean agg_isNull119 = agg_keyTerm1.isNullAt(14);/* 100 */ int agg_value119 = agg_isNull119 ? -1 : (agg_keyTerm1.getInt(14));/* 101 */ boolean agg_isNull120 = agg_keyTerm1.isNullAt(15);/* 102 */ UTF8String agg_value120 = agg_isNull120 ? null : (agg_keyTerm1.getUTF8String(15));/* 103 */ boolean agg_isNull121 = agg_keyTerm1.isNullAt(16);/* 104 */ UTF8String agg_value121 = agg_isNull121 ? 
null : (agg_keyTerm1.getUTF8String(16));/* 105 *//* 106 */ agg_doConsume2(agg_value105, agg_isNull105, agg_value106, agg_isNull106, agg_value107, agg_isNull107, agg_value108, agg_isNull108, agg_value109, agg_isNull109, agg_value110, agg_isNull110, agg_value111, agg_isNull111, agg_value112, agg_isNull112, agg_value113, agg_isNull113, agg_value114, agg_isNull114, agg_value115, agg_isNull115, agg_value116, agg_isNull116, agg_value117, agg_isNull117, agg_value118, agg_isNull118, agg_value119, agg_isNull119, agg_value120, agg_isNull120, agg_value121, agg_isNull121);/* 107 *//* 108 */ }/* 109 *//* 110 */ private void wholestagecodegen_init_3() \{/* 111 */ agg_mutableStateArray2[6] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_mutableStateArray1[6], 17);/* 112 */ agg_mutableStateArray[7] = new UnsafeRow(17);/* 113 */ agg_mutableStateArray1[7] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_mutableStateArray[7], 256);/* 114 */ agg_mutableStateArray2[7] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_mutableStateArray1[7], 17);/* 115 */ agg_mutableStateArray[8] = new UnsafeRow(17);/* 116 */ agg_mutableStateArray1[8] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_mutableStateArray[8], 256);/* 117 */ agg_mutableStateArray2[8] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_mutableStateArray1[8], 17);/* 118 */ agg_mutableStateArray[9] = new UnsafeRow(17);/* 119 */ agg_mutableStateArray1[9] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_mutableStateArray[9], 256);/* 120 */ agg_mutableStateArray2[9] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_mutableStateArray1[9], 17);/* 121 *//* 122 */ }/* 123 *//* 124 */ private void agg_doAggregateWithKeysOutput(UnsafeRow agg_keyTerm, UnsafeRow agg_bufferTerm)/* 125 */ throws java.io.IOException \{/* 126 */ 
((org.apache.spark.sql.execution.metric.SQLMetric) references[20] /* numOutputRows */).add(1);/* 127 *//* 128 */ boolean agg_isNull22 = agg_keyTerm.isNullAt(3);/* 129 */ UTF8String agg_value22 = agg_isNull22 ? null : (agg_keyTerm.getUTF8String(3));/* 130 */ boolean agg_isNull23 = agg_keyTerm.isNullAt(5);/* 131 */ int agg_value23 = agg_isNull23 ? -1 : (agg_keyTerm.getInt(5));/* 132 */ boolean agg_isNull24 = agg_keyTerm.isNullAt(1);/* 133 */ long agg_value24 = agg_isNull24 ? -1L : (agg_keyTerm.getLong(1));/* 134 *//* 135 */ // generate join key for stream side/* 136 *//* 137 */ agg_mutableStateArray1[2].reset();/* 138 *//* 139 */ agg_mutableStateArray2[2].zeroOutNullBytes();/* 140 *//* 141 */ if (agg_isNull22) {/* 142 */ agg_mutableStateArray2[2].setNullAt(0);/* 143 */ } else \{/* 144 */ agg_mutableStateArray2[2].write(0, agg_value22);/* 145 */ }/* 146 *//* 147 */ if (agg_isNull23) \{/* 148 */ agg_mutableStateArray2[2].setNullAt(1);/* 149 */ } else \{/* 150 */ agg_mutableStateArray2[2].write(1, agg_value23);/* 151 */ }/* 152 *//* 153 */ if (agg_isNull24) \{/* 154 */ agg_mutableStateArray2[2].setNullAt(2);/* 155 */ } else \{/* 156 */ agg_mutableStateArray2[2].write(2, agg_value24);/* 157 */ }/* 158 */ agg_mutableStateArray[2].setTotalSize(agg_mutableStateArray1[2].totalSize());/* 159 *//* 160 */ // find matches from HashedRelation/* 161 */ UnsafeRow bhj_matched = agg_mutableStateArray[2].anyNull() ? null: (UnsafeRow)bhj_relation.getValue(agg_mutableStateArray[2]);/* 162 */ if (bhj_matched != null) \{/* 163 */ {/* 164 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[23] /* numOutputRows */).add(1);/* 165 *//* 166 */ boolean bhj_isNull3 = bhj_matched.isNullAt(0);/* 167 */ long bhj_value3 = bhj_isNull3 ? -1L : (bhj_matched.getLong(0));/* 168 */ boolean bhj_isNull4 = bhj_matched.isNullAt(1);/* 169 */ UTF8String bhj_value4 = bhj_isNull4 ? 
null : (bhj_matched.getUTF8String(1));/* 170 */ boolean bhj_isNull5 = bhj_matched.isNullAt(2);/* 171 */ int bhj_value5 = bhj_isNull5 ? -1 : (bhj_matched.getInt(2));/* 172 */ boolean bhj_isNull6 = bhj_matched.isNullAt(3);/* 173 */ int bhj_value6 = bhj_isNull6 ? -1 : (bhj_matched.getInt(3));/* 174 */ boolean bhj_isNull7 = bhj_matched.isNullAt(4);/* 175 */ UTF8String bhj_value7 = bhj_isNull7 ? null : (bhj_matched.getUTF8String(4));/* 176 */ boolean bhj_isNull8 = bhj_matched.isNullAt(5);/* 177 */ double bhj_value8 = bhj_isNull8 ? -1.0 : (bhj_matched.getDouble(5));/* 178 */ boolean bhj_isNull9 = bhj_matched.isNullAt(6);/* 179 */ int bhj_value9 = bhj_isNull9 ? -1 : (bhj_matched.getInt(6));/* 180 */ boolean bhj_isNull10 = bhj_matched.isNullAt(7);/* 181 */ int bhj_value10 = bhj_isNull10 ? -1 : (bhj_matched.getInt(7));/* 182 */ boolean bhj_isNull11 = bhj_matched.isNullAt(8);/* 183 */ UTF8String bhj_value11 = bhj_isNull11 ? null : (bhj_matched.getUTF8String(8));/* 184 */ boolean bhj_isNull12 = bhj_matched.isNullAt(9);/* 185 */ int bhj_value12 = bhj_isNull12 ? -1 : (bhj_matched.getInt(9));/* 186 */ boolean agg_isNull25 = agg_keyTerm.isNullAt(6);/* 187 */ UTF8String agg_value25 = agg_isNull25 ? null : (agg_keyTerm.getUTF8String(6));/* 188 */ boolean agg_isNull26 = agg_keyTerm.isNullAt(4);/* 189 */ UTF8String agg_value26 = agg_isNull26 ? null : (agg_keyTerm.getUTF8String(4));/* 190 */ boolean agg_isNull27 = agg_keyTerm.isNullAt(0);/* 191 */ UTF8String agg_value27 = agg_isNull27 ? null : (agg_keyTerm.getUTF8String(0));/* 192 */ boolean agg_isNull28 = agg_keyTerm.isNullAt(2);/* 193 */ UTF8String agg_value28 = agg_isNull28 ? 
null : (agg_keyTerm.getUTF8String(2));/* 194 *//* 195 */ agg_doConsume1(bhj_value3, bhj_isNull3, bhj_value4, bhj_isNull4, bhj_value5, bhj_isNull5, bhj_value6, bhj_isNull6, bhj_value7, bhj_isNull7, bhj_value8, bhj_isNull8, bhj_value9, bhj_isNull9, bhj_value10, bhj_isNull10, bhj_value11, bhj_isNull11, bhj_value12, bhj_isNull12, agg_value22, agg_isNull22, agg_value23, agg_isNull23, agg_value24, agg_isNull24, agg_value25, agg_isNull25, agg_value26, agg_isNull26, agg_value27, agg_isNull27, agg_value28, agg_isNull28);/* 196 *//* 197 */ }/* 198 */ }/* 199 *//* 200 */ }/* 201 *//* 202 */ private void agg_doAggregateWithKeysOutput4(UnsafeRow agg_keyTerm4, UnsafeRow agg_bufferTerm4)/* 203 */ throws java.io.IOException \{/* 204 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[34] /* numOutputRows */).add(1);/* 205 *//* 206 */ boolean agg_isNull474 = agg_keyTerm4.isNullAt(10);/* 207 */ long agg_value474 = agg_isNull474 ? -1L : (agg_keyTerm4.getLong(10));/* 208 */ boolean agg_isNull475 = agg_keyTerm4.isNullAt(5);/* 209 */ UTF8String agg_value475 = agg_isNull475 ? null : (agg_keyTerm4.getUTF8String(5));/* 210 */ boolean agg_isNull476 = agg_keyTerm4.isNullAt(11);/* 211 */ int agg_value476 = agg_isNull476 ? -1 : (agg_keyTerm4.getInt(11));/* 212 */ boolean agg_isNull477 = agg_keyTerm4.isNullAt(1);/* 213 */ int agg_value477 = agg_isNull477 ? -1 : (agg_keyTerm4.getInt(1));/* 214 */ boolean agg_isNull478 = agg_keyTerm4.isNullAt(20);/* 215 */ UTF8String agg_value478 = agg_isNull478 ? null : (agg_keyTerm4.getUTF8String(20));/* 216 */ boolean agg_isNull479 = agg_keyTerm4.isNullAt(16);/* 217 */ double agg_value479 = agg_isNull479 ? -1.0 : (agg_keyTerm4.getDouble(16));/* 218 */ boolean agg_isNull480 = agg_keyTerm4.isNullAt(19);/* 219 */ int agg_value480 = agg_isNull480 ? -1 : (agg_keyTerm4.getInt(19));/* 220 */ boolean agg_isNull481 = agg_keyTerm4.isNullAt(2);/* 221 */ int agg_value481 = agg_isNull481 ? 
-1 : (agg_keyTerm4.getInt(2));/* 222 */ boolean agg_isNull482 = agg_keyTerm4.isNullAt(4);/* 223 */ UTF8String agg_value482 = agg_isNull482 ? null : (agg_keyTerm4.getUTF8String(4));/* 224 */ boolean agg_isNull483 = agg_keyTerm4.isNullAt(18);/* 225 */ int agg_value483 = agg_isNull483 ? -1 : (agg_keyTerm4.getInt(18));/* 226 */ boolean agg_isNull484 = agg_keyTerm4.isNullAt(8);/* 227 */ UTF8String agg_value484 = agg_isNull484 ? null : (agg_keyTerm4.getUTF8String(8));/* 228 */ boolean agg_isNull485 = agg_keyTerm4.isNullAt(14);/* 229 */ int agg_value485 = agg_isNull485 ? -1 : (agg_keyTerm4.getInt(14));/* 230 */ boolean agg_isNull486 = agg_keyTerm4.isNullAt(3);/* 231 */ long agg_value486 = agg_isNull486 ? -1L : (agg_keyTerm4.getLong(3));/* 232 */ boolean agg_isNull487 = agg_keyTerm4.isNullAt(21);/* 233 */ UTF8String agg_value487 = agg_isNull487 ? null : (agg_keyTerm4.getUTF8String(21));/* 234 */ boolean agg_isNull488 = agg_keyTerm4.isNullAt(9);/* 235 */ UTF8String agg_value488 = agg_isNull488 ? null : (agg_keyTerm4.getUTF8String(9));/* 236 */ boolean agg_isNull489 = agg_keyTerm4.isNullAt(0);/* 237 */ UTF8String agg_value489 = agg_isNull489 ? null : (agg_keyTerm4.getUTF8String(0));/* 238 */ boolean agg_isNull490 = agg_keyTerm4.isNullAt(6);/* 239 */ UTF8String agg_value490 = agg_isNull490 ? null : (agg_keyTerm4.getUTF8String(6));/* 240 */ boolean agg_isNull491 = agg_keyTerm4.isNullAt(17);/* 241 */ long agg_value491 = agg_isNull491 ? -1L : (agg_keyTerm4.getLong(17));/* 242 */ boolean agg_isNull492 = agg_keyTerm4.isNullAt(13);/* 243 */ int agg_value492 = agg_isNull492 ? -1 : (agg_keyTerm4.getInt(13));/* 244 */ boolean agg_isNull493 = agg_keyTerm4.isNullAt(15);/* 245 */ UTF8String agg_value493 = agg_isNull493 ? null : (agg_keyTerm4.getUTF8String(15));/* 246 */ boolean agg_isNull494 = agg_keyTerm4.isNullAt(12);/* 247 */ int agg_value494 = agg_isNull494 ? 
-1 : (agg_keyTerm4.getInt(12));/* 248 */ boolean agg_isNull495 = agg_keyTerm4.isNullAt(22);/* 249 */ UTF8String agg_value495 = agg_isNull495 ? null : (agg_keyTerm4.getUTF8String(22));/* 250 */ boolean agg_isNull496 = agg_keyTerm4.isNullAt(7);/* 251 */ UTF8String agg_value496 = agg_isNull496 ? null : (agg_keyTerm4.getUTF8String(7));/* 252 */ agg_mutableStateArray1[17].reset();/* 253 *//* 254 */ agg_mutableStateArray2[17].zeroOutNullBytes();/* 255 *//* 256 */ if (agg_isNull474) {/* 257 */ agg_mutableStateArray2[17].setNullAt(0);/* 258 */ } else \{/* 259 */ agg_mutableStateArray2[17].write(0, agg_value474);/* 260 */ }/* 261 *//* 262 */ if (agg_isNull475) \{/* 263 */ agg_mutableStateArray2[17].setNullAt(1);/* 264 */ } else \{/* 265 */ agg_mutableStateArray2[17].write(1, agg_value475);/* 266 */ }/* 267 *//* 268 */ if (agg_isNull476) \{/* 269 */ agg_mutableStateArray2[17].setNullAt(2);/* 270 */ } else \{/* 271 */ agg_mutableStateArray2[17].write(2, agg_value476);/* 272 */ }/* 273 *//* 274 */ if (agg_isNull477) \{/* 275 */ agg_mutableStateArray2[17].setNullAt(3);/* 276 */ } else \{/* 277 */ agg_mutableStateArray2[17].write(3, agg_value477);/* 278 */ }/* 279 *//* 280 */ if (agg_isNull478) \{/* 281 */ agg_mutableStateArray2[17].setNullAt(4);/* 282 */ } else \{/* 283 */ agg_mutableStateArray2[17].write(4, agg_value478);/* 284 */ }/* 285 *//* 286 */ if (agg_isNull479) \{/* 287 */ agg_mutableStateArray2[17].setNullAt(5);/* 288 */ } else \{/* 289 */ agg_mutableStateArray2[17].write(5, agg_value479);/* 290 */ }/* 291 *//* 292 */ if (agg_isNull480) \{/* 293 */ agg_mutableStateArray2[17].setNullAt(6);/* 294 */ } else \{/* 295 */ agg_mutableStateArray2[17].write(6, agg_value480);/* 296 */ }/* 297 *//* 298 */ if (agg_isNull481) \{/* 299 */ agg_mutableStateArray2[17].setNullAt(7);/* 300 */ } else \{/* 301 */ agg_mutableStateArray2[17].write(7, agg_value481);/* 302 */ }/* 303 *//* 304 */ if (agg_isNull482) \{/* 305 */ agg_mutableStateArray2[17].setNullAt(8);/* 306 */ } else \{/* 307 
*/ agg_mutableStateArray2[17].write(8, agg_value482);/* 308 */ }/* 309 *//* 310 */ if (agg_isNull483) \{/* 311 */ agg_mutableStateArray2[17].setNullAt(9);/* 312 */ } else \{/* 313 */ agg_mutableStateArray2[17].write(9, agg_value483);/* 314 */ }/* 315 *//* 316 */ if (agg_isNull484) \{/* 317 */ agg_mutableStateArray2[17].setNullAt(10);/* 318 */ } else \{/* 319 */ agg_mutableStateArray2[17].write(10, agg_value484);/* 320 */ }/* 321 *//* 322 */ if (agg_isNull485) \{/* 323 */ agg_mutableStateArray2[17].setNullAt(11);/* 324 */ } else \{/* 325 */ agg_mutableStateArray2[17].write(11, agg_value485);/* 326 */ }/* 327 *//* 328 */ if (agg_isNull486) \{/* 329 */ agg_mutableStateArray2[17].setNullAt(12);/* 330 */ } else \{/* 331 */ agg_mutableStateArray2[17].write(12, agg_value486);/* 332 */ }/* 333 *//* 334 */ if (agg_isNull487) \{/* 335 */ agg_mutableStateArray2[17].setNullAt(13);/* 336 */ } else \{/* 337 */ agg_mutableStateArray2[17].write(13, agg_value487);/* 338 */ }/* 339 *//* 340 */ if (agg_isNull488) \{/* 341 */ agg_mutableStateArray2[17].setNullAt(14);/* 342 */ } else \{/* 343 */ agg_mutableStateArray2[17].write(14, agg_value488);/* 344 */ }/* 345 *//* 346 */ if (agg_isNull489) \{/* 347 */ agg_mutableStateArray2[17].setNullAt(15);/* 348 */ } else \{/* 349 */ agg_mutableStateArray2[17].write(15, agg_value489);/* 350 */ }/* 351 *//* 352 */ if (agg_isNull490) \{/* 353 */ agg_mutableStateArray2[17].setNullAt(16);/* 354 */ } else \{/* 355 */ agg_mutableStateArray2[17].write(16, agg_value490);/* 356 */ }/* 357 *//* 358 */ if (agg_isNull491) \{/* 359 */ agg_mutableStateArray2[17].setNullAt(17);/* 360 */ } else \{/* 361 */ agg_mutableStateArray2[17].write(17, agg_value491);/* 362 */ }/* 363 *//* 364 */ if (agg_isNull492) \{/* 365 */ agg_mutableStateArray2[17].setNullAt(18);/* 366 */ } else \{/* 367 */ agg_mutableStateArray2[17].write(18, agg_value492);/* 368 */ }/* 369 *//* 370 */ if (agg_isNull493) \{/* 371 */ agg_mutableStateArray2[17].setNullAt(19);/* 372 */ } else \{/* 373 
*/ agg_mutableStateArray2[17].write(19, agg_value493);/* 374 */ }/* 375 *//* 376 */ if (agg_isNull494) \{/* 377 */ agg_mutableStateArray2[17].setNullAt(20);/* 378 */ } else \{/* 379 */ agg_mutableStateArray2[17].write(20, agg_value494);/* 380 */ }/* 381 *//* 382 */ if (agg_isNull495) \{/* 383 */ agg_mutableStateArray2[17].setNullAt(21);/* 384 */ } else \{/* 385 */ agg_mutableStateArray2[17].write(21, agg_value495);/* 386 */ }/* 387 *//* 388 */ if (agg_isNull496) \{/* 389 */ agg_mutableStateArray2[17].setNullAt(22);/* 390 */ } else \{/* 391 */ agg_mutableStateArray2[17].write(22, agg_value496);/* 392 */ }/* 393 */ agg_mutableStateArray[17].setTotalSize(agg_mutableStateArray1[17].totalSize());/* 394 */ append(agg_mutableStateArray[17]);/* 395 *//* 396 */ }/* 397 *//* 398 */ private void agg_doConsume2(UTF8String agg_expr_02, boolean agg_exprIsNull_02, int agg_expr_12, boolean agg_exprIsNull_12, int agg_expr_22, boolean agg_exprIsNull_22, long agg_expr_32, boolean agg_exprIsNull_32, UTF8String agg_expr_42, boolean agg_exprIsNull_42, UTF8String agg_expr_52, boolean agg_exprIsNull_52, UTF8String agg_expr_62, boolean agg_exprIsNull_62, UTF8String agg_expr_71, boolean agg_exprIsNull_71, UTF8String agg_expr_81, boolean agg_exprIsNull_81, long agg_expr_91, boolean agg_exprIsNull_91, int agg_expr_101, boolean agg_exprIsNull_101, int agg_expr_111, boolean agg_exprIsNull_111, double agg_expr_121, boolean agg_exprIsNull_121, int agg_expr_131, boolean agg_exprIsNull_131, int agg_expr_141, boolean agg_exprIsNull_141, UTF8String agg_expr_151, boolean agg_exprIsNull_151, UTF8String agg_expr_161, boolean agg_exprIsNull_161) throws java.io.IOException \{/* 399 */ UnsafeRow agg_unsafeRowAggBuffer2 = null;/* 400 *//* 401 */ // generate grouping key/* 402 */ agg_mutableStateArray1[9].reset();/* 403 *//* 404 */ agg_mutableStateArray2[9].zeroOutNullBytes();/* 405 *//* 406 */ if (agg_exprIsNull_02) {/* 407 */ agg_mutableStateArray2[9].setNullAt(0);/* 408 */ } else \{/* 409 */ 
agg_mutableStateArray2[9].write(0, agg_expr_02);/* 410 */ }/* 411 *//* 412 */ if (agg_exprIsNull_12) \{/* 413 */ agg_mutableStateArray2[9].setNullAt(1);/* 414 */ } else \{/* 415 */ agg_mutableStateArray2[9].write(1, agg_expr_12);/* 416 */ }/* 417 *//* 418 */ if (agg_exprIsNull_22) \{/* 419 */ agg_mutableStateArray2[9].setNullAt(2);/* 420 */ } else \{/* 421 */ agg_mutableStateArray2[9].write(2, agg_expr_22);/* 422 */ }/* 423 *//* 424 */ if (agg_exprIsNull_32) \{/* 425 */ agg_mutableStateArray2[9].setNullAt(3);/* 426 */ } else \{/* 427 */ agg_mutableStateArray2[9].write(3, agg_expr_32);/* 428 */ }/* 429 *//* 430 */ if (agg_exprIsNull_42) \{/* 431 */ agg_mutableStateArray2[9].setNullAt(4);/* 432 */ } else \{/* 433 */ agg_mutableStateArray2[9].write(4, agg_expr_42);/* 434 */ }/* 435 *//* 436 */ if (agg_exprIsNull_52) \{/* 437 */ agg_mutableStateArray2[9].setNullAt(5);/* 438 */ } else \{/* 439 */ agg_mutableStateArray2[9].write(5, agg_expr_52);/* 440 */ }/* 441 *//* 442 */ if (agg_exprIsNull_62) \{/* 443 */ agg_mutableStateArray2[9].setNullAt(6);/* 444 */ } else \{/* 445 */ agg_mutableStateArray2[9].write(6, agg_expr_62);/* 446 */ }/* 447 *//* 448 */ if (agg_exprIsNull_71) \{/* 449 */ agg_mutableStateArray2[9].setNullAt(7);/* 450 */ } else \{/* 451 */ agg_mutableStateArray2[9].write(7, agg_expr_71);/* 452 */ }/* 453 *//* 454 */ if (agg_exprIsNull_81) \{/* 455 */ agg_mutableStateArray2[9].setNullAt(8);/* 456 */ } else \{/* 457 */ agg_mutableStateArray2[9].write(8, agg_expr_81);/* 458 */ }/* 459 *//* 460 */ if (agg_exprIsNull_91) \{/* 461 */ agg_mutableStateArray2[9].setNullAt(9);/* 462 */ } else \{/* 463 */ agg_mutableStateArray2[9].write(9, agg_expr_91);/* 464 */ }/* 465 *//* 466 */ if (agg_exprIsNull_101) \{/* 467 */ agg_mutableStateArray2[9].setNullAt(10);/* 468 */ } else \{/* 469 */ agg_mutableStateArray2[9].write(10, agg_expr_101);/* 470 */ }/* 471 *//* 472 */ if (agg_exprIsNull_111) \{/* 473 */ agg_mutableStateArray2[9].setNullAt(11);/* 474 */ } else \{/* 475 */ 
agg_mutableStateArray2[9].write(11, agg_expr_111);/* 476 */ }/* 477 *//* 478 */ if (agg_exprIsNull_121) \{/* 479 */ agg_mutableStateArray2[9].setNullAt(12);/* 480 */ } else \{/* 481 */ agg_mutableStateArray2[9].write(12, agg_expr_121);/* 482 */ }/* 483 *//* 484 */ if (agg_exprIsNull_131) \{/* 485 */ agg_mutableStateArray2[9].setNullAt(13);/* 486 */ } else \{/* 487 */ agg_mutableStateArray2[9].write(13, agg_expr_131);/* 488 */ }/* 489 *//* 490 */ if (agg_exprIsNull_141) \{/* 491 */ agg_mutableStateArray2[9].setNullAt(14);/* 492 */ } else \{/* 493 */ agg_mutableStateArray2[9].write(14, agg_expr_141);/* 494 */ }/* 495 *//* 496 */ if (agg_exprIsNull_151) \{/* 497 */ agg_mutableStateArray2[9].setNullAt(15);/* 498 */ } else \{/* 499 */ agg_mutableStateArray2[9].write(15, agg_expr_151);/* 500 */ }/* 501 *//* 502 */ if (agg_exprIsNull_161) \{/* 503 */ agg_mutableStateArray2[9].setNullAt(16);/* 504 */ } else \{/* 505 */ agg_mutableStateArray2[9].write(16, agg_expr_161);/* 506 */ }/* 507 */ agg_mutableStateArray[9].setTotalSize(agg_mutableStateArray1[9].totalSize());/* 508 */ int agg_value190 = 42;/* 509 *//* 510 */ if (!agg_exprIsNull_02) \{/* 511 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_02.getBaseObject(), agg_expr_02.getBaseOffset(), agg_expr_02.numBytes(), agg_value190);/* 512 */ }/* 513 *//* 514 */ if (!agg_exprIsNull_12) \{/* 515 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_12, agg_value190);/* 516 */ }/* 517 *//* 518 */ if (!agg_exprIsNull_22) \{/* 519 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_22, agg_value190);/* 520 */ }/* 521 *//* 522 */ if (!agg_exprIsNull_32) \{/* 523 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashLong(agg_expr_32, agg_value190);/* 524 */ }/* 525 *//* 526 */ if (!agg_exprIsNull_42) \{/* 527 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_42.getBaseObject(), 
agg_expr_42.getBaseOffset(), agg_expr_42.numBytes(), agg_value190);/* 528 */ }/* 529 *//* 530 */ if (!agg_exprIsNull_52) \{/* 531 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_52.getBaseObject(), agg_expr_52.getBaseOffset(), agg_expr_52.numBytes(), agg_value190);/* 532 */ }/* 533 *//* 534 */ if (!agg_exprIsNull_62) \{/* 535 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_62.getBaseObject(), agg_expr_62.getBaseOffset(), agg_expr_62.numBytes(), agg_value190);/* 536 */ }/* 537 *//* 538 */ if (!agg_exprIsNull_71) \{/* 539 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_71.getBaseObject(), agg_expr_71.getBaseOffset(), agg_expr_71.numBytes(), agg_value190);/* 540 */ }/* 541 *//* 542 */ if (!agg_exprIsNull_81) \{/* 543 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_81.getBaseObject(), agg_expr_81.getBaseOffset(), agg_expr_81.numBytes(), agg_value190);/* 544 */ }/* 545 *//* 546 */ if (!agg_exprIsNull_91) \{/* 547 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashLong(agg_expr_91, agg_value190);/* 548 */ }/* 549 *//* 550 */ if (!agg_exprIsNull_101) \{/* 551 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_101, agg_value190);/* 552 */ }/* 553 *//* 554 */ if (!agg_exprIsNull_111) \{/* 555 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_111, agg_value190);/* 556 */ }/* 557 *//* 558 */ if (!agg_exprIsNull_121) \{/* 559 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashLong(Double.doubleToLongBits(agg_expr_121), agg_value190);/* 560 */ }/* 561 *//* 562 */ if (!agg_exprIsNull_131) \{/* 563 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_131, agg_value190);/* 564 */ }/* 565 *//* 566 */ if (!agg_exprIsNull_141) \{/* 567 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_141, 
agg_value190);/* 568 */ }/* 569 *//* 570 */ if (!agg_exprIsNull_151) \{/* 571 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_151.getBaseObject(), agg_expr_151.getBaseOffset(), agg_expr_151.numBytes(), agg_value190);/* 572 */ }/* 573 *//* 574 */ if (!agg_exprIsNull_161) \{/* 575 */ agg_value190 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_161.getBaseObject(), agg_expr_161.getBaseOffset(), agg_expr_161.numBytes(), agg_value190);/* 576 */ }/* 577 */ if (true) \{/* 578 */ // try to get the buffer from hash map/* 579 */ agg_unsafeRowAggBuffer2 =/* 580 */ agg_hashMap2.getAggregationBufferFromUnsafeRow(agg_mutableStateArray[9], agg_value190);/* 581 */ }/* 582 */ // Can't allocate buffer from the hash map. Spill the map and fallback to sort-based/* 583 */ // aggregation after processing all input rows./* 584 */ if (agg_unsafeRowAggBuffer2 == null) \{/* 585 */ if (agg_sorter2 == null) {/* 586 */ agg_sorter2 = agg_hashMap2.destructAndCreateExternalSorter();/* 587 */ } else \{/* 588 */ agg_sorter2.merge(agg_hashMap2.destructAndCreateExternalSorter());/* 589 */ }/* 590 *//* 591 */ // the hash map had be spilled, it should have enough memory now,/* 592 */ // try to allocate buffer again./* 593 */ agg_unsafeRowAggBuffer2 = agg_hashMap2.getAggregationBufferFromUnsafeRow(/* 594 */ agg_mutableStateArray[9], agg_value190);/* 595 */ if (agg_unsafeRowAggBuffer2 == null) \{/* 596 */ // failed to allocate the first page/* 597 */ throw new OutOfMemoryError("No enough memory for aggregation");/* 598 */ }/* 599 */ }/* 600 *//* 601 */ // common sub-expressions/* 602 *//* 603 */ // evaluate aggregate function/* 604 *//* 605 */ // update unsafe row buffer/* 606 *//* 607 */ }/* 608 *//* 609 */ private void agg_doAggregateWithKeys() throws java.io.IOException \{/* 610 */ if (!agg_initAgg1) {/* 611 */ agg_initAgg1 = true;/* 612 */ long agg_beforeAgg3 = System.nanoTime();/* 613 */ agg_doAggregateWithKeys1();/* 614 */ 
((org.apache.spark.sql.execution.metric.SQLMetric) references[33] /* aggTime */).add((System.nanoTime() - agg_beforeAgg3) / 1000000);/* 615 */ }/* 616 *//* 617 */ // output the result/* 618 *//* 619 */ while (agg_mapIter1.next()) \{/* 620 */ UnsafeRow agg_aggKey3 = (UnsafeRow) agg_mapIter1.getKey();/* 621 */ UnsafeRow agg_aggBuffer3 = (UnsafeRow) agg_mapIter1.getValue();/* 622 */ agg_doAggregateWithKeysOutput3(agg_aggKey3, agg_aggBuffer3);/* 623 *//* 624 */ if (shouldStop()) return;/* 625 */ }/* 626 *//* 627 */ agg_mapIter1.close();/* 628 */ if (agg_sorter1 == null) \{/* 629 */ agg_hashMap1.free();/* 630 */ }/* 631 *//* 632 */ agg_mapIter = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[0] /* plan */).finishAggregate(agg_hashMap, agg_sorter, ((org.apache.spark.sql.execution.metric.SQLMetric) references[1] /* peakMemory */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[2] /* spillSize */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[3] /* avgHashProbe */));/* 633 */ }/* 634 *//* 635 */ private void agg_doAggregateWithKeys1() throws java.io.IOException \{/* 636 */ if (!agg_initAgg2) {/* 637 */ agg_initAgg2 = true;/* 638 */ long agg_beforeAgg2 = System.nanoTime();/* 639 */ agg_doAggregateWithKeys2();/* 640 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[31] /* aggTime */).add((System.nanoTime() - agg_beforeAgg2) / 1000000);/* 641 */ }/* 642 *//* 643 */ // output the result/* 644 *//* 645 */ while (agg_mapIter2.next()) \{/* 646 */ UnsafeRow agg_aggKey2 = (UnsafeRow) agg_mapIter2.getKey();/* 647 */ UnsafeRow agg_aggBuffer2 = (UnsafeRow) agg_mapIter2.getValue();/* 648 */ agg_doAggregateWithKeysOutput2(agg_aggKey2, agg_aggBuffer2);/* 649 *//* 650 */ if (shouldStop()) return;/* 651 */ }/* 652 *//* 653 */ agg_mapIter2.close();/* 654 */ if (agg_sorter2 == null) \{/* 655 */ agg_hashMap2.free();/* 656 */ }/* 657 *//* 658 */ agg_mapIter1 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) 
references[4] /* plan */).finishAggregate(agg_hashMap1, agg_sorter1, ((org.apache.spark.sql.execution.metric.SQLMetric) references[5] /* peakMemory */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[6] /* spillSize */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[7] /* avgHashProbe */));/* 659 */ }/* 660 *//* 661 */ private void agg_doAggregateWithKeys4() throws java.io.IOException \{/* 662 */ while (inputadapter_input.hasNext() && !stopEarly()) {/* 663 */ InternalRow inputadapter_row = (InternalRow) inputadapter_input.next();/* 664 */ boolean inputadapter_isNull = inputadapter_row.isNullAt(0);/* 665 */ UTF8String inputadapter_value = inputadapter_isNull ? null : (inputadapter_row.getUTF8String(0));/* 666 */ boolean inputadapter_isNull1 = inputadapter_row.isNullAt(1);/* 667 */ long inputadapter_value1 = inputadapter_isNull1 ? -1L : (inputadapter_row.getLong(1));/* 668 */ boolean inputadapter_isNull2 = inputadapter_row.isNullAt(2);/* 669 */ UTF8String inputadapter_value2 = inputadapter_isNull2 ? null : (inputadapter_row.getUTF8String(2));/* 670 */ boolean inputadapter_isNull3 = inputadapter_row.isNullAt(3);/* 671 */ UTF8String inputadapter_value3 = inputadapter_isNull3 ? null : (inputadapter_row.getUTF8String(3));/* 672 */ boolean inputadapter_isNull4 = inputadapter_row.isNullAt(4);/* 673 */ UTF8String inputadapter_value4 = inputadapter_isNull4 ? null : (inputadapter_row.getUTF8String(4));/* 674 */ boolean inputadapter_isNull5 = inputadapter_row.isNullAt(5);/* 675 */ int inputadapter_value5 = inputadapter_isNull5 ? -1 : (inputadapter_row.getInt(5));/* 676 */ boolean inputadapter_isNull6 = inputadapter_row.isNullAt(6);/* 677 */ UTF8String inputadapter_value6 = inputadapter_isNull6 ? 
null : (inputadapter_row.getUTF8String(6));/* 678 *//* 679 */ agg_doConsume(inputadapter_row, inputadapter_value, inputadapter_isNull, inputadapter_value1, inputadapter_isNull1, inputadapter_value2, inputadapter_isNull2, inputadapter_value3, inputadapter_isNull3, inputadapter_value4, inputadapter_isNull4, inputadapter_value5, inputadapter_isNull5, inputadapter_value6, inputadapter_isNull6);/* 680 */ if (shouldStop()) return;/* 681 */ }/* 682 *//* 683 */ agg_mapIter4 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[16] /* plan */).finishAggregate(agg_hashMap4, agg_sorter4, ((org.apache.spark.sql.execution.metric.SQLMetric) references[17] /* peakMemory */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[18] /* spillSize */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[19] /* avgHashProbe */));/* 684 */ }/* 685 *//* 686 */ private void agg_doConsume1(long agg_expr_01, boolean agg_exprIsNull_01, UTF8String agg_expr_11, boolean agg_exprIsNull_11, int agg_expr_21, boolean agg_exprIsNull_21, int agg_expr_31, boolean agg_exprIsNull_31, UTF8String agg_expr_41, boolean agg_exprIsNull_41, double agg_expr_51, boolean agg_exprIsNull_51, int agg_expr_61, boolean agg_exprIsNull_61, int agg_expr_7, boolean agg_exprIsNull_7, UTF8String agg_expr_8, boolean agg_exprIsNull_8, int agg_expr_9, boolean agg_exprIsNull_9, UTF8String agg_expr_10, boolean agg_exprIsNull_10, int agg_expr_11, boolean agg_exprIsNull_11, long agg_expr_12, boolean agg_exprIsNull_12, UTF8String agg_expr_13, boolean agg_exprIsNull_13, UTF8String agg_expr_14, boolean agg_exprIsNull_14, UTF8String agg_expr_15, boolean agg_exprIsNull_15, UTF8String agg_expr_16, boolean agg_exprIsNull_16) throws java.io.IOException \{/* 687 */ UnsafeRow agg_unsafeRowAggBuffer1 = null;/* 688 *//* 689 */ // generate grouping key/* 690 */ agg_mutableStateArray1[6].reset();/* 691 *//* 692 */ agg_mutableStateArray2[6].zeroOutNullBytes();/* 693 *//* 694 */ if (agg_exprIsNull_15) {/* 
695 */ agg_mutableStateArray2[6].setNullAt(0);/* 696 */ } else \{/* 697 */ agg_mutableStateArray2[6].write(0, agg_expr_15);/* 698 */ }/* 699 *//* 700 */ if (agg_exprIsNull_31) \{/* 701 */ agg_mutableStateArray2[6].setNullAt(1);/* 702 */ } else \{/* 703 */ agg_mutableStateArray2[6].write(1, agg_expr_31);/* 704 */ }/* 705 *//* 706 */ if (agg_exprIsNull_7) \{/* 707 */ agg_mutableStateArray2[6].setNullAt(2);/* 708 */ } else \{/* 709 */ agg_mutableStateArray2[6].write(2, agg_expr_7);/* 710 */ }/* 711 *//* 712 */ if (agg_exprIsNull_12) \{/* 713 */ agg_mutableStateArray2[6].setNullAt(3);/* 714 */ } else \{/* 715 */ agg_mutableStateArray2[6].write(3, agg_expr_12);/* 716 */ }/* 717 *//* 718 */ if (agg_exprIsNull_8) \{/* 719 */ agg_mutableStateArray2[6].setNullAt(4);/* 720 */ } else \{/* 721 */ agg_mutableStateArray2[6].write(4, agg_expr_8);/* 722 */ }/* 723 *//* 724 */ if (agg_exprIsNull_11) \{/* 725 */ agg_mutableStateArray2[6].setNullAt(5);/* 726 */ } else \{/* 727 */ agg_mutableStateArray2[6].write(5, agg_expr_11);/* 728 */ }/* 729 *//* 730 */ if (agg_exprIsNull_16) \{/* 731 */ agg_mutableStateArray2[6].setNullAt(6);/* 732 */ } else \{/* 733 */ agg_mutableStateArray2[6].write(6, agg_expr_16);/* 734 */ }/* 735 *//* 736 */ if (agg_exprIsNull_10) \{/* 737 */ agg_mutableStateArray2[6].setNullAt(7);/* 738 */ } else \{/* 739 */ agg_mutableStateArray2[6].write(7, agg_expr_10);/* 740 */ }/* 741 *//* 742 */ if (agg_exprIsNull_14) \{/* 743 */ agg_mutableStateArray2[6].setNullAt(8);/* 744 */ } else \{/* 745 */ agg_mutableStateArray2[6].write(8, agg_expr_14);/* 746 */ }/* 747 *//* 748 */ if (agg_exprIsNull_01) \{/* 749 */ agg_mutableStateArray2[6].setNullAt(9);/* 750 */ } else \{/* 751 */ agg_mutableStateArray2[6].write(9, agg_expr_01);/* 752 */ }/* 753 *//* 754 */ if (agg_exprIsNull_21) \{/* 755 */ agg_mutableStateArray2[6].setNullAt(10);/* 756 */ } else \{/* 757 */ agg_mutableStateArray2[6].write(10, agg_expr_21);/* 758 */ }/* 759 *//* 760 */ if (agg_exprIsNull_11) \{/* 761 */ 
agg_mutableStateArray2[6].setNullAt(11);/* 762 */ } else \{/* 763 */ agg_mutableStateArray2[6].write(11, agg_expr_11);/* 764 */ }/* 765 *//* 766 */ if (agg_exprIsNull_51) \{/* 767 */ agg_mutableStateArray2[6].setNullAt(12);/* 768 */ } else \{/* 769 */ agg_mutableStateArray2[6].write(12, agg_expr_51);/* 770 */ }/* 771 *//* 772 */ if (agg_exprIsNull_9) \{/* 773 */ agg_mutableStateArray2[6].setNullAt(13);/* 774 */ } else \{/* 775 */ agg_mutableStateArray2[6].write(13, agg_expr_9);/* 776 */ }/* 777 *//* 778 */ if (agg_exprIsNull_61) \{/* 779 */ agg_mutableStateArray2[6].setNullAt(14);/* 780 */ } else \{/* 781 */ agg_mutableStateArray2[6].write(14, agg_expr_61);/* 782 */ }/* 783 *//* 784 */ if (agg_exprIsNull_41) \{/* 785 */ agg_mutableStateArray2[6].setNullAt(15);/* 786 */ } else \{/* 787 */ agg_mutableStateArray2[6].write(15, agg_expr_41);/* 788 */ }/* 789 *//* 790 */ if (agg_exprIsNull_13) \{/* 791 */ agg_mutableStateArray2[6].setNullAt(16);/* 792 */ } else \{/* 793 */ agg_mutableStateArray2[6].write(16, agg_expr_13);/* 794 */ }/* 795 */ agg_mutableStateArray[6].setTotalSize(agg_mutableStateArray1[6].totalSize());/* 796 */ int agg_value87 = 42;/* 797 *//* 798 */ if (!agg_exprIsNull_15) \{/* 799 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_15.getBaseObject(), agg_expr_15.getBaseOffset(), agg_expr_15.numBytes(), agg_value87);/* 800 */ }/* 801 *//* 802 */ if (!agg_exprIsNull_31) \{/* 803 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_31, agg_value87);/* 804 */ }/* 805 *//* 806 */ if (!agg_exprIsNull_7) \{/* 807 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_7, agg_value87);/* 808 */ }/* 809 *//* 810 */ if (!agg_exprIsNull_12) \{/* 811 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashLong(agg_expr_12, agg_value87);/* 812 */ }/* 813 *//* 814 */ if (!agg_exprIsNull_8) \{/* 815 */ agg_value87 = 
org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_8.getBaseObject(), agg_expr_8.getBaseOffset(), agg_expr_8.numBytes(), agg_value87);/* 816 */ }/* 817 *//* 818 */ if (!agg_exprIsNull_11) \{/* 819 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_11.getBaseObject(), agg_expr_11.getBaseOffset(), agg_expr_11.numBytes(), agg_value87);/* 820 */ }/* 821 *//* 822 */ if (!agg_exprIsNull_16) \{/* 823 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_16.getBaseObject(), agg_expr_16.getBaseOffset(), agg_expr_16.numBytes(), agg_value87);/* 824 */ }/* 825 *//* 826 */ if (!agg_exprIsNull_10) \{/* 827 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_10.getBaseObject(), agg_expr_10.getBaseOffset(), agg_expr_10.numBytes(), agg_value87);/* 828 */ }/* 829 *//* 830 */ if (!agg_exprIsNull_14) \{/* 831 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_14.getBaseObject(), agg_expr_14.getBaseOffset(), agg_expr_14.numBytes(), agg_value87);/* 832 */ }/* 833 *//* 834 */ if (!agg_exprIsNull_01) \{/* 835 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashLong(agg_expr_01, agg_value87);/* 836 */ }/* 837 *//* 838 */ if (!agg_exprIsNull_21) \{/* 839 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_21, agg_value87);/* 840 */ }/* 841 *//* 842 */ if (!agg_exprIsNull_11) \{/* 843 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_11, agg_value87);/* 844 */ }/* 845 *//* 846 */ if (!agg_exprIsNull_51) \{/* 847 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashLong(Double.doubleToLongBits(agg_expr_51), agg_value87);/* 848 */ }/* 849 *//* 850 */ if (!agg_exprIsNull_9) \{/* 851 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_9, agg_value87);/* 852 */ }/* 853 *//* 854 */ if (!agg_exprIsNull_61) \{/* 855 */ agg_value87 = 
org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(agg_expr_61, agg_value87);/* 856 */ }/* 857 *//* 858 */ if (!agg_exprIsNull_41) \{/* 859 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_41.getBaseObject(), agg_expr_41.getBaseOffset(), agg_expr_41.numBytes(), agg_value87);/* 860 */ }/* 861 *//* 862 */ if (!agg_exprIsNull_13) \{/* 863 */ agg_value87 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashUnsafeBytes(agg_expr_13.getBaseObject(), agg_expr_13.getBaseOffset(), agg_expr_13.numBytes(), agg_value87);/* 864 */ }/* 865 */ if (true) \{/* 866 */ // try to get the buffer from hash map/* 867 */ agg_unsafeRowAggBuffer1 =/* 868 */ agg_hashMap3.getAggregationBufferFromUnsafeRow(agg_mutableStateArray[6], agg_value87);/* 869 */ }/* 870 */ // Can't allocate buffer from the hash map. Spill the map and fallback to sort-based/* 871 */ // aggregation after processing all input rows./* 872 */ if (agg_unsafeRowAggBuffer1 == null) \{/* 873 */ if (agg_sorter3 == null) {/* 874 */ agg_sorter3 = agg_hashMap3.destructAndCreateExternalSorter();/* 875 */ } else \{/* 876 */ agg_sorter3.merge(agg_hashMap3.destructAndCreateExternalSorter());/* 877 */ }/* 878 *//* 879 */ // the hash map had be spilled, it should have enough memory now,/* 880 */ // try to allocate buffer again./* 881 */ agg_unsafeRowAggBuffer1 = agg_hashMap3.getAggregationBufferFromUnsafeRow(/* 882 */ agg_mutableStateArray[6], agg_value87);/* 883 */ if (agg_unsafeRowAggBuffer1 == null) \{/* 884 */ // failed to allocate the first page/* 885 */ throw new OutOfMemoryError("No enough memory for aggregation");/* 886 */ }/* 887 */ }/* 888 *//* 889 */ // common sub-expressions/* 890 *//* 891 */ // evaluate aggregate function/* 892 *//* 893 */ // update unsafe row buffer/* 894 *//* 895 */ }/* 896 *//* 897 */ private void wholestagecodegen_init_5() \{/* 898 */ agg_mutableStateArray2[11] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_mutableStateArray1[11], 23);/* 899 
*/ agg_mutableStateArray[12] = new UnsafeRow(23);/* 900 */ agg_mutableStateArray1[12] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_mutableStateArray[12], 352);/* 901 */ agg_mutableStateArray2[12] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_mutableStateArray1[12], 23);/* 902 */ agg_mutableStateArray[13] = new UnsafeRow(23);/* 903 */ agg_mutableStateArray1[13] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_mutableStateArray[13], 352);/* 904 */ agg_mutableStateArray2[13] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_mutableStateArray1[13], 23);/* 905 */ agg_mutableStateArray[14] = new UnsafeRow(23);/* 906 */ agg_mutableStateArray1[14] = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_mutableStateArray[14], 352);/* 907 */ agg_mutableStateArray2[14] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_mutableStateArray1[14], 23);/* 908 *//* 909 */ }/* 910 *//* 911 */ private void agg_doConsume4(UTF8String agg_expr_04, boolean agg_exprIsNull_04, int agg_expr_14, boolean agg_exprIsNull_14, int agg_expr_24, boolean agg_exprIsNull_24, long agg_expr_34, boolean agg_exprIsNull_34, UTF8String agg_expr_44, boolean agg_exprIsNull_44, UTF8String agg_expr_54, boolean agg_exprIsNull_54, UTF8String agg_expr_64, boolean agg_exprIsNull_64, UTF8String agg_expr_73, boolean agg_exprIsNull_73, UTF8String agg_expr_83, boolean agg_exprIsNull_83, UTF8String agg_expr_93, boolean agg_exprIsNull_93, long agg_expr_103, boolean agg_exprIsNull_103, int agg_expr_113, boolean agg_exprIsNull_113, int agg_expr_123, boolean agg_exprIsNull_123, int agg_expr_133, boolean agg_exprIsNull_133, int agg_expr_143, boolean agg_exprIsNull_143, UTF8String agg_expr_153, boolean agg_exprIsNull_153, double agg_expr_163, boolean agg_exprIsNull_163, long agg_expr_171, boolean agg_exprIsNull_171, int agg_expr_181, boolean agg_exprIsNull_181, int agg_expr_191, 
boolean agg_exprIsNull_191, UTF8String agg_expr_201, boolean agg_exprIsNull_201, UTF8String agg_expr_211, boolean agg_exprIsNull_211, UTF8String agg_expr_221, boolean agg_exprIsNull_221) throws java.io.IOException \{/* 912 */ UnsafeRow agg_unsafeRowAggBuffer4 = null;/* 913 *//* 914 */ // generate grouping key/* 915 */ agg_mutableStateArray1[16].reset();/* 916 *//* 917 */ agg_mutableStateArray2[16].zeroOutNullBytes();/* 918 *//* 919 */ if (agg_exprIsNull_04) {/* 920 */ agg_mutableStateArray2[16].setNullAt(0);/* 921 */ } else \{/* 922 */ agg_mutableStateArray2[16].write(0, agg_expr_04);/* 923 */ }/* 924 *//* 925 */ if (agg_exprIsNull_14) \{/* 926 */ agg_mutableStateArray2[16].setNullAt(1);/* 927 */ } else \{/* 928 */ agg_mutableStateArray2[16].write(1, agg_expr_14);/* 929 */ }/* 930 *//* 931 */ if (agg_exprIsNull_24) \{/* 932 */ agg_mutableStateArray2[16].setNullAt(2);/* 933 */ } else \{/* 934 */ agg_mutableStateArray2[16].write(2, agg_expr_24);/* 935 */ }/* 936 *//* 937 */ if (agg_exprIsNull_34) \{/* 938 */ agg_mutableStateArray2[16].setNullAt(3);/* 939 */ } else \{/* 940 */ agg_mutableStateArray2[16].write(3, agg_expr_34);/* 941 */ }/* 942 *//* 943 */ if (agg_exprIsNull_44) \{/* 944 */ agg_mutableStateArray2[16].setNullAt(4);/* 945 */ } else \{/* 946 */ agg_mutableStateArray2[16].write(4, agg_expr_44);/* 947 */ }/* 948 *//* 949 */ if (agg_exprIsNull_54) \{/* 950 */ agg_mutableStateArray2[16].setNullAt(5);/* 951 */ } else \{/* 952 */ agg_mutableStateArray2[16].write(5, agg_expr_54);/* 953 */ }/* 954 *//* 955 */ if (agg_exprIsNull_64) \{/* 956 */ agg_mutableStateArray2[16].setNullAt(6);/* 957 */ } else \{/* 958 */ agg_mutableStateArray2[16].write(6, agg_expr_64);/* 959 */ }/* 960 *//* 961 */ if (agg_exprIsNull_73) \{/* 962 */ agg_mutableStateArray2[16].setNullAt(7);/* 963 */ } else \{/* 964 */ agg_mutableStateArray2[16].write(7, agg_expr_73);/* 965 */ }/* 966 *//* 967 */ if (agg_exprIsNull_83) \{/* 968 */ agg_mutableStateArray2[16].setNullAt(8);/* 969 */ } else \{/* 
970 */ agg_mutableStateArray2[16].write(8, agg_expr_83);/* 971 */ }/* 972 *//* 973 */ if (agg_exprIsNull_93) \{/* 974 */ agg_mutableStateArray2[16].setNullAt(9);/* 975 */ } else \{/* 976 */ agg_mutableStateArray2[16].write(9, agg_expr_93);/* 977 */ }/* 978 *//* 979 */ if (agg_exprIsNull_103) \{/* 980 */ agg_mutableStateArray2[16].setNullAt(10);/* 981 */ } else \{/* 982 */ agg_mutableStateArray2[16].write(10, agg_expr_103);/* 983 */ }/* 984 *//* 985 */ if (agg_exprIsNull_113) \{/* 986 */ agg_mutableStateArray2[16].setNullAt(11);/* 987 */ } else \{/* 988 */ agg_mutableStateArray2[16].write(11, agg_expr_113);/* 989 */ }/* 990 *//* 991 */ if (agg_exprIsNull_123) \{/* 992 */ agg_mutableStateArray2[16].setNullAt(12);/* 993 */ } else \{/* 994 */ agg_mutableStateArray2[16].write(12, agg_expr_123);/* 995 */ }/* 996 *//* 997 */ if (agg_exprIsNull_133) \{/* 998 */ agg_mutableStateArray2[16].setNullAt(13);/* 999 */ } else {/* 1000 */ agg_mutableStateArray2[16].write(13, agg_expr_133);/* 1001 */ [truncated to 1000 lines (total lines is 1834)]
2019-09-20 17:10:14,222 [Driver] WARN org.apache.spark.sql.execution.WholeStageCodegenExec - Whole-stage codegen disabled for plan (id=7): *(7) HashAggregate(keys=[atg_order_number#609, purchase_date#39, product_sk#584, pode_transaction_number#633L, void_flag#51, order_number#35, country_origin#617, department_name#789, pode_order_number#625, merch_flag#98, transaction_number#33L, transaction_sequence_number#864, department_number#147, p_product_sk#796, pode_transaction_sequence#641, brand_name#775, discount_amount#43, sku_number#153L, shipped_date#595, promotion_sk#573, transaction_type#40, sample_sku_flag#97, product_category#782], functions=[], output=[transaction_number#33L, order_number#35, transaction_sequence_number#864, purchase_date#39, transaction_type#40, discount_amount#43, promotion_sk#573, product_sk#584, void_flag#51, shipped_date#595, pode_order_number#625, pode_transaction_sequence#641, pode_transaction_number#633L, sample_sku_flag#97, merch_flag#98, atg_order_number#609, country_origin#617, sku_number#153L, p_product_sk#796, brand_name#775, department_number#147, product_category#782, department_name#789])+- *(7) HashAggregate(keys=[atg_order_number#609, purchase_date#39, product_sk#584, pode_transaction_number#633L, void_flag#51, order_number#35, country_origin#617, department_name#789, pode_order_number#625, merch_flag#98, transaction_number#33L, transaction_sequence_number#864, department_number#147, p_product_sk#796, pode_transaction_sequence#641, brand_name#775, discount_amount#43, sku_number#153L, shipped_date#595, promotion_sk#573, transaction_type#40, sample_sku_flag#97, product_category#782], functions=[], output=[atg_order_number#609, purchase_date#39, product_sk#584, pode_transaction_number#633L, void_flag#51, order_number#35, country_origin#617, department_name#789, pode_order_number#625, merch_flag#98, transaction_number#33L, transaction_sequence_number#864, department_number#147, p_product_sk#796, pode_transaction_sequence#641, 
brand_name#775, discount_amount#43, sku_number#153L, shipped_date#595, promotion_sk#573, transaction_type#40, sample_sku_flag#97, product_category#782]) +- *(7) BroadcastHashJoin [product_sk#584], [p_product_sk#796], Inner, BuildRight :- *(7) HashAggregate(keys=[atg_order_number#609, purchase_date#39, product_sk#584, pode_transaction_number#633L, void_flag#51, order_number#35, country_origin#617, pode_order_number#625, merch_flag#98, transaction_number#33L, transaction_sequence_number#864, pode_transaction_sequence#641, discount_amount#43, shipped_date#595, promotion_sk#573, transaction_type#40, sample_sku_flag#97], functions=[], output=[transaction_number#33L, order_number#35, transaction_sequence_number#864, purchase_date#39, transaction_type#40, discount_amount#43, promotion_sk#573, product_sk#584, void_flag#51, shipped_date#595, pode_order_number#625, pode_transaction_sequence#641, pode_transaction_number#633L, sample_sku_flag#97, merch_flag#98, atg_order_number#609, country_origin#617]) : +- *(7) HashAggregate(keys=[atg_order_number#609, purchase_date#39, product_sk#584, pode_transaction_number#633L, void_flag#51, order_number#35, country_origin#617, pode_order_number#625, merch_flag#98, transaction_number#33L, transaction_sequence_number#864, pode_transaction_sequence#641, discount_amount#43, shipped_date#595, promotion_sk#573, transaction_type#40, sample_sku_flag#97], functions=[], output=[atg_order_number#609, purchase_date#39, product_sk#584, pode_transaction_number#633L, void_flag#51, order_number#35, country_origin#617, pode_order_number#625, merch_flag#98, transaction_number#33L, transaction_sequence_number#864, pode_transaction_sequence#641, discount_amount#43, shipped_date#595, promotion_sk#573, transaction_type#40, sample_sku_flag#97]) : +- *(7) Project [transaction_number#33L, order_number#35, transaction_sequence#36 AS transaction_sequence_number#864, purchase_date#39, transaction_type#40, discount_amount#43, promotion_sk#573, product_sk#584, 
void_flag#51, shipped_date#595, pode_order_number#625, pode_transaction_sequence#641, pode_transaction_number#633L, sample_sku_flag#97, merch_flag#98, atg_order_number#609, country_origin#617] : +- *(7) BroadcastHashJoin [order_number#35, transaction_sequence#36, transaction_number#33L], [pode_order_number#625, pode_transaction_sequence#641, pode_transaction_number#633L], Inner, BuildLeft : :- BroadcastExchange HashedRelationBroadcastMode(List(input[1, string, true], input[2, int, true], input[0, bigint, true])) : : +- *(3) HashAggregate(keys=[purchase_date#39, product_sk#584, void_flag#51, order_number#35, transaction_number#33L, discount_amount#43, shipped_date#595, promotion_sk#573, transaction_type#40, transaction_sequence#36], functions=[], output=[transaction_number#33L, order_number#35, transaction_sequence#36, purchase_date#39, transaction_type#40, discount_amount#43, promotion_sk#573, product_sk#584, void_flag#51, shipped_date#595]) : : +- Exchange hashpartitioning(purchase_date#39, product_sk#584, void_flag#51, order_number#35, transaction_number#33L, discount_amount#43, shipped_date#595, promotion_sk#573, transaction_type#40, transaction_sequence#36, 200) : : +- *(2) HashAggregate(keys=[purchase_date#39, product_sk#584, void_flag#51, order_number#35, transaction_number#33L, discount_amount#43, shipped_date#595, promotion_sk#573, transaction_type#40, transaction_sequence#36], functions=[], output=[purchase_date#39, product_sk#584, void_flag#51, order_number#35, transaction_number#33L, discount_amount#43, shipped_date#595, promotion_sk#573, transaction_type#40, transaction_sequence#36]) : : +- *(2) Project [transaction_number#33L, order_number#35, transaction_sequence#36, purchase_date#39, transaction_type#40, discount_amount#43, promotion_redemptionid#47 AS promotion_sk#573, product_skuid#49 AS product_sk#584, void_flag#51, shipment_date#38 AS shipped_date#595] : : +- Exchange hashpartitioning(order_number#35, _nondeterministic#571, 1000) : : +- *(1) 
Filter ((((isnotnull(transaction_number#33L) && isnotnull(transaction_sequence#36)) && isnotnull(order_number#35)) && isnotnull(product_skuid#49)) && NOT (product_skuid#49 = 0)) : : +- *(1) Project [transaction_number#33L, order_number#35, transaction_sequence#36, purchase_date#39, transaction_type#40, discount_amount#43, promotion_redemptionid#47, product_skuid#49, void_flag#51, shipment_date#38, rand(-4526990331774495234) AS _nondeterministic#571] : : +- *(1) FileScan parquet [transaction_number#33L,pos_terminalid#34,order_number#35,transaction_sequence#36,fulfilment_date#37,shipment_date#38,purchase_date#39,transaction_type#40,quantity#41,transaction_amount#42,discount_amount#43,discount_type#44,adjusted_amount#45,adjustment_type#46,promotion_redemptionid#47,coupon_redemptionid#48,product_skuid#49,currency_code#50,void_flag#51,dt#52] Batched: true, Format: Parquet, Location: InMemoryFileIndex[........, PartitionFilters: [], PushedFilters: [], ReadSchema: struct<transaction_number:bigint,pos_terminalid:int,order_number:string,transaction_sequence:int,... 
: +- *(7) HashAggregate(keys=[atg_order_number#609, pode_transaction_number#633L, country_origin#617, pode_order_number#625, merch_flag#98, pode_transaction_sequence#641, sample_sku_flag#97], functions=[], output=[pode_order_number#625, pode_transaction_sequence#641, pode_transaction_number#633L, sample_sku_flag#97, merch_flag#98, atg_order_number#609, country_origin#617]) : +- Exchange hashpartitioning(atg_order_number#609, pode_transaction_number#633L, country_origin#617, pode_order_number#625, merch_flag#98, pode_transaction_sequence#641, sample_sku_flag#97, 200) : +- *(5) HashAggregate(keys=[atg_order_number#609, pode_transaction_number#633L, country_origin#617, pode_order_number#625, merch_flag#98, pode_transaction_sequence#641, sample_sku_flag#97], functions=[], output=[atg_order_number#609, pode_transaction_number#633L, country_origin#617, pode_order_number#625, merch_flag#98, pode_transaction_sequence#641, sample_sku_flag#97]) : +- *(5) Project [order_number#95 AS pode_order_number#625, transaction_sequence#87 AS pode_transaction_sequence#641, transaction_number#83L AS pode_transaction_number#633L, sample_sku_flag#97, merch_flag#98, atgorder_number#105 AS atg_order_number#609, country_of_origin#106 AS country_origin#617] : +- Exchange hashpartitioning(order_number#95, _nondeterministic#607, 1000) : +- *(4) Filter ((isnotnull(transaction_sequence#87) && isnotnull(order_number#95)) && isnotnull(transaction_number#83L)) : +- *(4) Project [order_number#95, transaction_sequence#87, transaction_number#83L, sample_sku_flag#97, merch_flag#98, atgorder_number#105, country_of_origin#106, rand(-2180668511642121172) AS _nondeterministic#607] : +- *(4) FileScan parquet 
[transaction_number#83L,completed_date#84,return_date#85,return_complete_date#86,transaction_sequence#87,update_date#88,original_order_date#89,sas_id#90L,usa_id#91L,gift_card_sk#92,source#93,match_flag#94,order_number#95,sas_load_date#96,sample_sku_flag#97,merch_flag#98,received_date#99,purchase_date#100,n_usa_id#101L,n_sas_id#102L,profile_number#103,altorder_number#104,atgorder_number#105,country_of_origin#106,dt#107] Batched: true, Format: Parquet, Location: InMemoryFileIndex[..., PartitionFilters: [], PushedFilters: [], ReadSchema: struct<transaction_number:bigint,completed_date:date,return_date:date,return_complete_date:date,t... +- BroadcastExchange HashedRelationBroadcastMode(List(cast(input[1, int, true] as bigint))) +- *(6) Project [sku_number#153L, product_sk#141 AS p_product_sk#796, brand_description#175 AS brand_name#775, department_number#147, primary_category#207 AS product_category#782, reporting_department_name#204 AS department_name#789] +- *(6) Filter (isnotnull(product_sk#141) && NOT (product_sk#141 = 0)) +- *(6) FileScan parquet [product_sk#141,department_number#147,sku_number#153L,brand_description#175,reporting_department_name#204,primary_category#207] Batched: true, Format: Parquet, Location: InMemoryFileIndex[smt..., PartitionFilters: [], PushedFilters: [IsNotNull(product_sk), Not(EqualTo(product_sk,0))], ReadSchema: struct<product_sk:int,department_number:int,sku_number:bigint,brand_description:string,reporting_...
> ERROR codegen.CodeGenerator: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java'
> ---------------------------------------------------------------------------------------------------------------------
>
> Key: SPARK-19984
> URL: https://issues.apache.org/jira/browse/SPARK-19984
> Project: Spark
> Issue Type: Bug
> Components: Optimizer
> Affects Versions: 2.1.0
> Reporter: Andrey Yakovenko
> Priority: Major
> Labels: bulk-closed
> Attachments: after_adding_count.txt, before_adding_count.txt
>
>
> I had this error a few times on my local Hadoop 2.7.3 + Spark 2.1.0 environment. This is not a permanent error; the next time I run it, it could disappear. Unfortunately, I don't know how to reproduce the issue. As you can see from the log, my logic is pretty complicated.
> Here is a part of the log I've got (container_1489514660953_0015_01_000001)
> {code}
> 17/03/16 11:07:04 ERROR codegen.CodeGenerator: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 151, Column 29: A method named "compare" is not declared in any enclosing class nor any supertype, nor through a static import
> /* 001 */ public Object generate(Object[] references) {
> /* 002 */ return new GeneratedIterator(references);
> /* 003 */ }
> /* 004 */
> /* 005 */ final class GeneratedIterator extends org.apache.spark.sql.execution.BufferedRowIterator {
> /* 006 */ private Object[] references;
> /* 007 */ private scala.collection.Iterator[] inputs;
> /* 008 */ private boolean agg_initAgg;
> /* 009 */ private boolean agg_bufIsNull;
> /* 010 */ private long agg_bufValue;
> /* 011 */ private boolean agg_initAgg1;
> /* 012 */ private boolean agg_bufIsNull1;
> /* 013 */ private long agg_bufValue1;
> /* 014 */ private scala.collection.Iterator smj_leftInput;
> /* 015 */ private scala.collection.Iterator smj_rightInput;
> /* 016 */ private InternalRow smj_leftRow;
> /* 017 */ private InternalRow smj_rightRow;
> /* 018 */ private UTF8String smj_value2;
> /* 019 */ private java.util.ArrayList smj_matches;
> /* 020 */ private UTF8String smj_value3;
> /* 021 */ private UTF8String smj_value4;
> /* 022 */ private org.apache.spark.sql.execution.metric.SQLMetric smj_numOutputRows;
> /* 023 */ private UnsafeRow smj_result;
> /* 024 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder smj_holder;
> /* 025 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter smj_rowWriter;
> /* 026 */ private org.apache.spark.sql.execution.metric.SQLMetric agg_numOutputRows;
> /* 027 */ private org.apache.spark.sql.execution.metric.SQLMetric agg_aggTime;
> /* 028 */ private UnsafeRow agg_result;
> /* 029 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder agg_holder;
> /* 030 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter agg_rowWriter;
> /* 031 */ private org.apache.spark.sql.execution.metric.SQLMetric agg_numOutputRows1;
> /* 032 */ private org.apache.spark.sql.execution.metric.SQLMetric agg_aggTime1;
> /* 033 */ private UnsafeRow agg_result1;
> /* 034 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder agg_holder1;
> /* 035 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter agg_rowWriter1;
> /* 036 */
> /* 037 */ public GeneratedIterator(Object[] references) {
> /* 038 */ this.references = references;
> /* 039 */ }
> /* 040 */
> /* 041 */ public void init(int index, scala.collection.Iterator[] inputs) {
> /* 042 */ partitionIndex = index;
> /* 043 */ this.inputs = inputs;
> /* 044 */ wholestagecodegen_init_0();
> /* 045 */ wholestagecodegen_init_1();
> /* 046 */
> /* 047 */ }
> /* 048 */
> /* 049 */ private void wholestagecodegen_init_0() {
> /* 050 */ agg_initAgg = false;
> /* 051 */
> /* 052 */ agg_initAgg1 = false;
> /* 053 */
> /* 054 */ smj_leftInput = inputs[0];
> /* 055 */ smj_rightInput = inputs[1];
> /* 056 */
> /* 057 */ smj_rightRow = null;
> /* 058 */
> /* 059 */ smj_matches = new java.util.ArrayList();
> /* 060 */
> /* 061 */ this.smj_numOutputRows = (org.apache.spark.sql.execution.metric.SQLMetric) references[0];
> /* 062 */ smj_result = new UnsafeRow(2);
> /* 063 */ this.smj_holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(smj_result, 64);
> /* 064 */ this.smj_rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(smj_holder, 2);
> /* 065 */ this.agg_numOutputRows = (org.apache.spark.sql.execution.metric.SQLMetric) references[1];
> /* 066 */ this.agg_aggTime = (org.apache.spark.sql.execution.metric.SQLMetric) references[2];
> /* 067 */ agg_result = new UnsafeRow(1);
> /* 068 */ this.agg_holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_result, 0);
> /* 069 */ this.agg_rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_holder, 1);
> /* 070 */ this.agg_numOutputRows1 = (org.apache.spark.sql.execution.metric.SQLMetric) references[3];
> /* 071 */ this.agg_aggTime1 = (org.apache.spark.sql.execution.metric.SQLMetric) references[4];
> /* 072 */
> /* 073 */ }
> /* 074 */
> /* 075 */ private void agg_doAggregateWithoutKey() throws java.io.IOException {
> /* 076 */ // initialize aggregation buffer
> /* 077 */ agg_bufIsNull = false;
> /* 078 */ agg_bufValue = 0L;
> /* 079 */
> /* 080 */ while (!agg_initAgg1) {
> /* 081 */ agg_initAgg1 = true;
> /* 082 */ long agg_beforeAgg = System.nanoTime();
> /* 083 */ agg_doAggregateWithoutKey1();
> /* 084 */ agg_aggTime.add((System.nanoTime() - agg_beforeAgg) / 1000000);
> /* 085 */
> /* 086 */ // output the result
> /* 087 */
> /* 088 */ agg_numOutputRows.add(1);
> /* 089 */ // do aggregate
> /* 090 */ // common sub-expressions
> /* 091 */
> /* 092 */ // evaluate aggregate function
> /* 093 */ boolean agg_isNull8 = false;
> /* 094 */
> /* 095 */ long agg_value8 = -1L;
> /* 096 */ agg_value8 = agg_bufValue + agg_bufValue1;
> /* 097 */ // update aggregation buffer
> /* 098 */ agg_bufIsNull = false;
> /* 099 */ agg_bufValue = agg_value8;
> /* 100 */ }
> /* 101 */
> /* 102 */ }
> /* 103 */
> /* 104 */ private void agg_doAggregateWithoutKey1() throws java.io.IOException {
> /* 105 */ // initialize aggregation buffer
> /* 106 */ agg_bufIsNull1 = false;
> /* 107 */ agg_bufValue1 = 0L;
> /* 108 */
> /* 109 */ while (findNextInnerJoinRows(smj_leftInput, smj_rightInput)) {
> /* 110 */ int smj_size = smj_matches.size();
> /* 111 */ smj_value4 = smj_leftRow.getUTF8String(0);
> /* 112 */ for (int smj_i = 0; smj_i < smj_size; smj_i ++) {
> /* 113 */ InternalRow smj_rightRow1 = (InternalRow) smj_matches.get(smj_i);
> /* 114 */
> /* 115 */ smj_numOutputRows.add(1);
> /* 116 */
> /* 117 */ // do aggregate
> /* 118 */ // common sub-expressions
> /* 119 */
> /* 120 */ // evaluate aggregate function
> /* 121 */ boolean agg_isNull4 = false;
> /* 122 */
> /* 123 */ long agg_value4 = -1L;
> /* 124 */ agg_value4 = agg_bufValue1 + 1L;
> /* 125 */ // update aggregation buffer
> /* 126 */ agg_bufIsNull1 = false;
> /* 127 */ agg_bufValue1 = agg_value4;
> /* 128 */
> /* 129 */ }
> /* 130 */ if (shouldStop()) return;
> /* 131 */ }
> /* 132 */
> /* 133 */ }
> /* 134 */
> /* 135 */ private boolean findNextInnerJoinRows(
> /* 136 */ scala.collection.Iterator leftIter,
> /* 137 */ scala.collection.Iterator rightIter) {
> /* 138 */ smj_leftRow = null;
> /* 139 */ int comp = 0;
> /* 140 */ while (smj_leftRow == null) {
> /* 141 */ if (!leftIter.hasNext()) return false;
> /* 142 */ smj_leftRow = (InternalRow) leftIter.next();
> /* 143 */
> /* 144 */ if (agg_bufIsNull) {
> /* 145 */ smj_leftRow = null;
> /* 146 */ continue;
> /* 147 */ }
> /* 148 */ if (!smj_matches.isEmpty()) {
> /* 149 */ comp = 0;
> /* 150 */ if (comp == 0) {
> /* 151 */ comp = agg_bufValue.compare(smj_value3);
> /* 152 */ }
> /* 153 */
> /* 154 */ if (comp == 0) {
> /* 155 */ return true;
> /* 156 */ }
> /* 157 */ smj_matches.clear();
> /* 158 */ }
> /* 159 */
> /* 160 */ do {
> /* 161 */ if (smj_rightRow == null) {
> /* 162 */ if (!rightIter.hasNext()) {
> /* 163 */ smj_value3 = agg_bufValue.clone();
> /* 164 */ return !smj_matches.isEmpty();
> /* 165 */ }
> /* 166 */ smj_rightRow = (InternalRow) rightIter.next();
> /* 167 */
> /* 168 */ if (agg_bufIsNull) {
> /* 169 */ smj_rightRow = null;
> /* 170 */ continue;
> /* 171 */ }
> /* 172 */ smj_value2 = agg_bufValue.clone();
> /* 173 */ }
> /* 174 */
> /* 175 */ comp = 0;
> /* 176 */ if (comp == 0) {
> /* 177 */ comp = agg_bufValue.compare(smj_value2);
> /* 178 */ }
> /* 179 */
> /* 180 */ if (comp > 0) {
> /* 181 */ smj_rightRow = null;
> /* 182 */ } else if (comp < 0) {
> /* 183 */ if (!smj_matches.isEmpty()) {
> /* 184 */ smj_value3 = agg_bufValue.clone();
> /* 185 */ return true;
> /* 186 */ }
> /* 187 */ smj_leftRow = null;
> /* 188 */ } else {
> /* 189 */ smj_matches.add(smj_rightRow.copy());
> /* 190 */ smj_rightRow = null;;
> /* 191 */ }
> /* 192 */ } while (smj_leftRow != null);
> /* 193 */ }
> /* 194 */ return false; // unreachable
> /* 195 */ }
> /* 196 */
> /* 197 */ private void wholestagecodegen_init_1() {
> /* 198 */ agg_result1 = new UnsafeRow(1);
> /* 199 */ this.agg_holder1 = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_result1, 0);
> /* 200 */ this.agg_rowWriter1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_holder1, 1);
> /* 201 */
> /* 202 */ }
> /* 203 */
> /* 204 */ protected void processNext() throws java.io.IOException {
> /* 205 */ while (!agg_initAgg) {
> /* 206 */ agg_initAgg = true;
> /* 207 */ long agg_beforeAgg1 = System.nanoTime();
> /* 208 */ agg_doAggregateWithoutKey();
> /* 209 */ agg_aggTime1.add((System.nanoTime() - agg_beforeAgg1) / 1000000);
> /* 210 */
> /* 211 */ // output the result
> /* 212 */
> /* 213 */ agg_numOutputRows1.add(1);
> /* 214 */ agg_rowWriter1.zeroOutNullBytes();
> /* 215 */
> /* 216 */ if (agg_bufIsNull) {
> /* 217 */ agg_rowWriter1.setNullAt(0);
> /* 218 */ } else {
> /* 219 */ agg_rowWriter1.write(0, agg_bufValue);
> /* 220 */ }
> /* 221 */ append(agg_result1.copy());
> /* 222 */ }
> /* 223 */ }
> /* 224 */ }
> org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 151, Column 29: A method named "compare" is not declared in any enclosing class nor any supertype, nor through a static import
> at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:11004)
> at org.codehaus.janino.UnitCompiler.findIMethod(UnitCompiler.java:8130)
> at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:4421)
> at org.codehaus.janino.UnitCompiler.access$7500(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$12.visitMethodInvocation(UnitCompiler.java:3774)
> at org.codehaus.janino.UnitCompiler$12.visitMethodInvocation(UnitCompiler.java:3762)
> at org.codehaus.janino.Java$MethodInvocation.accept(Java.java:4328)
> at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:3762)
> at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4933)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3189)
> at org.codehaus.janino.UnitCompiler.access$5100(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3143)
> at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3139)
> at org.codehaus.janino.Java$Assignment.accept(Java.java:3847)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3139)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2112)
> at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1377)
> at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1370)
> at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:2558)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
> at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1436)
> at org.codehaus.janino.UnitCompiler.access$1600(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1376)
> at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1370)
> at org.codehaus.janino.Java$Block.accept(Java.java:2471)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2228)
> at org.codehaus.janino.UnitCompiler.access$1800(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1378)
> at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1370)
> at org.codehaus.janino.Java$IfStatement.accept(Java.java:2621)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
> at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1436)
> at org.codehaus.janino.UnitCompiler.access$1600(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1376)
> at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1370)
> at org.codehaus.janino.Java$Block.accept(Java.java:2471)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2228)
> at org.codehaus.janino.UnitCompiler.access$1800(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1378)
> at org.codehaus.janino.UnitCompiler$6.visitIfStatement(UnitCompiler.java:1370)
> at org.codehaus.janino.Java$IfStatement.accept(Java.java:2621)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
> at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1436)
> at org.codehaus.janino.UnitCompiler.access$1600(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1376)
> at org.codehaus.janino.UnitCompiler$6.visitBlock(UnitCompiler.java:1370)
> at org.codehaus.janino.Java$Block.accept(Java.java:2471)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1725)
> at org.codehaus.janino.UnitCompiler.access$2100(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$6.visitWhileStatement(UnitCompiler.java:1381)
> at org.codehaus.janino.UnitCompiler$6.visitWhileStatement(UnitCompiler.java:1370)
> at org.codehaus.janino.Java$WhileStatement.accept(Java.java:2708)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
> at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2811)
> at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1262)
> at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1234)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:538)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:890)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:894)
> at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:377)
> at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:369)
> at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1128)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
> at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1209)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:564)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:420)
> at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:206)
> at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:374)
> at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:369)
> at org.codehaus.janino.Java$AbstractPackageMemberClassDeclaration.accept(Java.java:1309)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
> at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:345)
> at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:396)
> at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:311)
> at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:229)
> at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:196)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:91)
> at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:935)
> at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:998)
> at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:995)
> at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
> at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
> at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:890)
> at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:357)
> at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
> at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
> at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
> at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
> at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
> at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
> at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:225)
> at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:272)
> at org.apache.spark.sql.Dataset$$anonfun$collectAsList$1$$anonfun$apply$11.apply(Dataset.scala:2364)
> at org.apache.spark.sql.Dataset$$anonfun$collectAsList$1$$anonfun$apply$11.apply(Dataset.scala:2363)
> at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
> at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2765)
> at org.apache.spark.sql.Dataset$$anonfun$collectAsList$1.apply(Dataset.scala:2363)
> at org.apache.spark.sql.Dataset$$anonfun$collectAsList$1.apply(Dataset.scala:2362)
> at org.apache.spark.sql.Dataset.withCallback(Dataset.scala:2778)
> at org.apache.spark.sql.Dataset.collectAsList(Dataset.scala:2362)
> at com.modeln.revvy.calcengine.spark.CMnSparkDataFrameWrapper.repartition(CMnSparkDataFrameWrapper.java:170)
> at com.modeln.revvy.calcengine.spark.handler.CMnJoinOpHandler.process(CMnJoinOpHandler.java:233)
> at com.modeln.revvy.calcengine.spark.handler.CMnJoinOpHandler.process(CMnJoinOpHandler.java:20)
> at com.modeln.revvy.calcengine.spark.handler.CMnOpPipeHandler.process(CMnOpPipeHandler.java:45)
> at com.modeln.revvy.calcengine.spark.handler.CMnOpPipeHandler.process(CMnOpPipeHandler.java:18)
> at com.modeln.revvy.calcengine.spark.handler.CMnSubAssemblyPipeHandler.process(CMnSubAssemblyPipeHandler.java:57)
> at com.modeln.revvy.calcengine.spark.handler.CMnSubAssemblyPipeHandler.process(CMnSubAssemblyPipeHandler.java:16)
> at com.modeln.revvy.calcengine.spark.CMnSparkModelGenerator.generateAndRunSparkModel(CMnSparkModelGenerator.java:173)
> at com.modeln.revvy.calcengine.spark.CMnCalcEngineDriver.main(CMnCalcEngineDriver.java:51)
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:498)
> at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:637)
> 17/03/16 11:07:04 WARN execution.WholeStageCodegenExec: Whole-stage codegen disabled for this plan:
> *HashAggregate(keys=[], functions=[count(1)], output=[count#4523L])
> +- *HashAggregate(keys=[], functions=[partial_count(1)], output=[count#4714L])
> +- *Project
> +- *SortMergeJoin [SaleId#2724], [right-SaleId#4133], Inner
> :- *Filter isnotnull(SaleId#2724)
> : +- InMemoryTableScan [SaleId#2724], [isnotnull(SaleId#2724)]
> : +- InMemoryRelation [running_total#3481L, invoicePricePerUnit__c#2721, enrollment_account_id#2722, currency__c#2723, SaleId#2724, distributorCustomer__c#2725, projected_tier_attained#3610, purchasingCustomer__c#2726, partNumber__c#2727, saleSubType__c#2728, step2#3520, step3#3541, step1#3500, totalInvoicedAmount__c#2729, overriden_tier_attained#3586, invoiceDate__c#2730, quantity__c#2731L, endCustomer__c#2732, tx_group_count#2733L, quoteId__c#2734, loadDate__c#2735, calc_tier_attained#3563], true, 10000, StorageLevel(disk, memory, deserialized, 1 replicas)
> : +- *Project [running_total#3481L, invoicePricePerUnit__c#2721, enrollment_account_id#2722, currency__c#2723, SaleId#2724, distributorCustomer__c#2725, CASE WHEN (CASE WHEN (quantity__c#2731L > 0) THEN cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(38,1)) ELSE CASE WHEN (((running_total#3481L - quantity__c#2731L) > 0) && ((running_total#3481L - quantity__c#2731L) > 250000)) THEN CASE WHEN (running_total#3481L <= 250000) THEN cast(CheckOverflow((250000.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(38,1)) ELSE cast(cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(38,0)) as decimal(38,1)) END ELSE 0.0 END END > 0.0) THEN 3 WHEN (CASE WHEN (quantity__c#2731L > 0) THEN CASE WHEN (running_total#3481L > 250000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) < 250000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) >= 50000) THEN CheckOverflow((cast(CheckOverflow((250000.0 - cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(23,1)) + cast(cast(quantity__c#2731L as decimal(22,1)) as decimal(23,1))), DecimalType(23,1)) ELSE 200000.0 END ELSE 0.0 END ELSE cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as 
decimal(22,1)) - 50000.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(23,1)) END ELSE cast(CASE WHEN (running_total#3481L > 250000) THEN 0.0 ELSE CASE WHEN (((running_total#3481L - quantity__c#2731L) > 0) && ((running_total#3481L - quantity__c#2731L) > 50000)) THEN CASE WHEN (running_total#3481L <= 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) > 250000) THEN -200000.0 ELSE CheckOverflow((50000.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) END ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 250000) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) ELSE cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(22,1)) END END ELSE 0.0 END END as decimal(23,1)) END > 0.0) THEN 2 WHEN (CASE WHEN (quantity__c#2731L > 0) THEN CASE WHEN (running_total#3481L > 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) < 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) >= 0) THEN CheckOverflow((cast(CheckOverflow((50000.0 - cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(23,1)) + cast(cast(quantity__c#2731L as decimal(22,1)) as decimal(23,1))), DecimalType(23,1)) ELSE 50000.0 END ELSE 0.0 END ELSE cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(23,1)) END ELSE 
cast(CASE WHEN (running_total#3481L > 50000) THEN 0.0 ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 0) THEN CASE WHEN (running_total#3481L <= 0) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) > 50000) THEN -50000.0 ELSE CheckOverflow((0.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) END ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 50000) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) ELSE cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(22,1)) END END ELSE 0.0 END END as decimal(23,1)) END > 0.0) THEN 1 ELSE -1 END AS projected_tier_attained#3610, purchasingCustomer__c#2726, partNumber__c#2727, saleSubType__c#2728, CASE WHEN (quantity__c#2731L > 0) THEN CASE WHEN (running_total#3481L > 250000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) < 250000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) >= 50000) THEN CheckOverflow((cast(CheckOverflow((250000.0 - cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(23,1)) + cast(cast(quantity__c#2731L as decimal(22,1)) as decimal(23,1))), DecimalType(23,1)) ELSE 200000.0 END ELSE 0.0 END ELSE cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(23,1)) END ELSE cast(CASE WHEN (running_total#3481L > 250000) THEN 0.0 ELSE CASE WHEN (((running_total#3481L - quantity__c#2731L) > 0) && ((running_total#3481L - 
quantity__c#2731L) > 50000)) THEN CASE WHEN (running_total#3481L <= 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) > 250000) THEN -200000.0 ELSE CheckOverflow((50000.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) END ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 250000) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) ELSE cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(22,1)) END END ELSE 0.0 END END as decimal(23,1)) END AS step2#3520, CASE WHEN (quantity__c#2731L > 0) THEN cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(38,1)) ELSE CASE WHEN (((running_total#3481L - quantity__c#2731L) > 0) && ((running_total#3481L - quantity__c#2731L) > 250000)) THEN CASE WHEN (running_total#3481L <= 250000) THEN cast(CheckOverflow((250000.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(38,1)) ELSE cast(cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(38,0)) as decimal(38,1)) END ELSE 0.0 END END AS step3#3541, CASE WHEN (quantity__c#2731L > 0) THEN CASE WHEN (running_total#3481L > 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) < 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) >= 0) THEN CheckOverflow((cast(CheckOverflow((50000.0 - cast(cast(running_total#3481L as decimal(20,0)) as 
decimal(22,1))), DecimalType(22,1)) as decimal(23,1)) + cast(cast(quantity__c#2731L as decimal(22,1)) as decimal(23,1))), DecimalType(23,1)) ELSE 50000.0 END ELSE 0.0 END ELSE cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(23,1)) END ELSE cast(CASE WHEN (running_total#3481L > 50000) THEN 0.0 ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 0) THEN CASE WHEN (running_total#3481L <= 0) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) > 50000) THEN -50000.0 ELSE CheckOverflow((0.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) END ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 50000) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) ELSE cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(22,1)) END END ELSE 0.0 END END as decimal(23,1)) END AS step1#3500, totalInvoicedAmount__c#2729, CASE WHEN (CASE WHEN (quantity__c#2731L > 0) THEN cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(38,1)) ELSE CASE 
WHEN (((running_total#3481L - quantity__c#2731L) > 0) && ((running_total#3481L - quantity__c#2731L) > 250000)) THEN CASE WHEN (running_total#3481L <= 250000) THEN cast(CheckOverflow((250000.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(38,1)) ELSE cast(cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(38,0)) as decimal(38,1)) END ELSE 0.0 END END > 0.0) THEN 3 WHEN (CASE WHEN (quantity__c#2731L > 0) THEN CASE WHEN (running_total#3481L > 250000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) < 250000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) >= 50000) THEN CheckOverflow((cast(CheckOverflow((250000.0 - cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(23,1)) + cast(cast(quantity__c#2731L as decimal(22,1)) as decimal(23,1))), DecimalType(23,1)) ELSE 200000.0 END ELSE 0.0 END ELSE cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(23,1)) END ELSE cast(CASE WHEN (running_total#3481L > 250000) THEN 0.0 ELSE CASE WHEN (((running_total#3481L - quantity__c#2731L) > 0) && ((running_total#3481L - quantity__c#2731L) > 50000)) THEN CASE WHEN (running_total#3481L <= 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) > 250000) THEN -200000.0 ELSE CheckOverflow((50000.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) END ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 
250000) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) ELSE cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(22,1)) END END ELSE 0.0 END END as decimal(23,1)) END > 0.0) THEN 2 WHEN (CASE WHEN (quantity__c#2731L > 0) THEN CASE WHEN (running_total#3481L > 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) < 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) >= 0) THEN CheckOverflow((cast(CheckOverflow((50000.0 - cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(23,1)) + cast(cast(quantity__c#2731L as decimal(22,1)) as decimal(23,1))), DecimalType(23,1)) ELSE 50000.0 END ELSE 0.0 END ELSE cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(23,1)) END ELSE cast(CASE WHEN (running_total#3481L > 50000) THEN 0.0 ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 0) THEN CASE WHEN (running_total#3481L <= 0) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) > 50000) THEN -50000.0 ELSE CheckOverflow((0.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) END ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 50000) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) ELSE cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(22,1)) END END ELSE 0.0 END END as decimal(23,1)) END > 0.0) THEN 1 ELSE 
-1 END AS overriden_tier_attained#3586, invoiceDate__c#2730, quantity__c#2731L, endCustomer__c#2732, tx_group_count#2733L, quoteId__c#2734, loadDate__c#2735, CASE WHEN (CASE WHEN (quantity__c#2731L > 0) THEN cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(38,1)) ELSE CASE WHEN (((running_total#3481L - quantity__c#2731L) > 0) && ((running_total#3481L - quantity__c#2731L) > 250000)) THEN CASE WHEN (running_total#3481L <= 250000) THEN cast(CheckOverflow((250000.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(38,1)) ELSE cast(cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(38,0)) as decimal(38,1)) END ELSE 0.0 END END > 0.0) THEN 3 WHEN (CASE WHEN (quantity__c#2731L > 0) THEN CASE WHEN (running_total#3481L > 250000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) < 250000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) >= 50000) THEN CheckOverflow((cast(CheckOverflow((250000.0 - cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(23,1)) + cast(cast(quantity__c#2731L as decimal(22,1)) as decimal(23,1))), DecimalType(23,1)) ELSE 200000.0 END ELSE 0.0 END ELSE cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) 
as decimal(22,1)) - 50000.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(23,1)) END ELSE cast(CASE WHEN (running_total#3481L > 250000) THEN 0.0 ELSE CASE WHEN (((running_total#3481L - quantity__c#2731L) > 0) && ((running_total#3481L - quantity__c#2731L) > 50000)) THEN CASE WHEN (running_total#3481L <= 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) > 250000) THEN -200000.0 ELSE CheckOverflow((50000.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) END ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 250000) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 250000.0), DecimalType(22,1)) ELSE cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(22,1)) END END ELSE 0.0 END END as decimal(23,1)) END > 0.0) THEN 2 WHEN (CASE WHEN (quantity__c#2731L > 0) THEN CASE WHEN (running_total#3481L > 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) < 50000) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) >= 0) THEN CheckOverflow((cast(CheckOverflow((50000.0 - cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) as decimal(23,1)) + cast(cast(quantity__c#2731L as decimal(22,1)) as decimal(23,1))), DecimalType(23,1)) ELSE 50000.0 END ELSE 0.0 END ELSE cast(CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) >= cast(quantity__c#2731L as decimal(22,1))) THEN cast(quantity__c#2731L as decimal(22,1)) ELSE CASE WHEN (CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) > 0.0) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 0.0), DecimalType(22,1)) ELSE 0.0 END END as decimal(23,1)) END 
ELSE cast(CASE WHEN (running_total#3481L > 50000) THEN 0.0 ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 0) THEN CASE WHEN (running_total#3481L <= 0) THEN CASE WHEN ((running_total#3481L - quantity__c#2731L) > 50000) THEN -50000.0 ELSE CheckOverflow((0.0 - cast(cast((running_total#3481L - quantity__c#2731L) as decimal(20,0)) as decimal(22,1))), DecimalType(22,1)) END ELSE CASE WHEN ((running_total#3481L - quantity__c#2731L) > 50000) THEN CheckOverflow((cast(cast(running_total#3481L as decimal(20,0)) as decimal(22,1)) - 50000.0), DecimalType(22,1)) ELSE cast((running_total#3481L - (running_total#3481L - quantity__c#2731L)) as decimal(22,1)) END END ELSE 0.0 END END as decimal(23,1)) END > 0.0) THEN 1 ELSE -1 END AS calc_tier_attained#3563]
> : +- Window [sum(measure_expr#3462L) windowspecdefinition(SaleId#2724 ASC NULLS FIRST, ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS running_total#3481L], [SaleId#2724 ASC NULLS FIRST]
> : +- *Sort [SaleId#2724 ASC NULLS FIRST], false, 0
> : +- *Project [invoicePricePerUnit__c#2721, enrollment_account_id#2722, currency__c#2723, SaleId#2724, distributorCustomer__c#2725, purchasingCustomer__c#2726, partNumber__c#2727, saleSubType__c#2728, totalInvoicedAmount__c#2729, invoiceDate__c#2730, quantity__c#2731L, endCustomer__c#2732, tx_group_count#2733L, quoteId__c#2734, loadDate__c#2735, quantity__c#2731L AS measure_expr#3462L]
> : +- InMemoryTableScan [distributorCustomer__c#2725, endCustomer__c#2732, SaleId#2724, quoteId__c#2734, partNumber__c#2727, currency__c#2723, enrollment_account_id#2722, invoicePricePerUnit__c#2721, invoiceDate__c#2730, totalInvoicedAmount__c#2729, tx_group_count#2733L, loadDate__c#2735, saleSubType__c#2728, quantity__c#2731L, purchasingCustomer__c#2726]
> : +- InMemoryRelation [invoicePricePerUnit__c#2721, enrollment_account_id#2722, currency__c#2723, SaleId#2724, distributorCustomer__c#2725, purchasingCustomer__c#2726, partNumber__c#2727, saleSubType__c#2728, totalInvoicedAmount__c#2729, invoiceDate__c#2730, quantity__c#2731L, endCustomer__c#2732, tx_group_count#2733L, quoteId__c#2734, loadDate__c#2735], true, 10000, StorageLevel(disk, memory, deserialized, 1 replicas)
> : +- Coalesce 1
> : +- Scan ExistingRDD[invoicePricePerUnit__c#2721,enrollment_account_id#2722,currency__c#2723,SaleId#2724,distributorCustomer__c#2725,purchasingCustomer__c#2726,partNumber__c#2727,saleSubType__c#2728,totalInvoicedAmount__c#2729,invoiceDate__c#2730,quantity__c#2731L,endCustomer__c#2732,tx_group_count#2733L,quoteId__c#2734,loadDate__c#2735]
> +- *Sort [right-SaleId#4133 ASC NULLS FIRST], false, 0
> +- *Project [SaleId#2068 AS right-SaleId#4133]
> +- *Filter isnotnull(SaleId#2068)
> +- InMemoryTableScan [SaleId#2068], [isnotnull(SaleId#2068)]
> +- InMemoryRelation [invoicePricePerUnit__c#2065, enrollment_account_id#2066, currency__c#2067, SaleId#2068, distributorCustomer__c#2069, purchasingCustomer__c#2070, partNumber__c#2071, saleSubType__c#2072, totalInvoicedAmount__c#2073, invoiceDate__c#2074, quantity__c#2075L, endCustomer__c#2076, tx_group_count#2077L, quoteId__c#2078, loadDate__c#2079], true, 10000, StorageLevel(disk, memory, deserialized, 1 replicas)
> +- Coalesce 1
> +- Scan ExistingRDD[invoicePricePerUnit__c#2065,enrollment_account_id#2066,currency__c#2067,SaleId#2068,distributorCustomer__c#2069,purchasingCustomer__c#2070,partNumber__c#2071,saleSubType__c#2072,totalInvoicedAmount__c#2073,invoiceDate__c#2074,quantity__c#2075L,endCustomer__c#2076,tx_group_count#2077L,quoteId__c#2078,loadDate__c#2079]
> {code}
--
This message was sent by Atlassian Jira
(v8.3.4#803005)
---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org