Posted to user@spark.apache.org by VG <vl...@gmail.com> on 2016/07/17 16:57:14 UTC

Error when using filter with the Dataset API in Java

Hello All,

I am having a hard time using the Dataset API. Any suggestions on where I am
going wrong would be appreciated.
I am using 2.0.0-preview.

When I try to call a filter operation like this:

Dataset<ApacheAccessLog> fds = logSet.filter(log -> log.getResponseCode() != 200);
fds.count();

I get the following error.
16/07/17 22:19:58 ERROR CodeGenerator: failed to compile:
org.codehaus.commons.compiler.CompileException: File 'generated.java', Line
98, Column 63: No applicable constructor/method found for zero actual
parameters; candidates are: "test1.ApacheAccessLog(java.lang.String,
java.lang.String, java.lang.String, java.lang.String, java.lang.String,
java.lang.String, java.lang.String, java.lang.String, java.lang.String)"
/* 001 */ public Object generate(Object[] references) {
/* 002 */   return new GeneratedIterator(references);
/* 003 */ }
/* 004 */
/* 005 */ /** Codegened pipeline for:
/* 006 */ * TungstenAggregate(key=[],
functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#58L])
/* 007 */ +- Project
/* 008 */ +- Filter test...
/* 009 */ */
/* 010 */ final class GeneratedIterator extends
org.apache.spark.sql.execution.BufferedRowIterator {
/* 011 */   private Object[] references;
/* 012 */   private boolean agg_initAgg;
/* 013 */   private boolean agg_bufIsNull;
/* 014 */   private long agg_bufValue;
/* 015 */   private scala.collection.Iterator inputadapter_input;
/* 016 */   private org.apache.spark.sql.execution.metric.SQLMetric
filter_numOutputRows;
/* 017 */   private java.lang.String filter_errMsg;
/* 018 */   private java.lang.String filter_errMsg1;
/* 019 */   private UnsafeRow filter_result;
/* 020 */   private
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder
filter_holder;
/* 021 */   private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter
filter_rowWriter;
/* 022 */   private org.apache.spark.sql.execution.metric.SQLMetric
agg_numOutputRows;
/* 023 */   private org.apache.spark.sql.execution.metric.SQLMetric
agg_aggTime;
/* 024 */   private UnsafeRow agg_result;
/* 025 */   private
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder agg_holder;
/* 026 */   private
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter
agg_rowWriter;
/* 027 */
/* 028 */   public GeneratedIterator(Object[] references) {
/* 029 */     this.references = references;
/* 030 */   }
/* 031 */
/* 032 */   public void init(int index, scala.collection.Iterator inputs[])
{
/* 033 */     partitionIndex = index;
/* 034 */     agg_initAgg = false;
/* 035 */
/* 036 */     inputadapter_input = inputs[0];
/* 037 */     this.filter_numOutputRows =
(org.apache.spark.sql.execution.metric.SQLMetric) references[0];
/* 038 */     this.filter_errMsg = (java.lang.String) references[2];
/* 039 */     this.filter_errMsg1 = (java.lang.String) references[3];
/* 040 */     filter_result = new UnsafeRow(9);
/* 041 */     this.filter_holder = new
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(filter_result,
224);
/* 042 */     this.filter_rowWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(filter_holder,
9);
/* 043 */     this.agg_numOutputRows =
(org.apache.spark.sql.execution.metric.SQLMetric) references[4];
/* 044 */     this.agg_aggTime =
(org.apache.spark.sql.execution.metric.SQLMetric) references[5];
/* 045 */     agg_result = new UnsafeRow(1);
/* 046 */     this.agg_holder = new
org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(agg_result,
0);
/* 047 */     this.agg_rowWriter = new
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(agg_holder,
1);
/* 048 */   }
/* 049 */
/* 050 */   private void agg_doAggregateWithoutKey() throws
java.io.IOException {
/* 051 */     // initialize aggregation buffer
/* 052 */     agg_bufIsNull = false;
/* 053 */     agg_bufValue = 0L;
/* 054 */
/* 055 */     /*** PRODUCE: Project */
/* 056 */
/* 057 */     /*** PRODUCE: Filter
test1.Test1$$Lambda$5/454547627@6cdbe5ec.call */
/* 058 */
/* 059 */     /*** PRODUCE: INPUT */
/* 060 */
/* 061 */     while (inputadapter_input.hasNext()) {
/* 062 */       InternalRow inputadapter_row = (InternalRow)
inputadapter_input.next();
/* 063 */       /*** CONSUME: Filter
test1.Test1$$Lambda$5/454547627@6cdbe5ec.call */
/* 064 */
/* 065 */       /* input[0, string] */
/* 066 */       boolean inputadapter_isNull = inputadapter_row.isNullAt(0);
/* 067 */       UTF8String inputadapter_value = inputadapter_isNull ? null
: (inputadapter_row.getUTF8String(0));
/* 068 */       /* input[1, bigint] */
/* 069 */       long inputadapter_value1 = inputadapter_row.getLong(1);
/* 070 */       /* input[2, string] */
/* 071 */       boolean inputadapter_isNull2 = inputadapter_row.isNullAt(2);
/* 072 */       UTF8String inputadapter_value2 = inputadapter_isNull2 ?
null : (inputadapter_row.getUTF8String(2));
/* 073 */       /* input[3, string] */
/* 074 */       boolean inputadapter_isNull3 = inputadapter_row.isNullAt(3);
/* 075 */       UTF8String inputadapter_value3 = inputadapter_isNull3 ?
null : (inputadapter_row.getUTF8String(3));
/* 076 */       /* input[4, string] */
/* 077 */       boolean inputadapter_isNull4 = inputadapter_row.isNullAt(4);
/* 078 */       UTF8String inputadapter_value4 = inputadapter_isNull4 ?
null : (inputadapter_row.getUTF8String(4));
/* 079 */       /* input[5, string] */
/* 080 */       boolean inputadapter_isNull5 = inputadapter_row.isNullAt(5);
/* 081 */       UTF8String inputadapter_value5 = inputadapter_isNull5 ?
null : (inputadapter_row.getUTF8String(5));
/* 082 */       /* input[6, string] */
/* 083 */       boolean inputadapter_isNull6 = inputadapter_row.isNullAt(6);
/* 084 */       UTF8String inputadapter_value6 = inputadapter_isNull6 ?
null : (inputadapter_row.getUTF8String(6));
/* 085 */       /* input[7, int] */
/* 086 */       int inputadapter_value7 = inputadapter_row.getInt(7);
/* 087 */       /* input[8, string] */
/* 088 */       boolean inputadapter_isNull8 = inputadapter_row.isNullAt(8);
/* 089 */       UTF8String inputadapter_value8 = inputadapter_isNull8 ?
null : (inputadapter_row.getUTF8String(8));
/* 090 */
/* 091 */       /* test1.Test1$$Lambda$5/454547627@6cdbe5ec.call */
/* 092 */       /* test1.Test1$$Lambda$5/454547627@6cdbe5ec */
/* 093 */       /* expression: test1.Test1$$Lambda$5/454547627@6cdbe5ec */
/* 094 */       Object filter_obj = ((Expression) references[1]).eval(null);
/* 095 */       org.apache.spark.api.java.function.FilterFunction
filter_value1 = (org.apache.spark.api.java.function.FilterFunction)
filter_obj;
/* 096 */       /* initializejavabean(newInstance(class
test1.ApacheAccessLog), (setDateTimeString,input[2, string].toString),
(setResponseCode,ass... */
/* 097 */       /* newInstance(class test1.ApacheAccessLog) */
/* 098 */       final test1.ApacheAccessLog filter_value3 = false ? null :
new test1.ApacheAccessLog();
/* 099 */       if (!false) {
/* 100 */         /* input[2, string].toString */
/* 101 */         boolean filter_isNull4 = inputadapter_isNull2;
/* 102 */         final java.lang.String filter_value4 = filter_isNull4 ?
null : (java.lang.String) inputadapter_value2.toString();
/* 103 */         filter_isNull4 = filter_value4 == null;
/* 104 */         filter_value3.setDateTimeString(filter_value4);
/* 105 */
/* 106 */         /* assertnotnull(input[7, int], currently no type path
record in java) */
/* 107 */         if (false) {
/* 108 */           throw new RuntimeException(this.filter_errMsg);
/* 109 */         }
/* 110 */         filter_value3.setResponseCode(inputadapter_value7);
/* 111 */
/* 112 */         /* input[0, string].toString */
/* 113 */         boolean filter_isNull8 = inputadapter_isNull;
/* 114 */         final java.lang.String filter_value8 = filter_isNull8 ?
null : (java.lang.String) inputadapter_value.toString();
/* 115 */         filter_isNull8 = filter_value8 == null;
/* 116 */         filter_value3.setClientIdentd(filter_value8);
/* 117 */
/* 118 */         /* input[3, string].toString */
/* 119 */         boolean filter_isNull10 = inputadapter_isNull3;
/* 120 */         final java.lang.String filter_value10 = filter_isNull10 ?
null : (java.lang.String) inputadapter_value3.toString();
/* 121 */         filter_isNull10 = filter_value10 == null;
/* 122 */         filter_value3.setEndpoint(filter_value10);
/* 123 */
/* 124 */         /* input[4, string].toString */
/* 125 */         boolean filter_isNull12 = inputadapter_isNull4;
/* 126 */         final java.lang.String filter_value12 = filter_isNull12 ?
null : (java.lang.String) inputadapter_value4.toString();
/* 127 */         filter_isNull12 = filter_value12 == null;
/* 128 */         filter_value3.setIpAddress(filter_value12);
/* 129 */
/* 130 */         /* input[5, string].toString */
/* 131 */         boolean filter_isNull14 = inputadapter_isNull5;
/* 132 */         final java.lang.String filter_value14 = filter_isNull14 ?
null : (java.lang.String) inputadapter_value5.toString();
/* 133 */         filter_isNull14 = filter_value14 == null;
/* 134 */         filter_value3.setMethod(filter_value14);
/* 135 */
/* 136 */         /* input[6, string].toString */
/* 137 */         boolean filter_isNull16 = inputadapter_isNull6;
/* 138 */         final java.lang.String filter_value16 = filter_isNull16 ?
null : (java.lang.String) inputadapter_value6.toString();
/* 139 */         filter_isNull16 = filter_value16 == null;
/* 140 */         filter_value3.setProtocol(filter_value16);
/* 141 */
/* 142 */         /* assertnotnull(input[1, bigint], currently no type path
record in java) */
/* 143 */         if (false) {
/* 144 */           throw new RuntimeException(this.filter_errMsg1);
/* 145 */         }
/* 146 */         filter_value3.setContentSize(inputadapter_value1);
/* 147 */
/* 148 */         /* input[8, string].toString */
/* 149 */         boolean filter_isNull20 = inputadapter_isNull8;
/* 150 */         final java.lang.String filter_value20 = filter_isNull20 ?
null : (java.lang.String) inputadapter_value8.toString();
/* 151 */         filter_isNull20 = filter_value20 == null;
/* 152 */         filter_value3.setUserID(filter_value20);
/* 153 */
/* 154 */       }
/* 155 */       boolean filter_isNull = false || false;
/* 156 */
/* 157 */       boolean filter_value = false;
/* 158 */       try {
/* 159 */         filter_value = filter_isNull ? false :
filter_value1.call(filter_value3);
/* 160 */       } catch (Exception e) {
/* 161 */         org.apache.spark.unsafe.Platform.throwException(e);
/* 162 */       }
/* 163 */       if (filter_isNull || !filter_value) continue;
/* 164 */
/* 165 */       filter_numOutputRows.add(1);
/* 166 */
/* 167 */       /*** CONSUME: Project */
/* 168 */
/* 169 */       /*** CONSUME: TungstenAggregate(key=[],
functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#58L]) */
/* 170 */
/* 171 */       // do aggregate
/* 172 */       // common sub-expressions
/* 173 */
/* 174 */       // evaluate aggregate function
/* 175 */       /* (input[0, bigint] + 1) */
/* 176 */       long agg_value1 = -1L;
/* 177 */       agg_value1 = agg_bufValue + 1L;
/* 178 */       // update aggregation buffer
/* 179 */       agg_bufIsNull = false;
/* 180 */       agg_bufValue = agg_value1;
/* 181 */       if (shouldStop()) return;
/* 182 */     }
/* 183 */
/* 184 */   }
/* 185 */
/* 186 */   protected void processNext() throws java.io.IOException {
/* 187 */     /*** PRODUCE: TungstenAggregate(key=[],
functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#58L]) */
/* 188 */
/* 189 */     while (!agg_initAgg) {
/* 190 */       agg_initAgg = true;
/* 191 */       long agg_beforeAgg = System.nanoTime();
/* 192 */       agg_doAggregateWithoutKey();
/* 193 */       agg_aggTime.add((System.nanoTime() - agg_beforeAgg) /
1000000);
/* 194 */
/* 195 */       // output the result
/* 196 */
/* 197 */       agg_numOutputRows.add(1);
/* 198 */       /*** CONSUME: WholeStageCodegen */
/* 199 */
/* 200 */       agg_rowWriter.zeroOutNullBytes();
/* 201 */
/* 202 */       if (agg_bufIsNull) {
/* 203 */         agg_rowWriter.setNullAt(0);
/* 204 */       } else {
/* 205 */         agg_rowWriter.write(0, agg_bufValue);
/* 206 */       }
/* 207 */       append(agg_result);
/* 208 */     }
/* 209 */   }
/* 210 */ }

org.codehaus.commons.compiler.CompileException: File 'generated.java', Line
98, Column 63: No applicable constructor/method found for zero actual
parameters; candidates are: "test1.ApacheAccessLog(java.lang.String,
java.lang.String, java.lang.String, java.lang.String, java.lang.String,
java.lang.String, java.lang.String, java.lang.String, java.lang.String)"
at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:10174)
at
org.codehaus.janino.UnitCompiler.findMostSpecificIInvocable(UnitCompiler.java:7559)
at
org.codehaus.janino.UnitCompiler.invokeConstructor(UnitCompiler.java:6505)
at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:4126)
at org.codehaus.janino.UnitCompiler.access$7600(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$10.visitNewClassInstance(UnitCompiler.java:3275)
at org.codehaus.janino.Java$NewClassInstance.accept(Java.java:4085)
at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:3290)
at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4368)
at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:3560)
at org.codehaus.janino.UnitCompiler.access$6600(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$10.visitConditionalExpression(UnitCompiler.java:3260)
at org.codehaus.janino.Java$ConditionalExpression.accept(Java.java:3441)
at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:3290)
at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4368)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1845)
at org.codehaus.janino.UnitCompiler.access$2000(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$4.visitLocalVariableDeclarationStatement(UnitCompiler.java:945)
at
org.codehaus.janino.Java$LocalVariableDeclarationStatement.accept(Java.java:2508)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:958)
at
org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1007)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:993)
at org.codehaus.janino.UnitCompiler.access$1000(UnitCompiler.java:185)
at org.codehaus.janino.UnitCompiler$4.visitBlock(UnitCompiler.java:935)
at org.codehaus.janino.Java$Block.accept(Java.java:2012)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:958)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1273)
at org.codehaus.janino.UnitCompiler.access$1500(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$4.visitWhileStatement(UnitCompiler.java:940)
at org.codehaus.janino.Java$WhileStatement.accept(Java.java:2244)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:958)
at
org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1007)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2293)
at
org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:822)
at
org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:794)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:507)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
at
org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
at
org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
at
org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
at
org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:800)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:825)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:822)
at
org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
at
org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
at
org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
at
org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
at
org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:764)
at
org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:338)
at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:343)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
at
org.apache.spark.sql.execution.exchange.ShuffleExchange.prepareShuffleDependency(ShuffleExchange.scala:86)
at
org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:122)
at
org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:113)
at
org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
at
org.apache.spark.sql.execution.exchange.ShuffleExchange.doExecute(ShuffleExchange.scala:113)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
at
org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:233)
at
org.apache.spark.sql.execution.aggregate.TungstenAggregate.inputRDDs(TungstenAggregate.scala:134)
at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:348)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
at
org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:240)
at
org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:287)
at
org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2122)
at
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2436)
at org.apache.spark.sql.Dataset.org
$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2121)
at org.apache.spark.sql.Dataset.org
$apache$spark$sql$Dataset$$collect(Dataset.scala:2128)
at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2156)
at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2155)
at org.apache.spark.sql.Dataset.withCallback(Dataset.scala:2449)
at org.apache.spark.sql.Dataset.count(Dataset.scala:2155)
at test1.Test1.main(Test1.java:42)
Exception in thread "main"
org.apache.spark.sql.catalyst.errors.package$TreeNodeException: execute,
tree:
Exchange SinglePartition, None
+- WholeStageCodegen
   :  +- TungstenAggregate(key=[],
functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#58L])
   :     +- Project
   :        +- Filter test1.Test1$$Lambda$5/454547627@6cdbe5ec.call
   :           +- INPUT
   +- LocalTableScan
[clientIdentd#26,contentSize#27L,dateTimeString#28,endpoint#29,ipAddress#30,method#31,protocol#32,responseCode#33,userID#34],
[[0,5000000001,4d2,580000001a,7800000046,c000000007,c800000003,d000000008,c8,d800000001,2d,322f6c754a2f3132,303a30313a343130,38302d2030303a30,3030,726574706168632f,732f6176616a2f31,2f6e69616d2f6372,6d6f632f6176616a,697262617461642f,737070612f736b63,6f4c2f73676f6c2f,657a796c616e4167,6176616a2e72,312e312e312e31,544547,312e312f50545448,2d],[0,5000000001,7d0,580000001a,780000004f,c800000007,d000000003,d800000008,c8,e000000001,2d,322f6c754a2f3132,303a30313a343130,38302d2030303a30,3030,726574706168632f,732f6176616a2f31,2f6e69616d2f6372,6d6f632f6176616a,697262617461642f,737070612f736b63,6f4c2f73676f6c2f,657a796c616e4167,696d616572745372,6176616a2e676e,312e312e312e31,544547,312e312f50545448,2d],[0,5000000001,64,580000001a,7800000040,b800000007,c000000003,c800000008,191,d000000001,2d,322f6c754a2f3132,303a30313a343130,38302d2030303a30,3030,726574706168632f,732f6176616a2f31,2f6e69616d2f6372,6d6f632f6176616a,697262617461642f,737070612f736b63,72452f73676f6c2f,6176616a2e726f72,312e312e312e31,544547,312e312f50545448,2d]]

at
org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:50)
at
org.apache.spark.sql.execution.exchange.ShuffleExchange.doExecute(ShuffleExchange.scala:113)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
at
org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:233)
at
org.apache.spark.sql.execution.aggregate.TungstenAggregate.inputRDDs(TungstenAggregate.scala:134)
at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:348)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
at
org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:240)
at
org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:287)
at
org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2122)
at
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2436)
at org.apache.spark.sql.Dataset.org
$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2121)
at org.apache.spark.sql.Dataset.org
$apache$spark$sql$Dataset$$collect(Dataset.scala:2128)
at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2156)
at org.apache.spark.sql.Dataset$$anonfun$count$1.apply(Dataset.scala:2155)
at org.apache.spark.sql.Dataset.withCallback(Dataset.scala:2449)
at org.apache.spark.sql.Dataset.count(Dataset.scala:2155)
at test1.Test1.main(Test1.java:42)
Caused by: java.util.concurrent.ExecutionException: java.lang.Exception:
failed to compile: org.codehaus.commons.compiler.CompileException: File
'generated.java', Line 98, Column 63: No applicable constructor/method
found for zero actual parameters; candidates are:
"test1.ApacheAccessLog(java.lang.String, java.lang.String,
java.lang.String, java.lang.String, java.lang.String, java.lang.String,
java.lang.String, java.lang.String, java.lang.String)"
[generated code listing omitted; identical to the listing above]

at
org.spark_project.guava.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
at
org.spark_project.guava.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
at
org.spark_project.guava.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
at
org.spark_project.guava.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
at
org.spark_project.guava.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
at
org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
at
org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
at
org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
at
org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:764)
at
org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:338)
at
org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:343)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
at
org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
at
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at
org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
at
org.apache.spark.sql.execution.exchange.ShuffleExchange.prepareShuffleDependency(ShuffleExchange.scala:86)
at
org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:122)
at
org.apache.spark.sql.execution.exchange.ShuffleExchange$$anonfun$doExecute$1.apply(ShuffleExchange.scala:113)
at
org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:49)
... 28 more
Caused by: java.lang.Exception: failed to compile:
org.codehaus.commons.compiler.CompileException: File 'generated.java', Line
98, Column 63: No applicable constructor/method found for zero actual
parameters; candidates are: "test1.ApacheAccessLog(java.lang.String,
java.lang.String, java.lang.String, java.lang.String, java.lang.String,
java.lang.String, java.lang.String, java.lang.String, java.lang.String)"
[generated code listing omitted; identical to the listing above]

at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:805)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:825)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:822)
at
org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
at
org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
... 46 more
Caused by: org.codehaus.commons.compiler.CompileException: File
'generated.java', Line 98, Column 63: No applicable constructor/method
found for zero actual parameters; candidates are:
"test1.ApacheAccessLog(java.lang.String, java.lang.String,
java.lang.String, java.lang.String, java.lang.String, java.lang.String,
java.lang.String, java.lang.String, java.lang.String)"
at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:10174)
at
org.codehaus.janino.UnitCompiler.findMostSpecificIInvocable(UnitCompiler.java:7559)
at
org.codehaus.janino.UnitCompiler.invokeConstructor(UnitCompiler.java:6505)
at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:4126)
at org.codehaus.janino.UnitCompiler.access$7600(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$10.visitNewClassInstance(UnitCompiler.java:3275)
at org.codehaus.janino.Java$NewClassInstance.accept(Java.java:4085)
at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:3290)
at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4368)
at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:3560)
at org.codehaus.janino.UnitCompiler.access$6600(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$10.visitConditionalExpression(UnitCompiler.java:3260)
at org.codehaus.janino.Java$ConditionalExpression.accept(Java.java:3441)
at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:3290)
at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4368)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1845)
at org.codehaus.janino.UnitCompiler.access$2000(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$4.visitLocalVariableDeclarationStatement(UnitCompiler.java:945)
at
org.codehaus.janino.Java$LocalVariableDeclarationStatement.accept(Java.java:2508)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:958)
at
org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1007)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:993)
at org.codehaus.janino.UnitCompiler.access$1000(UnitCompiler.java:185)
at org.codehaus.janino.UnitCompiler$4.visitBlock(UnitCompiler.java:935)
at org.codehaus.janino.Java$Block.accept(Java.java:2012)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:958)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1273)
at org.codehaus.janino.UnitCompiler.access$1500(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$4.visitWhileStatement(UnitCompiler.java:940)
at org.codehaus.janino.Java$WhileStatement.accept(Java.java:2244)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:958)
at
org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1007)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2293)
at
org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:822)
at
org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:794)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:507)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
at
org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
at
org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
at
org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
at
org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
at
org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
at
org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:800)
... 50 more
16/07/17 22:19:59 INFO SparkContext: Invoking stop() from shutdown hook

Re: Error when using filter with the Dataset API in Java

Posted by Ted Yu <yu...@gmail.com>.
Does ApacheAccessLog have a no-argument constructor?
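
If it doesn't, that is almost certainly the problem: the generated code
instantiates the class with new test1.ApacheAccessLog() (line 98 of the
listing) and then populates it through the setters, so the class needs a
public no-arg constructor in addition to any other constructors. Here is a
minimal sketch of a bean-style ApacheAccessLog; the field names are taken
from the setters visible in the generated code, and the int/long types are
inferred from setResponseCode/setContentSize, so treat the exact types as
assumptions:

public class ApacheAccessLog implements java.io.Serializable {
  private String ipAddress;
  private String clientIdentd;
  private String userID;
  private String dateTimeString;
  private String method;
  private String endpoint;
  private String protocol;
  private int responseCode;
  private long contentSize;

  // The bean encoder needs a public no-arg constructor; keeping the
  // existing 9-argument constructor alongside it is fine.
  public ApacheAccessLog() {}

  public String getIpAddress() { return ipAddress; }
  public void setIpAddress(String ipAddress) { this.ipAddress = ipAddress; }
  public String getClientIdentd() { return clientIdentd; }
  public void setClientIdentd(String clientIdentd) { this.clientIdentd = clientIdentd; }
  public String getUserID() { return userID; }
  public void setUserID(String userID) { this.userID = userID; }
  public String getDateTimeString() { return dateTimeString; }
  public void setDateTimeString(String dateTimeString) { this.dateTimeString = dateTimeString; }
  public String getMethod() { return method; }
  public void setMethod(String method) { this.method = method; }
  public String getEndpoint() { return endpoint; }
  public void setEndpoint(String endpoint) { this.endpoint = endpoint; }
  public String getProtocol() { return protocol; }
  public void setProtocol(String protocol) { this.protocol = protocol; }
  public int getResponseCode() { return responseCode; }
  public void setResponseCode(int responseCode) { this.responseCode = responseCode; }
  public long getContentSize() { return contentSize; }
  public void setContentSize(long contentSize) { this.contentSize = contentSize; }
}

With that in place, building the Dataset through the bean encoder should
work. For example (assuming spark is your SparkSession and logs is a
java.util.List<ApacheAccessLog>; adjust to however you actually load the
data):

Dataset<ApacheAccessLog> logSet =
    spark.createDataset(logs, Encoders.bean(ApacheAccessLog.class));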

Cheers

On Sun, Jul 17, 2016 at 9:57 AM, VG <vl...@gmail.com> wrote:

> Hello All,
>
> I am having a hard time using the dataset API. Any suggestions where I am
> going wrong.
> I am using 2.0.0-preview
>
> When I try to call a filter operation like this
> Dataset<ApacheAccessLog> fds = logSet.filter(log->
> log.getResponseCode()!=200);
> fds.count();
>
> I get the following error.
> 16/07/17 22:19:58 ERROR CodeGenerator: failed to compile:
> org.codehaus.commons.compiler.CompileException: File 'generated.java', Line
> 98, Column 63: No applicable constructor/method found for zero actual
> parameters; candidates are: "test1.ApacheAccessLog(java.lang.String,
> java.lang.String, java.lang.String, java.lang.String, java.lang.String,
> java.lang.String, java.lang.String, java.lang.String, java.lang.String)"
> [snip: the quoted generated-code listing and stack traces are identical
> to the output shown in full above]