Posted to issues@spark.apache.org by "Kazuaki Ishizaki (JIRA)" <ji...@apache.org> on 2017/07/12 17:20:00 UTC

[jira] [Comment Edited] (SPARK-21391) Cannot convert a Seq of Map whose value type is again a seq, into a dataset

    [ https://issues.apache.org/jira/browse/SPARK-21391?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16084333#comment-16084333 ] 

Kazuaki Ishizaki edited comment on SPARK-21391 at 7/12/17 5:19 PM:
-------------------------------------------------------------------

This program works with the master branch or Spark 2.2. Would it be possible to use Spark 2.2?

{code}
+--------------------+
|          properties|
+--------------------+
|Map(A1 -> [Wrappe...|
|Map(A2 -> [Wrappe...|
+--------------------+
{code}
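
For reference, a standalone sketch of the reproducer (the SparkSession builder settings and the object name are illustrative assumptions; the case classes and data come from the issue description below):

{code}
import org.apache.spark.sql.SparkSession

// Case classes defined at the top level so Spark can derive an encoder for ItemProperties.
case class Values(values: Seq[Double])
case class ItemProperties(properties: Map[String, Values])

object Spark21391Repro {
  def main(args: Array[String]): Unit = {
    // Illustrative local session; any active SparkSession behaves the same way.
    val spark = SparkSession.builder().master("local[*]").appName("SPARK-21391").getOrCreate()
    import spark.implicits._

    val l1 = List(
      ItemProperties(Map(
        "A1" -> Values(Seq(1.0, 2.0)),
        "B1" -> Values(Seq(44.0, 55.0))
      )),
      ItemProperties(Map(
        "A2" -> Values(Seq(123.0, 25.0)),
        "B2" -> Values(Seq(445.0, 35.0))
      ))
    )

    // Fails in codegen on 2.1.0; prints the two rows shown above on master / 2.2.
    l1.toDS().show()

    spark.stop()
  }
}
{code}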



was (Author: kiszk):
This program works with the master and Spark 2.2. Would it be possible to use Spark 2.2?

{code}
+--------------------+
|          properties|
+--------------------+
|Map(A1 -> [Wrappe...|
|Map(A2 -> [Wrappe...|
+--------------------+
{code}


> Cannot convert a Seq of Map whose value type is again a seq, into a dataset 
> ----------------------------------------------------------------------------
>
>                 Key: SPARK-21391
>                 URL: https://issues.apache.org/jira/browse/SPARK-21391
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 2.1.0
>         Environment: Seen on macOS, Scala 2.11, Java 8
>            Reporter: indraneel rao
>
> There is an error when trying to create a dataset from a sequence of Maps whose values contain any kind of collection, even when the collection is wrapped in a case class.
> E.g., the following piece of code throws an error:
>    
> {code:java}
>     case class Values(values: Seq[Double])
>     case class ItemProperties(properties: Map[String, Values])
>     val l1 = List(
>       ItemProperties(Map(
>         "A1" -> Values(Seq(1.0, 2.0)),
>         "B1" -> Values(Seq(44.0, 55.0))
>       )),
>       ItemProperties(Map(
>         "A2" -> Values(Seq(123.0, 25.0)),
>         "B2" -> Values(Seq(445.0, 35.0))
>       ))
>     )
>     l1.toDS().show()
> {code}
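> The snippet assumes an active SparkSession with its implicits in scope (the stack trace below enters through SQLImplicits.localSeqToDatasetHolder); a minimal sketch of that setup, with illustrative builder settings:
> {code:java}
>     // Not part of the original TestApp.scala; shown only so the snippet above compiles standalone.
>     val spark = org.apache.spark.sql.SparkSession.builder().master("local[*]").getOrCreate()
>     import spark.implicits._   // provides toDS() on local Seqs/Lists
> {code}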
> Here's the error:
> 17/07/12 21:59:35 ERROR CodeGenerator: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
> /* 001 */ public java.lang.Object generate(Object[] references) {
> /* 002 */   return new SpecificUnsafeProjection(references);
> /* 003 */ }
> /* 004 */
> /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
> /* 006 */
> /* 007 */   private Object[] references;
> /* 008 */   private boolean resultIsNull;
> /* 009 */   private java.lang.String argValue;
> /* 010 */   private Object[] values;
> /* 011 */   private boolean resultIsNull1;
> /* 012 */   private scala.collection.Seq argValue1;
> /* 013 */   private boolean isNull11;
> /* 014 */   private boolean value11;
> /* 015 */   private boolean isNull12;
> /* 016 */   private InternalRow value12;
> /* 017 */   private boolean isNull13;
> /* 018 */   private InternalRow value13;
> /* 019 */   private UnsafeRow result;
> /* 020 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
> /* 021 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
> /* 022 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter;
> /* 023 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter1;
> /* 024 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter1;
> /* 025 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter2;
> /* 026 */
> /* 027 */   public SpecificUnsafeProjection(Object[] references) {
> /* 028 */     this.references = references;
> /* 029 */
> /* 030 */
> /* 031 */     this.values = null;
> /* 032 */
> /* 033 */
> /* 034 */     isNull11 = false;
> /* 035 */     value11 = false;
> /* 036 */     isNull12 = false;
> /* 037 */     value12 = null;
> /* 038 */     isNull13 = false;
> /* 039 */     value13 = null;
> /* 040 */     result = new UnsafeRow(1);
> /* 041 */     this.holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result, 32);
> /* 042 */     this.rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
> /* 043 */     this.arrayWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
> /* 044 */     this.arrayWriter1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
> /* 045 */     this.rowWriter1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
> /* 046 */     this.arrayWriter2 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
> /* 047 */
> /* 048 */   }
> /* 049 */
> /* 050 */   public void initialize(int partitionIndex) {
> /* 051 */
> /* 052 */   }
> /* 053 */
> /* 054 */
> /* 055 */   private void evalIfTrueExpr(InternalRow i) {
> /* 056 */     final InternalRow value7 = null;
> /* 057 */     isNull12 = true;
> /* 058 */     value12 = value7;
> /* 059 */   }
> /* 060 */
> /* 061 */
> /* 062 */   private void evalIfCondExpr(InternalRow i) {
> /* 063 */
> /* 064 */     isNull11 = false;
> /* 065 */     value11 = ExternalMapToCatalyst_value_isNull0;
> /* 066 */   }
> /* 067 */
> /* 068 */
> /* 069 */   private void evalIfFalseExpr(InternalRow i) {
> /* 070 */     values = new Object[1];
> /* 071 */     resultIsNull1 = false;
> /* 072 */     if (!resultIsNull1) {
> /* 073 */
> /* 074 */       boolean isNull10 = true;
> /* 075 */       scala.collection.Seq value10 = null;
> /* 076 */       if (!ExternalMapToCatalyst_value_isNull0) {
> /* 077 */
> /* 078 */         isNull10 = false;
> /* 079 */         if (!isNull10) {
> /* 080 */
> /* 081 */           Object funcResult1 = null;
> /* 082 */           funcResult1 = ExternalMapToCatalyst_value0.values();
> /* 083 */           if (funcResult1 == null) {
> /* 084 */             isNull10 = true;
> /* 085 */           } else {
> /* 086 */             value10 = (scala.collection.Seq) funcResult1;
> /* 087 */           }
> /* 088 */
> /* 089 */         }
> /* 090 */         isNull10 = value10 == null;
> /* 091 */       }
> /* 092 */       resultIsNull1 = isNull10;
> /* 093 */       argValue1 = value10;
> /* 094 */     }
> /* 095 */
> /* 096 */
> /* 097 */     final ArrayData value9 = resultIsNull1 ? null : new org.apache.spark.sql.catalyst.util.GenericArrayData(argValue1);
> /* 098 */     if (resultIsNull1) {
> /* 099 */       values[0] = null;
> /* 100 */     } else {
> /* 101 */       values[0] = value9;
> /* 102 */     }
> /* 103 */     final InternalRow value8 = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values);
> /* 104 */     this.values = null;
> /* 105 */     isNull13 = false;
> /* 106 */     value13 = value8;
> /* 107 */   }
> /* 108 */
> /* 109 */
> /* 110 */   // Scala.Function1 need this
> /* 111 */   public java.lang.Object apply(java.lang.Object row) {
> /* 112 */     return apply((InternalRow) row);
> /* 113 */   }
> /* 114 */
> /* 115 */   public UnsafeRow apply(InternalRow i) {
> /* 116 */     holder.reset();
> /* 117 */
> /* 118 */     rowWriter.zeroOutNullBytes();
> /* 119 */
> /* 120 */
> /* 121 */     boolean isNull3 = i.isNullAt(0);
> /* 122 */     local.ItemProperties value3 = isNull3 ? null : ((local.ItemProperties)i.get(0, null));
> /* 123 */
> /* 124 */     if (isNull3) {
> /* 125 */       throw new RuntimeException(((java.lang.String) references[0]));
> /* 126 */     }
> /* 127 */     boolean isNull1 = true;
> /* 128 */     scala.collection.immutable.Map value1 = null;
> /* 129 */     if (!false) {
> /* 130 */
> /* 131 */       isNull1 = false;
> /* 132 */       if (!isNull1) {
> /* 133 */
> /* 134 */         Object funcResult = null;
> /* 135 */         funcResult = value3.properties();
> /* 136 */         if (funcResult == null) {
> /* 137 */           isNull1 = true;
> /* 138 */         } else {
> /* 139 */           value1 = (scala.collection.immutable.Map) funcResult;
> /* 140 */         }
> /* 141 */
> /* 142 */       }
> /* 143 */       isNull1 = value1 == null;
> /* 144 */     }
> /* 145 */     MapData value = null;
> /* 146 */     if (!isNull1) {
> /* 147 */       final int length = value1.size();
> /* 148 */       final Object[] convertedKeys = new Object[length];
> /* 149 */       final Object[] convertedValues = new Object[length];
> /* 150 */       int index = 0;
> /* 151 */       final scala.collection.Iterator entries = value1.iterator();
> /* 152 */       while(entries.hasNext()) {
> /* 153 */
> /* 154 */         final scala.Tuple2 entry = (scala.Tuple2) entries.next();
> /* 155 */         java.lang.String ExternalMapToCatalyst_key0 = (java.lang.String) entry._1();
> /* 156 */         local.Values ExternalMapToCatalyst_value0 = (local.Values) entry._2();
> /* 157 */
> /* 158 */         boolean ExternalMapToCatalyst_value_isNull0 = ExternalMapToCatalyst_value0 == null;
> /* 159 */
> /* 160 */
> /* 161 */         resultIsNull = false;
> /* 162 */         if (!resultIsNull) {
> /* 163 */
> /* 164 */           resultIsNull = false;
> /* 165 */           argValue = ExternalMapToCatalyst_key0;
> /* 166 */         }
> /* 167 */
> /* 168 */         boolean isNull4 = resultIsNull;
> /* 169 */         final UTF8String value4 = resultIsNull ? null : org.apache.spark.unsafe.types.UTF8String.fromString(argValue);
> /* 170 */         isNull4 = value4 == null;
> /* 171 */         if (isNull4) {
> /* 172 */           throw new RuntimeException("Cannot use null as map key!");
> /* 173 */         } else {
> /* 174 */           convertedKeys[index] = (UTF8String) value4;
> /* 175 */         }
> /* 176 */
> /* 177 */
> /* 178 */         evalIfCondExpr(i);
> /* 179 */         boolean isNull5 = false;
> /* 180 */         InternalRow value5 = null;
> /* 181 */         if (!isNull11 && value11) {
> /* 182 */           evalIfTrueExpr(i);
> /* 183 */           isNull5 = isNull12;
> /* 184 */           value5 = value12;
> /* 185 */         } else {
> /* 186 */           evalIfFalseExpr(i);
> /* 187 */           isNull5 = isNull13;
> /* 188 */           value5 = value13;
> /* 189 */         }
> /* 190 */         if (isNull5) {
> /* 191 */           convertedValues[index] = null;
> /* 192 */         } else {
> /* 193 */           convertedValues[index] = (InternalRow) value5;
> /* 194 */         }
> /* 195 */
> /* 196 */         index++;
> /* 197 */       }
> /* 198 */
> /* 199 */       value = new org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys), new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues));
> /* 200 */     }
> /* 201 */     if (isNull1) {
> /* 202 */       rowWriter.setNullAt(0);
> /* 203 */     } else {
> /* 204 */       // Remember the current cursor so that we can calculate how many bytes are
> /* 205 */       // written later.
> /* 206 */       final int tmpCursor = holder.cursor;
> /* 207 */
> /* 208 */       if (value instanceof UnsafeMapData) {
> /* 209 */
> /* 210 */         final int sizeInBytes = ((UnsafeMapData) value).getSizeInBytes();
> /* 211 */         // grow the global buffer before writing data.
> /* 212 */         holder.grow(sizeInBytes);
> /* 213 */         ((UnsafeMapData) value).writeToMemory(holder.buffer, holder.cursor);
> /* 214 */         holder.cursor += sizeInBytes;
> /* 215 */
> /* 216 */       } else {
> /* 217 */         final ArrayData keys = value.keyArray();
> /* 218 */         final ArrayData values1 = value.valueArray();
> /* 219 */
> /* 220 */         // preserve 8 bytes to write the key array numBytes later.
> /* 221 */         holder.grow(8);
> /* 222 */         holder.cursor += 8;
> /* 223 */
> /* 224 */         // Remember the current cursor so that we can write numBytes of key array later.
> /* 225 */         final int tmpCursor1 = holder.cursor;
> /* 226 */
> /* 227 */
> /* 228 */         if (keys instanceof UnsafeArrayData) {
> /* 229 */
> /* 230 */           final int sizeInBytes1 = ((UnsafeArrayData) keys).getSizeInBytes();
> /* 231 */           // grow the global buffer before writing data.
> /* 232 */           holder.grow(sizeInBytes1);
> /* 233 */           ((UnsafeArrayData) keys).writeToMemory(holder.buffer, holder.cursor);
> /* 234 */           holder.cursor += sizeInBytes1;
> /* 235 */
> /* 236 */         } else {
> /* 237 */           final int numElements = keys.numElements();
> /* 238 */           arrayWriter.initialize(holder, numElements, 8);
> /* 239 */
> /* 240 */           for (int index1 = 0; index1 < numElements; index1++) {
> /* 241 */             if (keys.isNullAt(index1)) {
> /* 242 */               arrayWriter.setNull(index1);
> /* 243 */             } else {
> /* 244 */               final UTF8String element = keys.getUTF8String(index1);
> /* 245 */               arrayWriter.write(index1, element);
> /* 246 */             }
> /* 247 */           }
> /* 248 */         }
> /* 249 */
> /* 250 */         // Write the numBytes of key array into the first 8 bytes.
> /* 251 */         Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor - tmpCursor1);
> /* 252 */
> /* 253 */
> /* 254 */         if (values1 instanceof UnsafeArrayData) {
> /* 255 */
> /* 256 */           final int sizeInBytes4 = ((UnsafeArrayData) values1).getSizeInBytes();
> /* 257 */           // grow the global buffer before writing data.
> /* 258 */           holder.grow(sizeInBytes4);
> /* 259 */           ((UnsafeArrayData) values1).writeToMemory(holder.buffer, holder.cursor);
> /* 260 */           holder.cursor += sizeInBytes4;
> /* 261 */
> /* 262 */         } else {
> /* 263 */           final int numElements1 = values1.numElements();
> /* 264 */           arrayWriter1.initialize(holder, numElements1, 8);
> /* 265 */
> /* 266 */           for (int index2 = 0; index2 < numElements1; index2++) {
> /* 267 */             if (values1.isNullAt(index2)) {
> /* 268 */               arrayWriter1.setNull(index2);
> /* 269 */             } else {
> /* 270 */               final InternalRow element1 = values1.getStruct(index2, 1);
> /* 271 */
> /* 272 */               final int tmpCursor3 = holder.cursor;
> /* 273 */
> /* 274 */               if (element1 instanceof UnsafeRow) {
> /* 275 */
> /* 276 */                 final int sizeInBytes2 = ((UnsafeRow) element1).getSizeInBytes();
> /* 277 */                 // grow the global buffer before writing data.
> /* 278 */                 holder.grow(sizeInBytes2);
> /* 279 */                 ((UnsafeRow) element1).writeToMemory(holder.buffer, holder.cursor);
> /* 280 */                 holder.cursor += sizeInBytes2;
> /* 281 */
> /* 282 */               } else {
> /* 283 */                 rowWriter1.reset();
> /* 284 */
> /* 285 */                 final ArrayData fieldName = element1.getArray(0);
> /* 286 */                 if (element1.isNullAt(0)) {
> /* 287 */                   rowWriter1.setNullAt(0);
> /* 288 */                 } else {
> /* 289 */                   // Remember the current cursor so that we can calculate how many bytes are
> /* 290 */                   // written later.
> /* 291 */                   final int tmpCursor4 = holder.cursor;
> /* 292 */
> /* 293 */                   if (fieldName instanceof UnsafeArrayData) {
> /* 294 */
> /* 295 */                     final int sizeInBytes3 = ((UnsafeArrayData) fieldName).getSizeInBytes();
> /* 296 */                     // grow the global buffer before writing data.
> /* 297 */                     holder.grow(sizeInBytes3);
> /* 298 */                     ((UnsafeArrayData) fieldName).writeToMemory(holder.buffer, holder.cursor);
> /* 299 */                     holder.cursor += sizeInBytes3;
> /* 300 */
> /* 301 */                   } else {
> /* 302 */                     final int numElements2 = fieldName.numElements();
> /* 303 */                     arrayWriter2.initialize(holder, numElements2, 8);
> /* 304 */
> /* 305 */                     for (int index3 = 0; index3 < numElements2; index3++) {
> /* 306 */                       if (fieldName.isNullAt(index3)) {
> /* 307 */                         arrayWriter2.setNullDouble(index3);
> /* 308 */                       } else {
> /* 309 */                         final double element2 = fieldName.getDouble(index3);
> /* 310 */                         arrayWriter2.write(index3, element2);
> /* 311 */                       }
> /* 312 */                     }
> /* 313 */                   }
> /* 314 */
> /* 315 */                   rowWriter1.setOffsetAndSize(0, tmpCursor4, holder.cursor - tmpCursor4);
> /* 316 */                 }
> /* 317 */               }
> /* 318 */
> /* 319 */               arrayWriter1.setOffsetAndSize(index2, tmpCursor3, holder.cursor - tmpCursor3);
> /* 320 */
> /* 321 */             }
> /* 322 */           }
> /* 323 */         }
> /* 324 */
> /* 325 */       }
> /* 326 */
> /* 327 */       rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor - tmpCursor);
> /* 328 */     }
> /* 329 */     result.setTotalSize(holder.totalSize());
> /* 330 */     return result;
> /* 331 */   }
> /* 332 */ }
> org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
> 	at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:11004)
> 	at org.codehaus.janino.UnitCompiler.toRvalueOrCompileException(UnitCompiler.java:6639)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:5001)
> 	at org.codehaus.janino.UnitCompiler.access$10500(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$13.visitAmbiguousName(UnitCompiler.java:4984)
> 	at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:3633)
> 	at org.codehaus.janino.Java$Lvalue.accept(Java.java:3563)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4956)
> 	at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4925)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3189)
> 	at org.codehaus.janino.UnitCompiler.access$5100(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3143)
> 	at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3139)
> 	at org.codehaus.janino.Java$Assignment.accept(Java.java:3847)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3139)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2112)
> 	at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1377)
> 	at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1370)
> 	at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:2558)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
> 	at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2811)
> 	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1262)
> 	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1234)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:538)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:890)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:894)
> 	at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:377)
> 	at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:369)
> 	at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1128)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
> 	at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1209)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:564)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:420)
> 	at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:374)
> 	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:369)
> 	at org.codehaus.janino.Java$AbstractPackageMemberClassDeclaration.accept(Java.java:1309)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
> 	at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:345)
> 	at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:396)
> 	at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:311)
> 	at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:229)
> 	at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:196)
> 	at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:91)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:935)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:998)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:995)
> 	at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> 	at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> 	at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> 	at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
> 	at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
> 	at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> 	at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:890)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:405)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:359)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:874)
> 	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:266)
> 	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:266)
> 	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:290)
> 	at org.apache.spark.sql.SparkSession$$anonfun$2.apply(SparkSession.scala:429)
> 	at org.apache.spark.sql.SparkSession$$anonfun$2.apply(SparkSession.scala:429)
> 	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
> 	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
> 	at scala.collection.immutable.List.foreach(List.scala:383)
> 	at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
> 	at scala.collection.immutable.List.map(List.scala:286)
> 	at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:429)
> 	at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:389)
> 	at org.apache.spark.sql.SQLImplicits.localSeqToDatasetHolder(SQLImplicits.scala:173)
> 	at local.TestApp$.main(TestApp.scala:63)
> 	at local.TestApp.main(TestApp.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
> Exception in thread "main" java.lang.RuntimeException: Error while encoding: java.util.concurrent.ExecutionException: java.lang.Exception: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
> /* 001 */ public java.lang.Object generate(Object[] references) {
> /* 002 */   return new SpecificUnsafeProjection(references);
> /* 003 */ }
> /* 004 */
> /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
> /* 006 */
> /* 007 */   private Object[] references;
> /* 008 */   private boolean resultIsNull;
> /* 009 */   private java.lang.String argValue;
> /* 010 */   private Object[] values;
> /* 011 */   private boolean resultIsNull1;
> /* 012 */   private scala.collection.Seq argValue1;
> /* 013 */   private boolean isNull11;
> /* 014 */   private boolean value11;
> /* 015 */   private boolean isNull12;
> /* 016 */   private InternalRow value12;
> /* 017 */   private boolean isNull13;
> /* 018 */   private InternalRow value13;
> /* 019 */   private UnsafeRow result;
> /* 020 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
> /* 021 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
> /* 022 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter;
> /* 023 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter1;
> /* 024 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter1;
> /* 025 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter2;
> /* 026 */
> /* 027 */   public SpecificUnsafeProjection(Object[] references) {
> /* 028 */     this.references = references;
> /* 029 */
> /* 030 */
> /* 031 */     this.values = null;
> /* 032 */
> /* 033 */
> /* 034 */     isNull11 = false;
> /* 035 */     value11 = false;
> /* 036 */     isNull12 = false;
> /* 037 */     value12 = null;
> /* 038 */     isNull13 = false;
> /* 039 */     value13 = null;
> /* 040 */     result = new UnsafeRow(1);
> /* 041 */     this.holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result, 32);
> /* 042 */     this.rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
> /* 043 */     this.arrayWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
> /* 044 */     this.arrayWriter1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
> /* 045 */     this.rowWriter1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
> /* 046 */     this.arrayWriter2 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
> /* 047 */
> /* 048 */   }
> /* 049 */
> /* 050 */   public void initialize(int partitionIndex) {
> /* 051 */
> /* 052 */   }
> /* 053 */
> /* 054 */
> /* 055 */   private void evalIfTrueExpr(InternalRow i) {
> /* 056 */     final InternalRow value7 = null;
> /* 057 */     isNull12 = true;
> /* 058 */     value12 = value7;
> /* 059 */   }
> /* 060 */
> /* 061 */
> /* 062 */   private void evalIfCondExpr(InternalRow i) {
> /* 063 */
> /* 064 */     isNull11 = false;
> /* 065 */     value11 = ExternalMapToCatalyst_value_isNull0;
> /* 066 */   }
> /* 067 */
> /* 068 */
> /* 069 */   private void evalIfFalseExpr(InternalRow i) {
> /* 070 */     values = new Object[1];
> /* 071 */     resultIsNull1 = false;
> /* 072 */     if (!resultIsNull1) {
> /* 073 */
> /* 074 */       boolean isNull10 = true;
> /* 075 */       scala.collection.Seq value10 = null;
> /* 076 */       if (!ExternalMapToCatalyst_value_isNull0) {
> /* 077 */
> /* 078 */         isNull10 = false;
> /* 079 */         if (!isNull10) {
> /* 080 */
> /* 081 */           Object funcResult1 = null;
> /* 082 */           funcResult1 = ExternalMapToCatalyst_value0.values();
> /* 083 */           if (funcResult1 == null) {
> /* 084 */             isNull10 = true;
> /* 085 */           } else {
> /* 086 */             value10 = (scala.collection.Seq) funcResult1;
> /* 087 */           }
> /* 088 */
> /* 089 */         }
> /* 090 */         isNull10 = value10 == null;
> /* 091 */       }
> /* 092 */       resultIsNull1 = isNull10;
> /* 093 */       argValue1 = value10;
> /* 094 */     }
> /* 095 */
> /* 096 */
> /* 097 */     final ArrayData value9 = resultIsNull1 ? null : new org.apache.spark.sql.catalyst.util.GenericArrayData(argValue1);
> /* 098 */     if (resultIsNull1) {
> /* 099 */       values[0] = null;
> /* 100 */     } else {
> /* 101 */       values[0] = value9;
> /* 102 */     }
> /* 103 */     final InternalRow value8 = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values);
> /* 104 */     this.values = null;
> /* 105 */     isNull13 = false;
> /* 106 */     value13 = value8;
> /* 107 */   }
> /* 108 */
> /* 109 */
> /* 110 */   // Scala.Function1 need this
> /* 111 */   public java.lang.Object apply(java.lang.Object row) {
> /* 112 */     return apply((InternalRow) row);
> /* 113 */   }
> /* 114 */
> /* 115 */   public UnsafeRow apply(InternalRow i) {
> /* 116 */     holder.reset();
> /* 117 */
> /* 118 */     rowWriter.zeroOutNullBytes();
> /* 119 */
> /* 120 */
> /* 121 */     boolean isNull3 = i.isNullAt(0);
> /* 122 */     local.ItemProperties value3 = isNull3 ? null : ((local.ItemProperties)i.get(0, null));
> /* 123 */
> /* 124 */     if (isNull3) {
> /* 125 */       throw new RuntimeException(((java.lang.String) references[0]));
> /* 126 */     }
> /* 127 */     boolean isNull1 = true;
> /* 128 */     scala.collection.immutable.Map value1 = null;
> /* 129 */     if (!false) {
> /* 130 */
> /* 131 */       isNull1 = false;
> /* 132 */       if (!isNull1) {
> /* 133 */
> /* 134 */         Object funcResult = null;
> /* 135 */         funcResult = value3.properties();
> /* 136 */         if (funcResult == null) {
> /* 137 */           isNull1 = true;
> /* 138 */         } else {
> /* 139 */           value1 = (scala.collection.immutable.Map) funcResult;
> /* 140 */         }
> /* 141 */
> /* 142 */       }
> /* 143 */       isNull1 = value1 == null;
> /* 144 */     }
> /* 145 */     MapData value = null;
> /* 146 */     if (!isNull1) {
> /* 147 */       final int length = value1.size();
> /* 148 */       final Object[] convertedKeys = new Object[length];
> /* 149 */       final Object[] convertedValues = new Object[length];
> /* 150 */       int index = 0;
> /* 151 */       final scala.collection.Iterator entries = value1.iterator();
> /* 152 */       while(entries.hasNext()) {
> /* 153 */
> /* 154 */         final scala.Tuple2 entry = (scala.Tuple2) entries.next();
> /* 155 */         java.lang.String ExternalMapToCatalyst_key0 = (java.lang.String) entry._1();
> /* 156 */         local.Values ExternalMapToCatalyst_value0 = (local.Values) entry._2();
> /* 157 */
> /* 158 */         boolean ExternalMapToCatalyst_value_isNull0 = ExternalMapToCatalyst_value0 == null;
> /* 159 */
> /* 160 */
> /* 161 */         resultIsNull = false;
> /* 162 */         if (!resultIsNull) {
> /* 163 */
> /* 164 */           resultIsNull = false;
> /* 165 */           argValue = ExternalMapToCatalyst_key0;
> /* 166 */         }
> /* 167 */
> /* 168 */         boolean isNull4 = resultIsNull;
> /* 169 */         final UTF8String value4 = resultIsNull ? null : org.apache.spark.unsafe.types.UTF8String.fromString(argValue);
> /* 170 */         isNull4 = value4 == null;
> /* 171 */         if (isNull4) {
> /* 172 */           throw new RuntimeException("Cannot use null as map key!");
> /* 173 */         } else {
> /* 174 */           convertedKeys[index] = (UTF8String) value4;
> /* 175 */         }
> /* 176 */
> /* 177 */
> /* 178 */         evalIfCondExpr(i);
> /* 179 */         boolean isNull5 = false;
> /* 180 */         InternalRow value5 = null;
> /* 181 */         if (!isNull11 && value11) {
> /* 182 */           evalIfTrueExpr(i);
> /* 183 */           isNull5 = isNull12;
> /* 184 */           value5 = value12;
> /* 185 */         } else {
> /* 186 */           evalIfFalseExpr(i);
> /* 187 */           isNull5 = isNull13;
> /* 188 */           value5 = value13;
> /* 189 */         }
> /* 190 */         if (isNull5) {
> /* 191 */           convertedValues[index] = null;
> /* 192 */         } else {
> /* 193 */           convertedValues[index] = (InternalRow) value5;
> /* 194 */         }
> /* 195 */
> /* 196 */         index++;
> /* 197 */       }
> /* 198 */
> /* 199 */       value = new org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys), new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues));
> /* 200 */     }
> /* 201 */     if (isNull1) {
> /* 202 */       rowWriter.setNullAt(0);
> /* 203 */     } else {
> /* 204 */       // Remember the current cursor so that we can calculate how many bytes are
> /* 205 */       // written later.
> /* 206 */       final int tmpCursor = holder.cursor;
> /* 207 */
> /* 208 */       if (value instanceof UnsafeMapData) {
> /* 209 */
> /* 210 */         final int sizeInBytes = ((UnsafeMapData) value).getSizeInBytes();
> /* 211 */         // grow the global buffer before writing data.
> /* 212 */         holder.grow(sizeInBytes);
> /* 213 */         ((UnsafeMapData) value).writeToMemory(holder.buffer, holder.cursor);
> /* 214 */         holder.cursor += sizeInBytes;
> /* 215 */
> /* 216 */       } else {
> /* 217 */         final ArrayData keys = value.keyArray();
> /* 218 */         final ArrayData values1 = value.valueArray();
> /* 219 */
> /* 220 */         // preserve 8 bytes to write the key array numBytes later.
> /* 221 */         holder.grow(8);
> /* 222 */         holder.cursor += 8;
> /* 223 */
> /* 224 */         // Remember the current cursor so that we can write numBytes of key array later.
> /* 225 */         final int tmpCursor1 = holder.cursor;
> /* 226 */
> /* 227 */
> /* 228 */         if (keys instanceof UnsafeArrayData) {
> /* 229 */
> /* 230 */           final int sizeInBytes1 = ((UnsafeArrayData) keys).getSizeInBytes();
> /* 231 */           // grow the global buffer before writing data.
> /* 232 */           holder.grow(sizeInBytes1);
> /* 233 */           ((UnsafeArrayData) keys).writeToMemory(holder.buffer, holder.cursor);
> /* 234 */           holder.cursor += sizeInBytes1;
> /* 235 */
> /* 236 */         } else {
> /* 237 */           final int numElements = keys.numElements();
> /* 238 */           arrayWriter.initialize(holder, numElements, 8);
> /* 239 */
> /* 240 */           for (int index1 = 0; index1 < numElements; index1++) {
> /* 241 */             if (keys.isNullAt(index1)) {
> /* 242 */               arrayWriter.setNull(index1);
> /* 243 */             } else {
> /* 244 */               final UTF8String element = keys.getUTF8String(index1);
> /* 245 */               arrayWriter.write(index1, element);
> /* 246 */             }
> /* 247 */           }
> /* 248 */         }
> /* 249 */
> /* 250 */         // Write the numBytes of key array into the first 8 bytes.
> /* 251 */         Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor - tmpCursor1);
> /* 252 */
> /* 253 */
> /* 254 */         if (values1 instanceof UnsafeArrayData) {
> /* 255 */
> /* 256 */           final int sizeInBytes4 = ((UnsafeArrayData) values1).getSizeInBytes();
> /* 257 */           // grow the global buffer before writing data.
> /* 258 */           holder.grow(sizeInBytes4);
> /* 259 */           ((UnsafeArrayData) values1).writeToMemory(holder.buffer, holder.cursor);
> /* 260 */           holder.cursor += sizeInBytes4;
> /* 261 */
> /* 262 */         } else {
> /* 263 */           final int numElements1 = values1.numElements();
> /* 264 */           arrayWriter1.initialize(holder, numElements1, 8);
> /* 265 */
> /* 266 */           for (int index2 = 0; index2 < numElements1; index2++) {
> /* 267 */             if (values1.isNullAt(index2)) {
> /* 268 */               arrayWriter1.setNull(index2);
> /* 269 */             } else {
> /* 270 */               final InternalRow element1 = values1.getStruct(index2, 1);
> /* 271 */
> /* 272 */               final int tmpCursor3 = holder.cursor;
> /* 273 */
> /* 274 */               if (element1 instanceof UnsafeRow) {
> /* 275 */
> /* 276 */                 final int sizeInBytes2 = ((UnsafeRow) element1).getSizeInBytes();
> /* 277 */                 // grow the global buffer before writing data.
> /* 278 */                 holder.grow(sizeInBytes2);
> /* 279 */                 ((UnsafeRow) element1).writeToMemory(holder.buffer, holder.cursor);
> /* 280 */                 holder.cursor += sizeInBytes2;
> /* 281 */
> /* 282 */               } else {
> /* 283 */                 rowWriter1.reset();
> /* 284 */
> /* 285 */                 final ArrayData fieldName = element1.getArray(0);
> /* 286 */                 if (element1.isNullAt(0)) {
> /* 287 */                   rowWriter1.setNullAt(0);
> /* 288 */                 } else {
> /* 289 */                   // Remember the current cursor so that we can calculate how many bytes are
> /* 290 */                   // written later.
> /* 291 */                   final int tmpCursor4 = holder.cursor;
> /* 292 */
> /* 293 */                   if (fieldName instanceof UnsafeArrayData) {
> /* 294 */
> /* 295 */                     final int sizeInBytes3 = ((UnsafeArrayData) fieldName).getSizeInBytes();
> /* 296 */                     // grow the global buffer before writing data.
> /* 297 */                     holder.grow(sizeInBytes3);
> /* 298 */                     ((UnsafeArrayData) fieldName).writeToMemory(holder.buffer, holder.cursor);
> /* 299 */                     holder.cursor += sizeInBytes3;
> /* 300 */
> /* 301 */                   } else {
> /* 302 */                     final int numElements2 = fieldName.numElements();
> /* 303 */                     arrayWriter2.initialize(holder, numElements2, 8);
> /* 304 */
> /* 305 */                     for (int index3 = 0; index3 < numElements2; index3++) {
> /* 306 */                       if (fieldName.isNullAt(index3)) {
> /* 307 */                         arrayWriter2.setNullDouble(index3);
> /* 308 */                       } else {
> /* 309 */                         final double element2 = fieldName.getDouble(index3);
> /* 310 */                         arrayWriter2.write(index3, element2);
> /* 311 */                       }
> /* 312 */                     }
> /* 313 */                   }
> /* 314 */
> /* 315 */                   rowWriter1.setOffsetAndSize(0, tmpCursor4, holder.cursor - tmpCursor4);
> /* 316 */                 }
> /* 317 */               }
> /* 318 */
> /* 319 */               arrayWriter1.setOffsetAndSize(index2, tmpCursor3, holder.cursor - tmpCursor3);
> /* 320 */
> /* 321 */             }
> /* 322 */           }
> /* 323 */         }
> /* 324 */
> /* 325 */       }
> /* 326 */
> /* 327 */       rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor - tmpCursor);
> /* 328 */     }
> /* 329 */     result.setTotalSize(holder.totalSize());
> /* 330 */     return result;
> /* 331 */   }
> /* 332 */ }
> externalmaptocatalyst(ExternalMapToCatalyst_key0, ObjectType(class java.lang.String), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key0, false, ObjectType(class java.lang.String)), true), ExternalMapToCatalyst_value0, ExternalMapToCatalyst_value_isNull0, ObjectType(class local.Values), if (isnull(lambdavariable(ExternalMapToCatalyst_value0, ExternalMapToCatalyst_value_isNull0, ObjectType(class local.Values)))) null else named_struct(values, newInstance(class org.apache.spark.sql.catalyst.util.GenericArrayData)), assertnotnull(input[0, local.ItemProperties, true], top level Product input object).properties) AS properties#0
> +- externalmaptocatalyst(ExternalMapToCatalyst_key0, ObjectType(class java.lang.String), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key0, false, ObjectType(class java.lang.String)), true), ExternalMapToCatalyst_value0, ExternalMapToCatalyst_value_isNull0, ObjectType(class local.Values), if (isnull(lambdavariable(ExternalMapToCatalyst_value0, ExternalMapToCatalyst_value_isNull0, ObjectType(class local.Values)))) null else named_struct(values, newInstance(class org.apache.spark.sql.catalyst.util.GenericArrayData)), assertnotnull(input[0, local.ItemProperties, true], top level Product input object).properties)
>    +- assertnotnull(input[0, local.ItemProperties, true], top level Product input object).properties
>       +- assertnotnull(input[0, local.ItemProperties, true], top level Product input object)
>          +- input[0, local.ItemProperties, true]
> 	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:293)
> 	at org.apache.spark.sql.SparkSession$$anonfun$2.apply(SparkSession.scala:429)
> 	at org.apache.spark.sql.SparkSession$$anonfun$2.apply(SparkSession.scala:429)
> 	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
> 	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
> 	at scala.collection.immutable.List.foreach(List.scala:383)
> 	at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
> 	at scala.collection.immutable.List.map(List.scala:286)
> 	at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:429)
> 	at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:389)
> 	at org.apache.spark.sql.SQLImplicits.localSeqToDatasetHolder(SQLImplicits.scala:173)
> 	at local.TestApp$.main(TestApp.scala:63)
> 	at local.TestApp.main(TestApp.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
> Caused by: java.util.concurrent.ExecutionException: java.lang.Exception: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
> /* 001 */ public java.lang.Object generate(Object[] references) {
> /* 002 */   return new SpecificUnsafeProjection(references);
> /* 003 */ }
> /* 004 */
> /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
> /* 006 */
> /* 007 */   private Object[] references;
> /* 008 */   private boolean resultIsNull;
> /* 009 */   private java.lang.String argValue;
> /* 010 */   private Object[] values;
> /* 011 */   private boolean resultIsNull1;
> /* 012 */   private scala.collection.Seq argValue1;
> /* 013 */   private boolean isNull11;
> /* 014 */   private boolean value11;
> /* 015 */   private boolean isNull12;
> /* 016 */   private InternalRow value12;
> /* 017 */   private boolean isNull13;
> /* 018 */   private InternalRow value13;
> /* 019 */   private UnsafeRow result;
> /* 020 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
> /* 021 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
> /* 022 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter;
> /* 023 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter1;
> /* 024 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter1;
> /* 025 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter2;
> /* 026 */
> /* 027 */   public SpecificUnsafeProjection(Object[] references) {
> /* 028 */     this.references = references;
> /* 029 */
> /* 030 */
> /* 031 */     this.values = null;
> /* 032 */
> /* 033 */
> /* 034 */     isNull11 = false;
> /* 035 */     value11 = false;
> /* 036 */     isNull12 = false;
> /* 037 */     value12 = null;
> /* 038 */     isNull13 = false;
> /* 039 */     value13 = null;
> /* 040 */     result = new UnsafeRow(1);
> /* 041 */     this.holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result, 32);
> /* 042 */     this.rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
> /* 043 */     this.arrayWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
> /* 044 */     this.arrayWriter1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
> /* 045 */     this.rowWriter1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
> /* 046 */     this.arrayWriter2 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
> /* 047 */
> /* 048 */   }
> /* 049 */
> /* 050 */   public void initialize(int partitionIndex) {
> /* 051 */
> /* 052 */   }
> /* 053 */
> /* 054 */
> /* 055 */   private void evalIfTrueExpr(InternalRow i) {
> /* 056 */     final InternalRow value7 = null;
> /* 057 */     isNull12 = true;
> /* 058 */     value12 = value7;
> /* 059 */   }
> /* 060 */
> /* 061 */
> /* 062 */   private void evalIfCondExpr(InternalRow i) {
> /* 063 */
> /* 064 */     isNull11 = false;
> /* 065 */     value11 = ExternalMapToCatalyst_value_isNull0;
> /* 066 */   }
> /* 067 */
> /* 068 */
> /* 069 */   private void evalIfFalseExpr(InternalRow i) {
> /* 070 */     values = new Object[1];
> /* 071 */     resultIsNull1 = false;
> /* 072 */     if (!resultIsNull1) {
> /* 073 */
> /* 074 */       boolean isNull10 = true;
> /* 075 */       scala.collection.Seq value10 = null;
> /* 076 */       if (!ExternalMapToCatalyst_value_isNull0) {
> /* 077 */
> /* 078 */         isNull10 = false;
> /* 079 */         if (!isNull10) {
> /* 080 */
> /* 081 */           Object funcResult1 = null;
> /* 082 */           funcResult1 = ExternalMapToCatalyst_value0.values();
> /* 083 */           if (funcResult1 == null) {
> /* 084 */             isNull10 = true;
> /* 085 */           } else {
> /* 086 */             value10 = (scala.collection.Seq) funcResult1;
> /* 087 */           }
> /* 088 */
> /* 089 */         }
> /* 090 */         isNull10 = value10 == null;
> /* 091 */       }
> /* 092 */       resultIsNull1 = isNull10;
> /* 093 */       argValue1 = value10;
> /* 094 */     }
> /* 095 */
> /* 096 */
> /* 097 */     final ArrayData value9 = resultIsNull1 ? null : new org.apache.spark.sql.catalyst.util.GenericArrayData(argValue1);
> /* 098 */     if (resultIsNull1) {
> /* 099 */       values[0] = null;
> /* 100 */     } else {
> /* 101 */       values[0] = value9;
> /* 102 */     }
> /* 103 */     final InternalRow value8 = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values);
> /* 104 */     this.values = null;
> /* 105 */     isNull13 = false;
> /* 106 */     value13 = value8;
> /* 107 */   }
> /* 108 */
> /* 109 */
> /* 110 */   // Scala.Function1 need this
> /* 111 */   public java.lang.Object apply(java.lang.Object row) {
> /* 112 */     return apply((InternalRow) row);
> /* 113 */   }
> /* 114 */
> /* 115 */   public UnsafeRow apply(InternalRow i) {
> /* 116 */     holder.reset();
> /* 117 */
> /* 118 */     rowWriter.zeroOutNullBytes();
> /* 119 */
> /* 120 */
> /* 121 */     boolean isNull3 = i.isNullAt(0);
> /* 122 */     local.ItemProperties value3 = isNull3 ? null : ((local.ItemProperties)i.get(0, null));
> /* 123 */
> /* 124 */     if (isNull3) {
> /* 125 */       throw new RuntimeException(((java.lang.String) references[0]));
> /* 126 */     }
> /* 127 */     boolean isNull1 = true;
> /* 128 */     scala.collection.immutable.Map value1 = null;
> /* 129 */     if (!false) {
> /* 130 */
> /* 131 */       isNull1 = false;
> /* 132 */       if (!isNull1) {
> /* 133 */
> /* 134 */         Object funcResult = null;
> /* 135 */         funcResult = value3.properties();
> /* 136 */         if (funcResult == null) {
> /* 137 */           isNull1 = true;
> /* 138 */         } else {
> /* 139 */           value1 = (scala.collection.immutable.Map) funcResult;
> /* 140 */         }
> /* 141 */
> /* 142 */       }
> /* 143 */       isNull1 = value1 == null;
> /* 144 */     }
> /* 145 */     MapData value = null;
> /* 146 */     if (!isNull1) {
> /* 147 */       final int length = value1.size();
> /* 148 */       final Object[] convertedKeys = new Object[length];
> /* 149 */       final Object[] convertedValues = new Object[length];
> /* 150 */       int index = 0;
> /* 151 */       final scala.collection.Iterator entries = value1.iterator();
> /* 152 */       while(entries.hasNext()) {
> /* 153 */
> /* 154 */         final scala.Tuple2 entry = (scala.Tuple2) entries.next();
> /* 155 */         java.lang.String ExternalMapToCatalyst_key0 = (java.lang.String) entry._1();
> /* 156 */         local.Values ExternalMapToCatalyst_value0 = (local.Values) entry._2();
> /* 157 */
> /* 158 */         boolean ExternalMapToCatalyst_value_isNull0 = ExternalMapToCatalyst_value0 == null;
> /* 159 */
> /* 160 */
> /* 161 */         resultIsNull = false;
> /* 162 */         if (!resultIsNull) {
> /* 163 */
> /* 164 */           resultIsNull = false;
> /* 165 */           argValue = ExternalMapToCatalyst_key0;
> /* 166 */         }
> /* 167 */
> /* 168 */         boolean isNull4 = resultIsNull;
> /* 169 */         final UTF8String value4 = resultIsNull ? null : org.apache.spark.unsafe.types.UTF8String.fromString(argValue);
> /* 170 */         isNull4 = value4 == null;
> /* 171 */         if (isNull4) {
> /* 172 */           throw new RuntimeException("Cannot use null as map key!");
> /* 173 */         } else {
> /* 174 */           convertedKeys[index] = (UTF8String) value4;
> /* 175 */         }
> /* 176 */
> /* 177 */
> /* 178 */         evalIfCondExpr(i);
> /* 179 */         boolean isNull5 = false;
> /* 180 */         InternalRow value5 = null;
> /* 181 */         if (!isNull11 && value11) {
> /* 182 */           evalIfTrueExpr(i);
> /* 183 */           isNull5 = isNull12;
> /* 184 */           value5 = value12;
> /* 185 */         } else {
> /* 186 */           evalIfFalseExpr(i);
> /* 187 */           isNull5 = isNull13;
> /* 188 */           value5 = value13;
> /* 189 */         }
> /* 190 */         if (isNull5) {
> /* 191 */           convertedValues[index] = null;
> /* 192 */         } else {
> /* 193 */           convertedValues[index] = (InternalRow) value5;
> /* 194 */         }
> /* 195 */
> /* 196 */         index++;
> /* 197 */       }
> /* 198 */
> /* 199 */       value = new org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys), new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues));
> /* 200 */     }
> /* 201 */     if (isNull1) {
> /* 202 */       rowWriter.setNullAt(0);
> /* 203 */     } else {
> /* 204 */       // Remember the current cursor so that we can calculate how many bytes are
> /* 205 */       // written later.
> /* 206 */       final int tmpCursor = holder.cursor;
> /* 207 */
> /* 208 */       if (value instanceof UnsafeMapData) {
> /* 209 */
> /* 210 */         final int sizeInBytes = ((UnsafeMapData) value).getSizeInBytes();
> /* 211 */         // grow the global buffer before writing data.
> /* 212 */         holder.grow(sizeInBytes);
> /* 213 */         ((UnsafeMapData) value).writeToMemory(holder.buffer, holder.cursor);
> /* 214 */         holder.cursor += sizeInBytes;
> /* 215 */
> /* 216 */       } else {
> /* 217 */         final ArrayData keys = value.keyArray();
> /* 218 */         final ArrayData values1 = value.valueArray();
> /* 219 */
> /* 220 */         // preserve 8 bytes to write the key array numBytes later.
> /* 221 */         holder.grow(8);
> /* 222 */         holder.cursor += 8;
> /* 223 */
> /* 224 */         // Remember the current cursor so that we can write numBytes of key array later.
> /* 225 */         final int tmpCursor1 = holder.cursor;
> /* 226 */
> /* 227 */
> /* 228 */         if (keys instanceof UnsafeArrayData) {
> /* 229 */
> /* 230 */           final int sizeInBytes1 = ((UnsafeArrayData) keys).getSizeInBytes();
> /* 231 */           // grow the global buffer before writing data.
> /* 232 */           holder.grow(sizeInBytes1);
> /* 233 */           ((UnsafeArrayData) keys).writeToMemory(holder.buffer, holder.cursor);
> /* 234 */           holder.cursor += sizeInBytes1;
> /* 235 */
> /* 236 */         } else {
> /* 237 */           final int numElements = keys.numElements();
> /* 238 */           arrayWriter.initialize(holder, numElements, 8);
> /* 239 */
> /* 240 */           for (int index1 = 0; index1 < numElements; index1++) {
> /* 241 */             if (keys.isNullAt(index1)) {
> /* 242 */               arrayWriter.setNull(index1);
> /* 243 */             } else {
> /* 244 */               final UTF8String element = keys.getUTF8String(index1);
> /* 245 */               arrayWriter.write(index1, element);
> /* 246 */             }
> /* 247 */           }
> /* 248 */         }
> /* 249 */
> /* 250 */         // Write the numBytes of key array into the first 8 bytes.
> /* 251 */         Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor - tmpCursor1);
> /* 252 */
> /* 253 */
> /* 254 */         if (values1 instanceof UnsafeArrayData) {
> /* 255 */
> /* 256 */           final int sizeInBytes4 = ((UnsafeArrayData) values1).getSizeInBytes();
> /* 257 */           // grow the global buffer before writing data.
> /* 258 */           holder.grow(sizeInBytes4);
> /* 259 */           ((UnsafeArrayData) values1).writeToMemory(holder.buffer, holder.cursor);
> /* 260 */           holder.cursor += sizeInBytes4;
> /* 261 */
> /* 262 */         } else {
> /* 263 */           final int numElements1 = values1.numElements();
> /* 264 */           arrayWriter1.initialize(holder, numElements1, 8);
> /* 265 */
> /* 266 */           for (int index2 = 0; index2 < numElements1; index2++) {
> /* 267 */             if (values1.isNullAt(index2)) {
> /* 268 */               arrayWriter1.setNull(index2);
> /* 269 */             } else {
> /* 270 */               final InternalRow element1 = values1.getStruct(index2, 1);
> /* 271 */
> /* 272 */               final int tmpCursor3 = holder.cursor;
> /* 273 */
> /* 274 */               if (element1 instanceof UnsafeRow) {
> /* 275 */
> /* 276 */                 final int sizeInBytes2 = ((UnsafeRow) element1).getSizeInBytes();
> /* 277 */                 // grow the global buffer before writing data.
> /* 278 */                 holder.grow(sizeInBytes2);
> /* 279 */                 ((UnsafeRow) element1).writeToMemory(holder.buffer, holder.cursor);
> /* 280 */                 holder.cursor += sizeInBytes2;
> /* 281 */
> /* 282 */               } else {
> /* 283 */                 rowWriter1.reset();
> /* 284 */
> /* 285 */                 final ArrayData fieldName = element1.getArray(0);
> /* 286 */                 if (element1.isNullAt(0)) {
> /* 287 */                   rowWriter1.setNullAt(0);
> /* 288 */                 } else {
> /* 289 */                   // Remember the current cursor so that we can calculate how many bytes are
> /* 290 */                   // written later.
> /* 291 */                   final int tmpCursor4 = holder.cursor;
> /* 292 */
> /* 293 */                   if (fieldName instanceof UnsafeArrayData) {
> /* 294 */
> /* 295 */                     final int sizeInBytes3 = ((UnsafeArrayData) fieldName).getSizeInBytes();
> /* 296 */                     // grow the global buffer before writing data.
> /* 297 */                     holder.grow(sizeInBytes3);
> /* 298 */                     ((UnsafeArrayData) fieldName).writeToMemory(holder.buffer, holder.cursor);
> /* 299 */                     holder.cursor += sizeInBytes3;
> /* 300 */
> /* 301 */                   } else {
> /* 302 */                     final int numElements2 = fieldName.numElements();
> /* 303 */                     arrayWriter2.initialize(holder, numElements2, 8);
> /* 304 */
> /* 305 */                     for (int index3 = 0; index3 < numElements2; index3++) {
> /* 306 */                       if (fieldName.isNullAt(index3)) {
> /* 307 */                         arrayWriter2.setNullDouble(index3);
> /* 308 */                       } else {
> /* 309 */                         final double element2 = fieldName.getDouble(index3);
> /* 310 */                         arrayWriter2.write(index3, element2);
> /* 311 */                       }
> /* 312 */                     }
> /* 313 */                   }
> /* 314 */
> /* 315 */                   rowWriter1.setOffsetAndSize(0, tmpCursor4, holder.cursor - tmpCursor4);
> /* 316 */                 }
> /* 317 */               }
> /* 318 */
> /* 319 */               arrayWriter1.setOffsetAndSize(index2, tmpCursor3, holder.cursor - tmpCursor3);
> /* 320 */
> /* 321 */             }
> /* 322 */           }
> /* 323 */         }
> /* 324 */
> /* 325 */       }
> /* 326 */
> /* 327 */       rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor - tmpCursor);
> /* 328 */     }
> /* 329 */     result.setTotalSize(holder.totalSize());
> /* 330 */     return result;
> /* 331 */   }
> /* 332 */ }
> 	at org.spark_project.guava.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
> 	at org.spark_project.guava.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
> 	at org.spark_project.guava.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
> 	at org.spark_project.guava.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
> 	at org.spark_project.guava.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
> 	at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
> 	at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> 	at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
> 	at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
> 	at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> 	at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:890)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:405)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:359)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:874)
> 	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:266)
> 	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:266)
> 	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:290)
> 	... 17 more
> Caused by: java.lang.Exception: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
> [generated code /* 001 */ through /* 332 */ repeated verbatim inside the exception message; identical to the listing above]
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:941)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:998)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:995)
> 	at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> 	at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> 	... 30 more
> Caused by: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
> 	at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:11004)
> 	at org.codehaus.janino.UnitCompiler.toRvalueOrCompileException(UnitCompiler.java:6639)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:5001)
> 	at org.codehaus.janino.UnitCompiler.access$10500(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$13.visitAmbiguousName(UnitCompiler.java:4984)
> 	at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:3633)
> 	at org.codehaus.janino.Java$Lvalue.accept(Java.java:3563)
> 	at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4956)
> 	at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4925)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3189)
> 	at org.codehaus.janino.UnitCompiler.access$5100(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3143)
> 	at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3139)
> 	at org.codehaus.janino.Java$Assignment.accept(Java.java:3847)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3139)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2112)
> 	at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1377)
> 	at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1370)
> 	at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:2558)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
> 	at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2811)
> 	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1262)
> 	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1234)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:538)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:890)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:894)
> 	at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:377)
> 	at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:369)
> 	at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1128)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
> 	at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1209)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:564)
> 	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:420)
> 	at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:206)
> 	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:374)
> 	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:369)
> 	at org.codehaus.janino.Java$AbstractPackageMemberClassDeclaration.accept(Java.java:1309)
> 	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
> 	at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:345)
> 	at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:396)
> 	at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:311)
> 	at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:229)
> 	at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:196)
> 	at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:91)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:935)
> 	... 34 more
> 17/07/12 21:59:35 INFO SparkContext: Invoking stop() from shutdown hook
> 17/07/12 21:59:35 INFO SparkUI: Stopped Spark web UI at http://192.168.0.101:4040
> 17/07/12 21:59:35 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
> 17/07/12 21:59:35 INFO MemoryStore: MemoryStore cleared
> 17/07/12 21:59:35 INFO BlockManager: BlockManager stopped
> 17/07/12 21:59:35 INFO BlockManagerMaster: BlockManagerMaster stopped
> 17/07/12 21:59:35 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
> 17/07/12 21:59:35 INFO SparkContext: Successfully stopped SparkContext
> 17/07/12 21:59:35 INFO ShutdownHookManager: Shutdown hook called
> 17/07/12 21:59:35 INFO ShutdownHookManager: Deleting directory /private/var/folders/fw/lg6m2xps12bfwd7x0hj3rpzm0000gn/T/spark-5587bc9b-fc45-4b9a-a7b4-6d5cfec48eaf
> Process finished with exit code 1
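
A note on what the log above seems to show: the Janino failure ("Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue") points at the generated code itself. ExternalMapToCatalyst_value_isNull0 is declared as a local variable inside the while loop of apply() (generated line /* 158 */), but the split-out helpers evalIfCondExpr() and evalIfFalseExpr() (generated lines /* 065 */ and /* 076 */) also reference it, where it is not in scope, so the generated class cannot compile.

If staying on 2.1.0 for now, one possible way to sidestep that code path is to avoid the case-class encoder for this column and build the DataFrame from an explicit schema plus hand-built Rows. This is only a minimal, untested sketch; the object name, sample keys and sample values are illustrative and not taken from the report:

{code:scala}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types._

object MapOfSeqWorkaround {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("map-of-seq-workaround")
      .getOrCreate()

    // Column type equivalent to Map[String, struct<values: array<double>>].
    val schema = StructType(Seq(
      StructField("properties",
        MapType(
          StringType,
          StructType(Seq(StructField("values", ArrayType(DoubleType))))))))

    // Hand-built Rows instead of case classes, so the conversion does not go
    // through the case-class encoder that emits the failing projection above.
    val rows = Seq(
      Row(Map("k1" -> Row(Seq(1.0, 2.0)), "k2" -> Row(Seq(3.0, 4.0)))),
      Row(Map("k3" -> Row(Seq(5.0, 6.0)))))

    val df = spark.createDataFrame(spark.sparkContext.parallelize(rows), schema)
    df.show(false)

    spark.stop()
  }
}
{code}

The trade-off is losing the typed Dataset API for this column, but the map-of-struct-of-array shape is expressed directly in the schema, so nothing has to be derived from the case classes at runtime.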


