Posted to issues@spark.apache.org by "indraneel rao (JIRA)" <ji...@apache.org> on 2017/07/12 16:26:00 UTC

[jira] [Created] (SPARK-21391) Cannot convert a Seq of Map whose value type is again a seq, into a dataset

indraneel rao created SPARK-21391:
-------------------------------------

             Summary: Cannot convert a Seq of Map whose value type is again a seq, into a dataset 
                 Key: SPARK-21391
                 URL: https://issues.apache.org/jira/browse/SPARK-21391
             Project: Spark
          Issue Type: Bug
          Components: SQL
    Affects Versions: 2.1.0
         Environment: Seen on macOS, Scala 2.11, Java 8
            Reporter: indraneel rao


There is an error when trying to create a dataset from a sequence of Maps whose values contain any kind of collection, even when the collection is wrapped in a case class.
E.g., the following piece of code throws an error:
{code:java}
import spark.implicits._ // needed for .toDS()

case class Values(values: Option[Seq[Double]])
case class ItemProperties(properties: Map[String, Values])

// Sanity check: a top-level collection converts fine...
val values3 = Set(1.0, 2.0, 3.0)
spark.createDataset(Seq(values3)).show()

// ...but a Map whose values wrap a Seq does not:
val l1 = List(
  ItemProperties(Map(
    "A1" -> Values(Some(Seq(1.0, 2.0))),
    "B1" -> Values(Some(Seq(44.0, 55.0)))
  )),
  ItemProperties(Map(
    "A2" -> Values(Some(Seq(123.0, 25.0))),
    "B2" -> Values(Some(Seq(445.0, 35.0)))
  ))
)

l1.toDS().show() // throws the CompileException below
{code}
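
For contrast, the same case class appears to encode fine when it is not used as a Map value, which points at the ExternalMapToCatalyst code path. A minimal sketch (assumed behaviour, not re-verified on 2.1.0):

{code:java}
// Sketch: a Seq of the same case class, with no Map involved.
val plain = Seq(
  Values(Some(Seq(1.0, 2.0))),
  Values(Some(Seq(44.0, 55.0)))
)
plain.toDS().show() // works; only the Map-valued variant fails
{code}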


Here's the error. Note that the generated evalIfCondExpr (line 65 of the generated code below) reads ExternalMapToCatalyst_value_isNull0, but that variable is only declared as a local inside the while loop in apply() (line 162), so it is out of scope once the conditional is split out into a private method, and Janino rejects it:

17/07/12 21:49:31 ERROR CodeGenerator: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
/* 001 */ public java.lang.Object generate(Object[] references) {
/* 002 */   return new SpecificUnsafeProjection(references);
/* 003 */ }
/* 004 */
/* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
/* 006 */
/* 007 */   private Object[] references;
/* 008 */   private boolean resultIsNull;
/* 009 */   private java.lang.String argValue;
/* 010 */   private Object[] values;
/* 011 */   private boolean resultIsNull1;
/* 012 */   private scala.collection.Seq argValue1;
/* 013 */   private boolean isNull12;
/* 014 */   private boolean value12;
/* 015 */   private boolean isNull13;
/* 016 */   private InternalRow value13;
/* 017 */   private boolean isNull14;
/* 018 */   private InternalRow value14;
/* 019 */   private UnsafeRow result;
/* 020 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
/* 021 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
/* 022 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter;
/* 023 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter1;
/* 024 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter1;
/* 025 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter arrayWriter2;
/* 026 */
/* 027 */   public SpecificUnsafeProjection(Object[] references) {
/* 028 */     this.references = references;
/* 029 */
/* 030 */
/* 031 */     this.values = null;
/* 032 */
/* 033 */
/* 034 */     isNull12 = false;
/* 035 */     value12 = false;
/* 036 */     isNull13 = false;
/* 037 */     value13 = null;
/* 038 */     isNull14 = false;
/* 039 */     value14 = null;
/* 040 */     result = new UnsafeRow(1);
/* 041 */     this.holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result, 32);
/* 042 */     this.rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 043 */     this.arrayWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 044 */     this.arrayWriter1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 045 */     this.rowWriter1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 1);
/* 046 */     this.arrayWriter2 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter();
/* 047 */
/* 048 */   }
/* 049 */
/* 050 */   public void initialize(int partitionIndex) {
/* 051 */
/* 052 */   }
/* 053 */
/* 054 */
/* 055 */   private void evalIfTrueExpr(InternalRow i) {
/* 056 */     final InternalRow value7 = null;
/* 057 */     isNull13 = true;
/* 058 */     value13 = value7;
/* 059 */   }
/* 060 */
/* 061 */
/* 062 */   private void evalIfCondExpr(InternalRow i) {
/* 063 */
/* 064 */     isNull12 = false;
/* 065 */     value12 = ExternalMapToCatalyst_value_isNull0;
/* 066 */   }
/* 067 */
/* 068 */
/* 069 */   private void evalIfFalseExpr(InternalRow i) {
/* 070 */     values = new Object[1];
/* 071 */     resultIsNull1 = false;
/* 072 */     if (!resultIsNull1) {
/* 073 */
/* 074 */       boolean isNull11 = true;
/* 075 */       scala.Option value11 = null;
/* 076 */       if (!ExternalMapToCatalyst_value_isNull0) {
/* 077 */
/* 078 */         isNull11 = false;
/* 079 */         if (!isNull11) {
/* 080 */
/* 081 */           Object funcResult1 = null;
/* 082 */           funcResult1 = ExternalMapToCatalyst_value0.values();
/* 083 */           if (funcResult1 == null) {
/* 084 */             isNull11 = true;
/* 085 */           } else {
/* 086 */             value11 = (scala.Option) funcResult1;
/* 087 */           }
/* 088 */
/* 089 */         }
/* 090 */         isNull11 = value11 == null;
/* 091 */       }
/* 092 */
/* 093 */       final boolean isNull10 = isNull11 || value11.isEmpty();
/* 094 */       scala.collection.Seq value10 = isNull10 ?
/* 095 */       null : (scala.collection.Seq) value11.get();
/* 096 */       resultIsNull1 = isNull10;
/* 097 */       argValue1 = value10;
/* 098 */     }
/* 099 */
/* 100 */
/* 101 */     final ArrayData value9 = resultIsNull1 ? null : new org.apache.spark.sql.catalyst.util.GenericArrayData(argValue1);
/* 102 */     if (resultIsNull1) {
/* 103 */       values[0] = null;
/* 104 */     } else {
/* 105 */       values[0] = value9;
/* 106 */     }
/* 107 */     final InternalRow value8 = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values);
/* 108 */     this.values = null;
/* 109 */     isNull14 = false;
/* 110 */     value14 = value8;
/* 111 */   }
/* 112 */
/* 113 */
/* 114 */   // Scala.Function1 need this
/* 115 */   public java.lang.Object apply(java.lang.Object row) {
/* 116 */     return apply((InternalRow) row);
/* 117 */   }
/* 118 */
/* 119 */   public UnsafeRow apply(InternalRow i) {
/* 120 */     holder.reset();
/* 121 */
/* 122 */     rowWriter.zeroOutNullBytes();
/* 123 */
/* 124 */
/* 125 */     boolean isNull3 = i.isNullAt(0);
/* 126 */     local.ItemProperties value3 = isNull3 ? null : ((local.ItemProperties)i.get(0, null));
/* 127 */
/* 128 */     if (isNull3) {
/* 129 */       throw new RuntimeException(((java.lang.String) references[0]));
/* 130 */     }
/* 131 */     boolean isNull1 = true;
/* 132 */     scala.collection.immutable.Map value1 = null;
/* 133 */     if (!false) {
/* 134 */
/* 135 */       isNull1 = false;
/* 136 */       if (!isNull1) {
/* 137 */
/* 138 */         Object funcResult = null;
/* 139 */         funcResult = value3.properties();
/* 140 */         if (funcResult == null) {
/* 141 */           isNull1 = true;
/* 142 */         } else {
/* 143 */           value1 = (scala.collection.immutable.Map) funcResult;
/* 144 */         }
/* 145 */
/* 146 */       }
/* 147 */       isNull1 = value1 == null;
/* 148 */     }
/* 149 */     MapData value = null;
/* 150 */     if (!isNull1) {
/* 151 */       final int length = value1.size();
/* 152 */       final Object[] convertedKeys = new Object[length];
/* 153 */       final Object[] convertedValues = new Object[length];
/* 154 */       int index = 0;
/* 155 */       final scala.collection.Iterator entries = value1.iterator();
/* 156 */       while(entries.hasNext()) {
/* 157 */
/* 158 */         final scala.Tuple2 entry = (scala.Tuple2) entries.next();
/* 159 */         java.lang.String ExternalMapToCatalyst_key0 = (java.lang.String) entry._1();
/* 160 */         local.Values ExternalMapToCatalyst_value0 = (local.Values) entry._2();
/* 161 */
/* 162 */         boolean ExternalMapToCatalyst_value_isNull0 = ExternalMapToCatalyst_value0 == null;
/* 163 */
/* 164 */
/* 165 */         resultIsNull = false;
/* 166 */         if (!resultIsNull) {
/* 167 */
/* 168 */           resultIsNull = false;
/* 169 */           argValue = ExternalMapToCatalyst_key0;
/* 170 */         }
/* 171 */
/* 172 */         boolean isNull4 = resultIsNull;
/* 173 */         final UTF8String value4 = resultIsNull ? null : org.apache.spark.unsafe.types.UTF8String.fromString(argValue);
/* 174 */         isNull4 = value4 == null;
/* 175 */         if (isNull4) {
/* 176 */           throw new RuntimeException("Cannot use null as map key!");
/* 177 */         } else {
/* 178 */           convertedKeys[index] = (UTF8String) value4;
/* 179 */         }
/* 180 */
/* 181 */
/* 182 */         evalIfCondExpr(i);
/* 183 */         boolean isNull5 = false;
/* 184 */         InternalRow value5 = null;
/* 185 */         if (!isNull12 && value12) {
/* 186 */           evalIfTrueExpr(i);
/* 187 */           isNull5 = isNull13;
/* 188 */           value5 = value13;
/* 189 */         } else {
/* 190 */           evalIfFalseExpr(i);
/* 191 */           isNull5 = isNull14;
/* 192 */           value5 = value14;
/* 193 */         }
/* 194 */         if (isNull5) {
/* 195 */           convertedValues[index] = null;
/* 196 */         } else {
/* 197 */           convertedValues[index] = (InternalRow) value5;
/* 198 */         }
/* 199 */
/* 200 */         index++;
/* 201 */       }
/* 202 */
/* 203 */       value = new org.apache.spark.sql.catalyst.util.ArrayBasedMapData(new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedKeys), new org.apache.spark.sql.catalyst.util.GenericArrayData(convertedValues));
/* 204 */     }
/* 205 */     if (isNull1) {
/* 206 */       rowWriter.setNullAt(0);
/* 207 */     } else {
/* 208 */       // Remember the current cursor so that we can calculate how many bytes are
/* 209 */       // written later.
/* 210 */       final int tmpCursor = holder.cursor;
/* 211 */
/* 212 */       if (value instanceof UnsafeMapData) {
/* 213 */
/* 214 */         final int sizeInBytes = ((UnsafeMapData) value).getSizeInBytes();
/* 215 */         // grow the global buffer before writing data.
/* 216 */         holder.grow(sizeInBytes);
/* 217 */         ((UnsafeMapData) value).writeToMemory(holder.buffer, holder.cursor);
/* 218 */         holder.cursor += sizeInBytes;
/* 219 */
/* 220 */       } else {
/* 221 */         final ArrayData keys = value.keyArray();
/* 222 */         final ArrayData values1 = value.valueArray();
/* 223 */
/* 224 */         // preserve 8 bytes to write the key array numBytes later.
/* 225 */         holder.grow(8);
/* 226 */         holder.cursor += 8;
/* 227 */
/* 228 */         // Remember the current cursor so that we can write numBytes of key array later.
/* 229 */         final int tmpCursor1 = holder.cursor;
/* 230 */
/* 231 */
/* 232 */         if (keys instanceof UnsafeArrayData) {
/* 233 */
/* 234 */           final int sizeInBytes1 = ((UnsafeArrayData) keys).getSizeInBytes();
/* 235 */           // grow the global buffer before writing data.
/* 236 */           holder.grow(sizeInBytes1);
/* 237 */           ((UnsafeArrayData) keys).writeToMemory(holder.buffer, holder.cursor);
/* 238 */           holder.cursor += sizeInBytes1;
/* 239 */
/* 240 */         } else {
/* 241 */           final int numElements = keys.numElements();
/* 242 */           arrayWriter.initialize(holder, numElements, 8);
/* 243 */
/* 244 */           for (int index1 = 0; index1 < numElements; index1++) {
/* 245 */             if (keys.isNullAt(index1)) {
/* 246 */               arrayWriter.setNull(index1);
/* 247 */             } else {
/* 248 */               final UTF8String element = keys.getUTF8String(index1);
/* 249 */               arrayWriter.write(index1, element);
/* 250 */             }
/* 251 */           }
/* 252 */         }
/* 253 */
/* 254 */         // Write the numBytes of key array into the first 8 bytes.
/* 255 */         Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor - tmpCursor1);
/* 256 */
/* 257 */
/* 258 */         if (values1 instanceof UnsafeArrayData) {
/* 259 */
/* 260 */           final int sizeInBytes4 = ((UnsafeArrayData) values1).getSizeInBytes();
/* 261 */           // grow the global buffer before writing data.
/* 262 */           holder.grow(sizeInBytes4);
/* 263 */           ((UnsafeArrayData) values1).writeToMemory(holder.buffer, holder.cursor);
/* 264 */           holder.cursor += sizeInBytes4;
/* 265 */
/* 266 */         } else {
/* 267 */           final int numElements1 = values1.numElements();
/* 268 */           arrayWriter1.initialize(holder, numElements1, 8);
/* 269 */
/* 270 */           for (int index2 = 0; index2 < numElements1; index2++) {
/* 271 */             if (values1.isNullAt(index2)) {
/* 272 */               arrayWriter1.setNull(index2);
/* 273 */             } else {
/* 274 */               final InternalRow element1 = values1.getStruct(index2, 1);
/* 275 */
/* 276 */               final int tmpCursor3 = holder.cursor;
/* 277 */
/* 278 */               if (element1 instanceof UnsafeRow) {
/* 279 */
/* 280 */                 final int sizeInBytes2 = ((UnsafeRow) element1).getSizeInBytes();
/* 281 */                 // grow the global buffer before writing data.
/* 282 */                 holder.grow(sizeInBytes2);
/* 283 */                 ((UnsafeRow) element1).writeToMemory(holder.buffer, holder.cursor);
/* 284 */                 holder.cursor += sizeInBytes2;
/* 285 */
/* 286 */               } else {
/* 287 */                 rowWriter1.reset();
/* 288 */
/* 289 */                 final ArrayData fieldName = element1.getArray(0);
/* 290 */                 if (element1.isNullAt(0)) {
/* 291 */                   rowWriter1.setNullAt(0);
/* 292 */                 } else {
/* 293 */                   // Remember the current cursor so that we can calculate how many bytes are
/* 294 */                   // written later.
/* 295 */                   final int tmpCursor4 = holder.cursor;
/* 296 */
/* 297 */                   if (fieldName instanceof UnsafeArrayData) {
/* 298 */
/* 299 */                     final int sizeInBytes3 = ((UnsafeArrayData) fieldName).getSizeInBytes();
/* 300 */                     // grow the global buffer before writing data.
/* 301 */                     holder.grow(sizeInBytes3);
/* 302 */                     ((UnsafeArrayData) fieldName).writeToMemory(holder.buffer, holder.cursor);
/* 303 */                     holder.cursor += sizeInBytes3;
/* 304 */
/* 305 */                   } else {
/* 306 */                     final int numElements2 = fieldName.numElements();
/* 307 */                     arrayWriter2.initialize(holder, numElements2, 8);
/* 308 */
/* 309 */                     for (int index3 = 0; index3 < numElements2; index3++) {
/* 310 */                       if (fieldName.isNullAt(index3)) {
/* 311 */                         arrayWriter2.setNullDouble(index3);
/* 312 */                       } else {
/* 313 */                         final double element2 = fieldName.getDouble(index3);
/* 314 */                         arrayWriter2.write(index3, element2);
/* 315 */                       }
/* 316 */                     }
/* 317 */                   }
/* 318 */
/* 319 */                   rowWriter1.setOffsetAndSize(0, tmpCursor4, holder.cursor - tmpCursor4);
/* 320 */                 }
/* 321 */               }
/* 322 */
/* 323 */               arrayWriter1.setOffsetAndSize(index2, tmpCursor3, holder.cursor - tmpCursor3);
/* 324 */
/* 325 */             }
/* 326 */           }
/* 327 */         }
/* 328 */
/* 329 */       }
/* 330 */
/* 331 */       rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor - tmpCursor);
/* 332 */     }
/* 333 */     result.setTotalSize(holder.totalSize());
/* 334 */     return result;
/* 335 */   }
/* 336 */ }

org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
	at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:11004)
	at org.codehaus.janino.UnitCompiler.toRvalueOrCompileException(UnitCompiler.java:6639)
	at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:5001)
	at org.codehaus.janino.UnitCompiler.access$10500(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$13.visitAmbiguousName(UnitCompiler.java:4984)
	at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:3633)
	at org.codehaus.janino.Java$Lvalue.accept(Java.java:3563)
	at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4956)
	at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4925)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3189)
	at org.codehaus.janino.UnitCompiler.access$5100(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3143)
	at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3139)
	at org.codehaus.janino.Java$Assignment.accept(Java.java:3847)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3139)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2112)
	at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1377)
	at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1370)
	at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:2558)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
	at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2811)
	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1262)
	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1234)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:538)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:890)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:894)
	at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:377)
	at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:369)
	at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1128)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
	at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1209)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:564)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:420)
	at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:374)
	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:369)
	at org.codehaus.janino.Java$AbstractPackageMemberClassDeclaration.accept(Java.java:1309)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
	at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:345)
	at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:396)
	at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:311)
	at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:229)
	at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:196)
	at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:91)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:935)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:998)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:995)
	at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
	at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
	at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
	at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
	at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
	at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
	at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:890)
	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:405)
	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:359)
	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:874)
	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:266)
	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:266)
	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:290)
	at org.apache.spark.sql.SparkSession$$anonfun$2.apply(SparkSession.scala:429)
	at org.apache.spark.sql.SparkSession$$anonfun$2.apply(SparkSession.scala:429)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
	at scala.collection.immutable.List.foreach(List.scala:383)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
	at scala.collection.immutable.List.map(List.scala:286)
	at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:429)
	at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:389)
	at org.apache.spark.sql.SQLImplicits.localSeqToDatasetHolder(SQLImplicits.scala:173)
	at local.TestApp$.main(TestApp.scala:63)
	at local.TestApp.main(TestApp.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
Exception in thread "main" java.lang.RuntimeException: Error while encoding: java.util.concurrent.ExecutionException: java.lang.Exception: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
[... 336 lines of generated SpecificUnsafeProjection code omitted; identical to the listing above ...]

externalmaptocatalyst(ExternalMapToCatalyst_key0, ObjectType(class java.lang.String), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key0, false, ObjectType(class java.lang.String)), true), ExternalMapToCatalyst_value0, ExternalMapToCatalyst_value_isNull0, ObjectType(class local.Values), if (isnull(lambdavariable(ExternalMapToCatalyst_value0, ExternalMapToCatalyst_value_isNull0, ObjectType(class local.Values)))) null else named_struct(values, newInstance(class org.apache.spark.sql.catalyst.util.GenericArrayData)), assertnotnull(input[0, local.ItemProperties, true], top level Product input object).properties) AS properties#0
+- externalmaptocatalyst(ExternalMapToCatalyst_key0, ObjectType(class java.lang.String), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, lambdavariable(ExternalMapToCatalyst_key0, false, ObjectType(class java.lang.String)), true), ExternalMapToCatalyst_value0, ExternalMapToCatalyst_value_isNull0, ObjectType(class local.Values), if (isnull(lambdavariable(ExternalMapToCatalyst_value0, ExternalMapToCatalyst_value_isNull0, ObjectType(class local.Values)))) null else named_struct(values, newInstance(class org.apache.spark.sql.catalyst.util.GenericArrayData)), assertnotnull(input[0, local.ItemProperties, true], top level Product input object).properties)
   +- assertnotnull(input[0, local.ItemProperties, true], top level Product input object).properties
      +- assertnotnull(input[0, local.ItemProperties, true], top level Product input object)
         +- input[0, local.ItemProperties, true]

	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:293)
	at org.apache.spark.sql.SparkSession$$anonfun$2.apply(SparkSession.scala:429)
	at org.apache.spark.sql.SparkSession$$anonfun$2.apply(SparkSession.scala:429)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
	at scala.collection.immutable.List.foreach(List.scala:383)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
	at scala.collection.immutable.List.map(List.scala:286)
	at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:429)
	at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:389)
	at org.apache.spark.sql.SQLImplicits.localSeqToDatasetHolder(SQLImplicits.scala:173)
	at local.TestApp$.main(TestApp.scala:63)
	at local.TestApp.main(TestApp.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
Caused by: java.util.concurrent.ExecutionException: java.lang.Exception: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
[... 336 lines of generated SpecificUnsafeProjection code omitted; identical to the listing above ...]

	at org.spark_project.guava.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
	at org.spark_project.guava.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
	at org.spark_project.guava.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
	at org.spark_project.guava.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
	at org.spark_project.guava.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
	at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
	at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
	at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
	at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
	at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
	at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:890)
	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:405)
	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:359)
	at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:874)
	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection$lzycompute(ExpressionEncoder.scala:266)
	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.extractProjection(ExpressionEncoder.scala:266)
	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder.toRow(ExpressionEncoder.scala:290)
	... 17 more
Caused by: java.lang.Exception: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
[... 336 lines of generated code omitted here; the compiler exception repeats, verbatim, the full generated.java listing already shown above ...]

	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:941)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:998)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:995)
	at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
	at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
	... 30 more
Caused by: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 65, Column 46: Expression "ExternalMapToCatalyst_value_isNull0" is not an rvalue
	at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:11004)
	at org.codehaus.janino.UnitCompiler.toRvalueOrCompileException(UnitCompiler.java:6639)
	at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:5001)
	at org.codehaus.janino.UnitCompiler.access$10500(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$13.visitAmbiguousName(UnitCompiler.java:4984)
	at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:3633)
	at org.codehaus.janino.Java$Lvalue.accept(Java.java:3563)
	at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4956)
	at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4925)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3189)
	at org.codehaus.janino.UnitCompiler.access$5100(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3143)
	at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3139)
	at org.codehaus.janino.Java$Assignment.accept(Java.java:3847)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3139)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2112)
	at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1377)
	at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1370)
	at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:2558)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
	at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2811)
	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1262)
	at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1234)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:538)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:890)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:894)
	at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:377)
	at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:369)
	at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1128)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
	at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1209)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:564)
	at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:420)
	at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:206)
	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:374)
	at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:369)
	at org.codehaus.janino.Java$AbstractPackageMemberClassDeclaration.accept(Java.java:1309)
	at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
	at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:345)
	at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:396)
	at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:311)
	at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:229)
	at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:196)
	at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:91)
	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:935)
	... 34 more
17/07/12 21:49:31 INFO SparkContext: Invoking stop() from shutdown hook
17/07/12 21:49:31 INFO SparkUI: Stopped Spark web UI at http://192.168.0.101:4040
17/07/12 21:49:31 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
17/07/12 21:49:31 INFO MemoryStore: MemoryStore cleared
17/07/12 21:49:31 INFO BlockManager: BlockManager stopped
17/07/12 21:49:31 INFO BlockManagerMaster: BlockManagerMaster stopped
17/07/12 21:49:31 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
17/07/12 21:49:31 INFO SparkContext: Successfully stopped SparkContext
17/07/12 21:49:31 INFO ShutdownHookManager: Shutdown hook called
17/07/12 21:49:31 INFO ShutdownHookManager: Deleting directory /private/var/folders/fw/lg6m2xps12bfwd7x0hj3rpzm0000gn/T/spark-886b188a-a3c3-460e-96a0-b7a4d1aa09de

Process finished with exit code 1
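
A note on the failure, from reading the generated code above: this looks like a scoping problem in the code generated for ExternalMapToCatalyst. The value-null flag is declared as a local variable inside the while loop of apply():

{code:java}
/* 162 */         boolean ExternalMapToCatalyst_value_isNull0 = ExternalMapToCatalyst_value0 == null;
{code}

but it is then referenced from the separate helper method evalIfCondExpr(), where that loop-local variable is not in scope, which is presumably why Janino rejects it as "not an rvalue" at generated line 65:

{code:java}
/* 065 */     value12 = ExternalMapToCatalyst_value_isNull0;
{code}

Until the codegen is fixed, one possible workaround (a sketch only, not verified against this exact case) is to sidestep the generated product encoder entirely with a Kryo encoder. The trade-off is that each ItemProperties is then stored as a single binary "value" column, so its fields are no longer individually queryable:

{code:java}
import org.apache.spark.sql.{Encoder, Encoders}

// Sketch of a workaround: Kryo-serialize the whole object instead of relying on
// the reflective encoder that triggers the ExternalMapToCatalyst codegen path.
implicit val itemPropsEncoder: Encoder[ItemProperties] = Encoders.kryo[ItemProperties]

val ds = spark.createDataset(l1)  // l1: the List[ItemProperties] from the snippet above
ds.show()                         // one binary "value" column rather than named fields
{code}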