You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@spark.apache.org by Todd <bi...@163.com> on 2016/01/28 08:31:24 UTC

Compile error when compiling spark 2.0.0 snapshot code base in IDEA

Hi,
I am able to run a Maven install of the whole Spark project (from GitHub) in IDEA.

But when I run the SparkPi example, IDEA compiles the code again and the following exception is thrown.

Has anyone encountered this problem? Thanks a lot.





Error:scalac:
     while compiling: D:\opensourceprojects\spark\sql\core\src\main\scala\org\apache\spark\sql\util\QueryExecutionListener.scala
        during phase: jvm
     library version: version 2.10.4
    compiler version: version 2.10.4
  reconstructed args: -nobootcp -deprecation -feature -javabootclasspath ; -unchecked -classpath C:\jdk1.7.0_80\jre\lib\charsets.jar;C:\jdk1.7.0_80\jre\lib\deploy.jar;C:\jdk1.7.0_80\jre\lib\javaws.jar;C:\jdk1.7.0_80\jre\lib\jce.jar;C:\jdk1.7.0_80\jre\lib\jfr.jar;C:\jdk1.7.0_80\jre\lib\jfxrt.jar;C:\jdk1.7.0_80\jre\lib\jsse.jar;C:\jdk1.7.0_80\jre\lib\management-agent.jar;C:\jdk1.7.0_80\jre\lib\plugin.jar;C:\jdk1.7.0_80\jre\lib\resources.jar;C:\jdk1.7.0_80\jre\lib\rt.jar;C:\jdk1.7.0_80\jre\lib\ext\access-bridge-64.jar;C:\jdk1.7.0_80\jre\lib\ext\dnsns.jar;C:\jdk1.7.0_80\jre\lib\ext\jaccess.jar;C:\jdk1.7.0_80\jre\lib\ext\localedata.jar;C:\jdk1.7.0_80\jre\lib\ext\sunec.jar;C:\jdk1.7.0_80\jre\lib\ext\sunjce_provider.jar;C:\jdk1.7.0_80\jre\lib\ext\sunmscapi.jar;C:\jdk1.7.0_80\jre\lib\ext\zipfs.jar;D:\opensourceprojects\spark\sql\core\target\scala-2.10\classes;C:\Users\yuzhitao\.m2\repository\com\univocity\univocity-parsers\1.5.6\univocity-parsers-1.5.6.jar;D:\opensourceprojects\spark\core\target\scala-2.10\classes;C:\Users\yuzhitao\.m2\repository\org\apache\avro\avro-mapred\1.7.7\avro-mapred-1.7.7-hadoop2.jar;C:\Users\yuzhitao\.m2\repository\org\apache\avro\avro-ipc\1.7.7\avro-ipc-1.7.7.jar;C:\Users\yuzhitao\.m2\repository\org\apache\avro\avro-ipc\1.7.7\avro-ipc-1.7.7-tests.jar;C:\Users\yuzhitao\.m2\repository\com\google\guava\guava\14.0.1\guava-14.0.1.jar;C:\Users\yuzhitao\.m2\repository\com\twitter\chill_2.10\0.5.0\chill_2.10-0.5.0.jar;C:\Users\yuzhitao\.m2\repository\com\esotericsoftware\kryo\kryo\2.21\kryo-2.21.jar;C:\Users\yuzhitao\.m2\repository\com\esotericsoftware\reflectasm\reflectasm\1.07\reflectasm-1.07-shaded.jar;C:\Users\yuzhitao\.m2\repository\com\esotericsoftware\minlog\minlog\1.2\minlog-1.2.jar;C:\Users\yuzhitao\.m2\repository\com\twitter\chill-java\0.5.0\chill-java-0.5.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-client\2.2.0\hadoop-client-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-common\2.2.0\hadoop-common-2.2.0
.jar;C:\Users\yuzhitao\.m2\repository\commons-cli\commons-cli\1.2\commons-cli-1.2.jar;C:\Users\yuzhitao\.m2\repository\org\apache\commons\commons-math\2.1\commons-math-2.1.jar;C:\Users\yuzhitao\.m2\repository\xmlenc\xmlenc\0.52\xmlenc-0.52.jar;C:\Users\yuzhitao\.m2\repository\commons-io\commons-io\2.1\commons-io-2.1.jar;C:\Users\yuzhitao\.m2\repository\commons-lang\commons-lang\2.6\commons-lang-2.6.jar;C:\Users\yuzhitao\.m2\repository\commons-configuration\commons-configuration\1.6\commons-configuration-1.6.jar;C:\Users\yuzhitao\.m2\repository\commons-collections\commons-collections\3.2.2\commons-collections-3.2.2.jar;C:\Users\yuzhitao\.m2\repository\commons-digester\commons-digester\1.8\commons-digester-1.8.jar;C:\Users\yuzhitao\.m2\repository\commons-beanutils\commons-beanutils\1.7.0\commons-beanutils-1.7.0.jar;C:\Users\yuzhitao\.m2\repository\commons-beanutils\commons-beanutils-core\1.8.0\commons-beanutils-core-1.8.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-auth\2.2.0\hadoop-auth-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-hdfs\2.2.0\hadoop-hdfs-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\mortbay\jetty\jetty-util\6.1.26\jetty-util-6.1.26.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-app\2.2.0\hadoop-mapreduce-client-app-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-common\2.2.0\hadoop-mapreduce-client-common-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-yarn-client\2.2.0\hadoop-yarn-client-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\com\google\inject\guice\3.0\guice-3.0.jar;C:\Users\yuzhitao\.m2\repository\javax\inject\javax.inject\1\javax.inject-1.jar;C:\Users\yuzhitao\.m2\repository\aopalliance\aopalliance\1.0\aopalliance-1.0.jar;C:\Users\yuzhitao\.m2\repository\com\sun\jersey\jersey-test-framework\jersey-test-framework-grizzly2\1.9\jersey-test-framework-grizzly2-1.9.jar;C:\Users\yuzhitao\.m2\repository\com\sun\jersey\jers
ey-test-framework\jersey-test-framework-core\1.9\jersey-test-framework-core-1.9.jar;C:\Users\yuzhitao\.m2\repository\javax\servlet\javax.servlet-api\3.0.1\javax.servlet-api-3.0.1.jar;C:\Users\yuzhitao\.m2\repository\com\sun\jersey\jersey-client\1.9\jersey-client-1.9.jar;C:\Users\yuzhitao\.m2\repository\com\sun\jersey\jersey-grizzly2\1.9\jersey-grizzly2-1.9.jar;C:\Users\yuzhitao\.m2\repository\org\glassfish\grizzly\grizzly-http\2.1.2\grizzly-http-2.1.2.jar;C:\Users\yuzhitao\.m2\repository\org\glassfish\grizzly\grizzly-framework\2.1.2\grizzly-framework-2.1.2.jar;C:\Users\yuzhitao\.m2\repository\org\glassfish\gmbal\gmbal-api-only\3.0.0-b023\gmbal-api-only-3.0.0-b023.jar;C:\Users\yuzhitao\.m2\repository\org\glassfish\external\management-api\3.0.0-b012\management-api-3.0.0-b012.jar;C:\Users\yuzhitao\.m2\repository\org\glassfish\grizzly\grizzly-http-server\2.1.2\grizzly-http-server-2.1.2.jar;C:\Users\yuzhitao\.m2\repository\org\glassfish\grizzly\grizzly-rcm\2.1.2\grizzly-rcm-2.1.2.jar;C:\Users\yuzhitao\.m2\repository\org\glassfish\grizzly\grizzly-http-servlet\2.1.2\grizzly-http-servlet-2.1.2.jar;C:\Users\yuzhitao\.m2\repository\org\glassfish\javax.servlet\3.1\javax.servlet-3.1.jar;C:\Users\yuzhitao\.m2\repository\com\sun\jersey\jersey-json\1.9\jersey-json-1.9.jar;C:\Users\yuzhitao\.m2\repository\org\codehaus\jettison\jettison\1.1\jettison-1.1.jar;C:\Users\yuzhitao\.m2\repository\com\sun\xml\bind\jaxb-impl\2.2.3-1\jaxb-impl-2.2.3-1.jar;C:\Users\yuzhitao\.m2\repository\javax\xml\bind\jaxb-api\2.2.2\jaxb-api-2.2.2.jar;C:\Users\yuzhitao\.m2\repository\javax\activation\activation\1.1\activation-1.1.jar;C:\Users\yuzhitao\.m2\repository\org\codehaus\jackson\jackson-jaxrs\1.9.13\jackson-jaxrs-1.9.13.jar;C:\Users\yuzhitao\.m2\repository\org\codehaus\jackson\jackson-xc\1.9.13\jackson-xc-1.9.13.jar;C:\Users\yuzhitao\.m2\repository\com\sun\jersey\contribs\jersey-guice\1.9\jersey-guice-1.9.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-yarn-server-common\2.2.0\hadoop-ya
rn-server-common-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-shuffle\2.2.0\hadoop-mapreduce-client-shuffle-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-yarn-api\2.2.0\hadoop-yarn-api-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-core\2.2.0\hadoop-mapreduce-client-core-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-yarn-common\2.2.0\hadoop-yarn-common-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-mapreduce-client-jobclient\2.2.0\hadoop-mapreduce-client-jobclient-2.2.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\hadoop\hadoop-annotations\2.2.0\hadoop-annotations-2.2.0.jar;D:\opensourceprojects\spark\launcher\target\scala-2.10\classes;D:\opensourceprojects\spark\network\common\target\scala-2.10\classes;D:\opensourceprojects\spark\network\shuffle\target\scala-2.10\classes;C:\Users\yuzhitao\.m2\repository\org\fusesource\leveldbjni\leveldbjni-all\1.8\leveldbjni-all-1.8.jar;D:\opensourceprojects\spark\unsafe\target\scala-2.10\classes;C:\Users\yuzhitao\.m2\repository\net\java\dev\jets3t\jets3t\0.7.1\jets3t-0.7.1.jar;C:\Users\yuzhitao\.m2\repository\commons-httpclient\commons-httpclient\3.1\commons-httpclient-3.1.jar;C:\Users\yuzhitao\.m2\repository\org\apache\curator\curator-recipes\2.4.0\curator-recipes-2.4.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\curator\curator-framework\2.4.0\curator-framework-2.4.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\curator\curator-client\2.4.0\curator-client-2.4.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\zookeeper\zookeeper\3.4.5\zookeeper-3.4.5.jar;C:\Users\yuzhitao\.m2\repository\jline\jline\0.9.94\jline-0.9.94.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-plus\8.1.14.v20131031\jetty-plus-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\orbit\javax.transaction\1.1.1.v201105210645\javax.transaction-1.1.1.v201105210645.jar;C:\Users\yuzhit
ao\.m2\repository\org\eclipse\jetty\jetty-webapp\8.1.14.v20131031\jetty-webapp-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-xml\8.1.14.v20131031\jetty-xml-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-jndi\8.1.14.v20131031\jetty-jndi-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\orbit\javax.mail.glassfish\1.4.1.v201005082020\javax.mail.glassfish-1.4.1.v201005082020.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\orbit\javax.activation\1.1.0.v201105071233\javax.activation-1.1.0.v201105071233.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-security\8.1.14.v20131031\jetty-security-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-util\8.1.14.v20131031\jetty-util-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-server\8.1.14.v20131031\jetty-server-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-http\8.1.14.v20131031\jetty-http-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-io\8.1.14.v20131031\jetty-io-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-continuation\8.1.14.v20131031\jetty-continuation-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\jetty-servlet\8.1.14.v20131031\jetty-servlet-8.1.14.v20131031.jar;C:\Users\yuzhitao\.m2\repository\org\eclipse\jetty\orbit\javax.servlet\3.0.0.v201112011016\javax.servlet-3.0.0.v201112011016.jar;C:\Users\yuzhitao\.m2\repository\org\apache\commons\commons-lang3\3.3.2\commons-lang3-3.3.2.jar;C:\Users\yuzhitao\.m2\repository\org\apache\commons\commons-math3\3.4.1\commons-math3-3.4.1.jar;C:\Users\yuzhitao\.m2\repository\com\google\code\findbugs\jsr305\1.3.9\jsr305-1.3.9.jar;C:\Users\yuzhitao\.m2\repository\org\slf4j\slf4j-api\1.7.10\slf4j-api-1.7.10.jar;C:\Users\yuzhitao\.m2\repository\org\slf4j\jul-to-slf4j\1.7.10\jul-to-slf4j-1.7.10.jar;C:\Users\yuzhitao\
.m2\repository\org\slf4j\jcl-over-slf4j\1.7.10\jcl-over-slf4j-1.7.10.jar;C:\Users\yuzhitao\.m2\repository\log4j\log4j\1.2.17\log4j-1.2.17.jar;C:\Users\yuzhitao\.m2\repository\org\slf4j\slf4j-log4j12\1.7.10\slf4j-log4j12-1.7.10.jar;C:\Users\yuzhitao\.m2\repository\com\ning\compress-lzf\1.0.3\compress-lzf-1.0.3.jar;C:\Users\yuzhitao\.m2\repository\org\xerial\snappy\snappy-java\1.1.2\snappy-java-1.1.2.jar;C:\Users\yuzhitao\.m2\repository\net\jpountz\lz4\lz4\1.3.0\lz4-1.3.0.jar;C:\Users\yuzhitao\.m2\repository\org\roaringbitmap\RoaringBitmap\0.5.11\RoaringBitmap-0.5.11.jar;C:\Users\yuzhitao\.m2\repository\commons-net\commons-net\2.2\commons-net-2.2.jar;C:\Users\yuzhitao\.m2\repository\com\typesafe\akka\akka-remote_2.10\2.3.11\akka-remote_2.10-2.3.11.jar;C:\Users\yuzhitao\.m2\repository\com\typesafe\akka\akka-actor_2.10\2.3.11\akka-actor_2.10-2.3.11.jar;C:\Users\yuzhitao\.m2\repository\com\typesafe\config\1.2.1\config-1.2.1.jar;C:\Users\yuzhitao\.m2\repository\io\netty\netty\3.8.0.Final\netty-3.8.0.Final.jar;C:\Users\yuzhitao\.m2\repository\com\google\protobuf\protobuf-java\2.5.0\protobuf-java-2.5.0.jar;C:\Users\yuzhitao\.m2\repository\org\uncommons\maths\uncommons-maths\1.2.2a\uncommons-maths-1.2.2a.jar;C:\Users\yuzhitao\.m2\repository\com\typesafe\akka\akka-slf4j_2.10\2.3.11\akka-slf4j_2.10-2.3.11.jar;C:\Users\yuzhitao\.m2\repository\org\scala-lang\scala-library\2.10.4\scala-library-2.10.4.jar;C:\Users\yuzhitao\.m2\repository\org\json4s\json4s-jackson_2.10\3.2.10\json4s-jackson_2.10-3.2.10.jar;C:\Users\yuzhitao\.m2\repository\org\json4s\json4s-core_2.10\3.2.10\json4s-core_2.10-3.2.10.jar;C:\Users\yuzhitao\.m2\repository\org\json4s\json4s-ast_2.10\3.2.10\json4s-ast_2.10-3.2.10.jar;C:\Users\yuzhitao\.m2\repository\org\scala-lang\scalap\2.10.4\scalap-2.10.4.jar;C:\Users\yuzhitao\.m2\repository\org\scala-lang\scala-compiler\2.10.4\scala-compiler-2.10.4.jar;C:\Users\yuzhitao\.m2\repository\com\sun\jersey\jersey-server\1.9\jersey-server-1.9.jar;C:\Users\yuzhitao\.m2\reposito
ry\asm\asm\3.1\asm-3.1.jar;C:\Users\yuzhitao\.m2\repository\com\sun\jersey\jersey-core\1.9\jersey-core-1.9.jar;C:\Users\yuzhitao\.m2\repository\org\apache\mesos\mesos\0.21.1\mesos-0.21.1-shaded-protobuf.jar;C:\Users\yuzhitao\.m2\repository\io\netty\netty-all\4.0.29.Final\netty-all-4.0.29.Final.jar;C:\Users\yuzhitao\.m2\repository\com\clearspring\analytics\stream\2.7.0\stream-2.7.0.jar;C:\Users\yuzhitao\.m2\repository\io\dropwizard\metrics\metrics-core\3.1.2\metrics-core-3.1.2.jar;C:\Users\yuzhitao\.m2\repository\io\dropwizard\metrics\metrics-jvm\3.1.2\metrics-jvm-3.1.2.jar;C:\Users\yuzhitao\.m2\repository\io\dropwizard\metrics\metrics-json\3.1.2\metrics-json-3.1.2.jar;C:\Users\yuzhitao\.m2\repository\io\dropwizard\metrics\metrics-graphite\3.1.2\metrics-graphite-3.1.2.jar;C:\Users\yuzhitao\.m2\repository\com\fasterxml\jackson\module\jackson-module-scala_2.10\2.5.3\jackson-module-scala_2.10-2.5.3.jar;C:\Users\yuzhitao\.m2\repository\com\thoughtworks\paranamer\paranamer\2.6\paranamer-2.6.jar;C:\Users\yuzhitao\.m2\repository\org\apache\ivy\ivy\2.4.0\ivy-2.4.0.jar;C:\Users\yuzhitao\.m2\repository\oro\oro\2.0.8\oro-2.0.8.jar;C:\Users\yuzhitao\.m2\repository\net\razorvine\pyrolite\4.9\pyrolite-4.9.jar;C:\Users\yuzhitao\.m2\repository\net\sf\py4j\py4j\0.9.1\py4j-0.9.1.jar;D:\opensourceprojects\spark\sql\catalyst\target\scala-2.10\classes;C:\Users\yuzhitao\.m2\repository\org\scala-lang\scala-reflect\2.10.4\scala-reflect-2.10.4.jar;C:\Users\yuzhitao\.m2\repository\org\codehaus\janino\janino\2.7.8\janino-2.7.8.jar;C:\Users\yuzhitao\.m2\repository\org\codehaus\janino\commons-compiler\2.7.8\commons-compiler-2.7.8.jar;C:\Users\yuzhitao\.m2\repository\org\antlr\antlr-runtime\3.5.2\antlr-runtime-3.5.2.jar;C:\Users\yuzhitao\.m2\repository\org\apache\parquet\parquet-column\1.7.0\parquet-column-1.7.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\parquet\parquet-common\1.7.0\parquet-common-1.7.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\parquet\parquet-encoding\1.7.0\parquet-en
coding-1.7.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\parquet\parquet-generator\1.7.0\parquet-generator-1.7.0.jar;C:\Users\yuzhitao\.m2\repository\commons-codec\commons-codec\1.10\commons-codec-1.10.jar;C:\Users\yuzhitao\.m2\repository\org\apache\parquet\parquet-hadoop\1.7.0\parquet-hadoop-1.7.0.jar;C:\Users\yuzhitao\.m2\repository\org\apache\parquet\parquet-format\2.3.0-incubating\parquet-format-2.3.0-incubating.jar;C:\Users\yuzhitao\.m2\repository\org\apache\parquet\parquet-jackson\1.7.0\parquet-jackson-1.7.0.jar;C:\Users\yuzhitao\.m2\repository\org\codehaus\jackson\jackson-mapper-asl\1.9.13\jackson-mapper-asl-1.9.13.jar;C:\Users\yuzhitao\.m2\repository\org\codehaus\jackson\jackson-core-asl\1.9.13\jackson-core-asl-1.9.13.jar;C:\Users\yuzhitao\.m2\repository\com\fasterxml\jackson\core\jackson-databind\2.5.3\jackson-databind-2.5.3.jar;C:\Users\yuzhitao\.m2\repository\com\fasterxml\jackson\core\jackson-annotations\2.5.3\jackson-annotations-2.5.3.jar;C:\Users\yuzhitao\.m2\repository\com\fasterxml\jackson\core\jackson-core\2.5.3\jackson-core-2.5.3.jar;C:\Users\yuzhitao\.m2\repository\org\apache\avro\avro\1.7.7\avro-1.7.7.jar;C:\Users\yuzhitao\.m2\repository\org\apache\commons\commons-compress\1.4.1\commons-compress-1.4.1.jar;C:\Users\yuzhitao\.m2\repository\org\tukaani\xz\1.0\xz-1.0.jar;C:\Users\yuzhitao\.m2\repository\org\objenesis\objenesis\1.0\objenesis-1.0.jar;C:\Users\yuzhitao\.m2\repository\org\spark-project\spark\unused\1.0.0\unused-1.0.0.jar
  last tree to typer: Literal(Constant(org.apache.spark.sql.test.ExamplePoint))
              symbol: null
   symbol definition: null
                 tpe: Class(classOf[org.apache.spark.sql.test.ExamplePoint])
       symbol owners:
      context owners: anonymous class withErrorHandling$1 -> package util
== Enclosing template or block ==
Template( // val <local $anonfun>: <notype>, tree.tpe=org.apache.spark.sql.util.withErrorHandling$1
  "scala.runtime.AbstractFunction1", "scala.Serializable" // parents
  ValDef(
    private
    "_"
    <tpt>
    <empty>
  )
  // 5 statements
  DefDef( // final def apply(listener: org.apache.spark.sql.util.QueryExecutionListener): Unit
    <method> final <triedcooking>
    "apply"
    []
    // 1 parameter list
    ValDef( // listener: org.apache.spark.sql.util.QueryExecutionListener
      <param> <triedcooking>
      "listener"
      <tpt> // tree.tpe=org.apache.spark.sql.util.QueryExecutionListener
      <empty>
    )
    <tpt> // tree.tpe=Unit
    Try( // tree.tpe=Unit
      Block( // tree.tpe=Unit
        Apply( // def apply(v1: Object): Object in trait Function1, tree.tpe=Object
          ExecutionListenerManager$$anonfun$org$apache$spark$sql$util$ExecutionListenerManager$$withErrorHandling$1.this."f$1"."apply" // def apply(v1: Object): Object in trait Function1, tree.tpe=(v1: Object)Object
          "listener" // listener: org.apache.spark.sql.util.QueryExecutionListener, tree.tpe=org.apache.spark.sql.util.QueryExecutionListener
        )
        ()
      )
      CaseDef( // tree.tpe=Unit
        Bind( // val ex6: Throwable, tree.tpe=Throwable
          "ex6"
          "_" // tree.tpe=Throwable
        )
        Block( // tree.tpe=Unit
          // 3 statements
          ValDef( // val x4: Throwable
            <synthetic> <triedcooking>
            "x4"
            <tpt> // tree.tpe=Throwable
            "ex6" // val ex6: Throwable, tree.tpe=Throwable
          )
          LabelDef( // case def case9(): Unit, tree.tpe=Unit
            ()
            Block( // tree.tpe=Unit
              ValDef( // val o11: Option
                <synthetic> <triedcooking>
                "o11"
                <tpt> // tree.tpe=Option
                Apply( // def unapply(t: Throwable): Option in object NonFatal, tree.tpe=Option
                  "scala"."util"."control"."NonFatal"."unapply" // def unapply(t: Throwable): Option in object NonFatal, tree.tpe=(t: Throwable)Option
                  "x4" // val x4: Throwable, tree.tpe=Throwable
                )
              )
              If( // tree.tpe=Unit
                Apply( // def unary_!(): Boolean in class Boolean, tree.tpe=Boolean
                  o11.isEmpty()."unary_$bang" // def unary_!(): Boolean in class Boolean, tree.tpe=()Boolean
                  Nil
                )
                Block( // tree.tpe=Unit
                  ValDef( // val e: Throwable
                    <triedcooking>
                    "e"
                    <tpt> // tree.tpe=Throwable
                    Apply( // final def $asInstanceOf[T0 >: ? <: ?](): T0 in class Object, tree.tpe=Throwable
                      TypeApply( // final def $asInstanceOf[T0 >: ? <: ?](): T0 in class Object, tree.tpe=()Throwable
                        o11.get()."$asInstanceOf" // final def $asInstanceOf[T0 >: ? <: ?](): T0 in class Object, tree.tpe=[T0 >: ? <: ?]()T0
                        <tpt> // tree.tpe=Throwable
                      )
                      Nil
                    )
                  )
                  Apply( // case def matchEnd8(x: runtime.BoxedUnit): Unit, tree.tpe=Unit
                    "matchEnd8" // case def matchEnd8(x: runtime.BoxedUnit): Unit, tree.tpe=(x: runtime.BoxedUnit)Unit
                    Block( // tree.tpe=runtime.BoxedUnit
                      Apply( // def logWarning(msg: Function0,throwable: Throwable): Unit in trait Logging, tree.tpe=Unit
                        ExecutionListenerManager$$anonfun$org$apache$spark$sql$util$ExecutionListenerManager$$withErrorHandling$1.this."$outer "."logWarning" // def logWarning(msg: Function0,throwable: Throwable): Unit in trait Logging, tree.tpe=(msg: Function0, throwable: Throwable)Unit
                        // 2 arguments
                        Block( // tree.tpe=Function0
                          {}
                          Typed( // tree.tpe=Function0
                            Apply( // def <init>(arg$outer: org.apache.spark.sql.util.withErrorHandling$1): org.apache.spark.sql.util.anonfun$apply$1, tree.tpe=org.apache.spark.sql.util.anonfun$apply$1
                              new anonymous class anonfun$apply$1."<init>" // def <init>(arg$outer: org.apache.spark.sql.util.withErrorHandling$1): org.apache.spark.sql.util.anonfun$apply$1, tree.tpe=(arg$outer: org.apache.spark.sql.util.withErrorHandling$1)org.apache.spark.sql.util.anonfun$apply$1
                              This("$anonfun")final class withErrorHandling$1 extends runtime.AbstractFunction1 with Serializable in package util, tree.tpe=org.apache.spark.sql.util.withErrorHandling$1
                            )
                            <tpt> // tree.tpe=Function0
                          )
                        )
                        "e" // val e: Throwable, tree.tpe=Throwable
                      )
                      "scala"."runtime"."BoxedUnit"."UNIT" // final val UNIT: runtime.BoxedUnit in object BoxedUnit, tree.tpe=runtime.BoxedUnit
                    )
                  )
                )
                Apply( // case def case10(): Unit, tree.tpe=Unit
                  "case10" // case def case10(): Unit, tree.tpe=()Unit
                  Nil
                )
              )
            )
          )
          LabelDef( // case def case10(): Unit, tree.tpe=Unit
            ()
            Apply( // case def matchEnd8(x: runtime.BoxedUnit): Unit, tree.tpe=Unit
              "matchEnd8" // case def matchEnd8(x: runtime.BoxedUnit): Unit, tree.tpe=(x: runtime.BoxedUnit)Unit
              Throw("ex6")tree.tpe=Nothing
            )
          )
          LabelDef( // case def matchEnd8(x: runtime.BoxedUnit): Unit, tree.tpe=Unit
            "x" // x: runtime.BoxedUnit, tree.tpe=runtime.BoxedUnit
            ()
          )
        )
      )
    )
  )
  ValDef( // private[this] val $outer: org.apache.spark.sql.util.ExecutionListenerManager
    private <local> <synthetic> <paramaccessor> <triedcooking>
    "$outer "
    <tpt> // tree.tpe=org.apache.spark.sql.util.ExecutionListenerManager
    <empty>
  )
  DefDef( // final def apply(v1: Object): Object
    <method> final <bridge>
    "apply"
    []
    // 1 parameter list
    ValDef( // v1: Object
      <param> <triedcooking>
      "v1"
      <tpt> // tree.tpe=Object
      <empty>
    )
    <tpt> // tree.tpe=Object
    Block( // tree.tpe=runtime.BoxedUnit
      Apply( // final def apply(listener: org.apache.spark.sql.util.QueryExecutionListener): Unit, tree.tpe=Unit
        ExecutionListenerManager$$anonfun$org$apache$spark$sql$util$ExecutionListenerManager$$withErrorHandling$1.this."apply" // final def apply(listener: org.apache.spark.sql.util.QueryExecutionListener): Unit, tree.tpe=(listener: org.apache.spark.sql.util.QueryExecutionListener)Unit
        Apply( // final def $asInstanceOf[T0 >: ? <: ?](): T0 in class Object, tree.tpe=org.apache.spark.sql.util.QueryExecutionListener
          TypeApply( // final def $asInstanceOf[T0 >: ? <: ?](): T0 in class Object, tree.tpe=()org.apache.spark.sql.util.QueryExecutionListener
            "v1"."$asInstanceOf" // final def $asInstanceOf[T0 >: ? <: ?](): T0 in class Object, tree.tpe=[T0 >: ? <: ?]()T0
            <tpt> // tree.tpe=org.apache.spark.sql.util.QueryExecutionListener
          )
          Nil
        )
      )
      "scala"."runtime"."BoxedUnit"."UNIT" // final val UNIT: runtime.BoxedUnit in object BoxedUnit, tree.tpe=runtime.BoxedUnit
    )
  )
  ValDef( // private[this] val f$1: Function1
    private <local> <synthetic> <paramaccessor> <triedcooking>
    "f$1"
    <tpt> // tree.tpe=Function1
    <empty>
  )
  DefDef( // def <init>(arg$outer: org.apache.spark.sql.util.ExecutionListenerManager,f$1: Function1): org.apache.spark.sql.util.withErrorHandling$1
    <method> <triedcooking>
    "<init>"
    []
    // 1 parameter list
    ValDef( // $outer: org.apache.spark.sql.util.ExecutionListenerManager
      <param> <triedcooking>
      "$outer"
      <tpt> // tree.tpe=org.apache.spark.sql.util.ExecutionListenerManager
      <empty>
    )
    ValDef( // f$1: Function1
      <param> <synthetic> <triedcooking>
      "f$1"
      <tpt> // tree.tpe=Function1
      <empty>
    )
    <tpt> // tree.tpe=org.apache.spark.sql.util.withErrorHandling$1
    Block( // tree.tpe=Unit
      // 3 statements
      If( // tree.tpe=Unit
        Apply( // final def eq(x$1: Object): Boolean in class Object, tree.tpe=Boolean
          "$outer"."eq" // final def eq(x$1: Object): Boolean in class Object, tree.tpe=(x$1: Object)Boolean
          null
        )
        Throw( // tree.tpe=Nothing
          Apply( // def <init>(): NullPointerException in class NullPointerException, tree.tpe=NullPointerException
            new NullPointerException."<init>" // def <init>(): NullPointerException in class NullPointerException, tree.tpe=()NullPointerException
            Nil
          )
        )
        Assign( // tree.tpe=Unit
          ExecutionListenerManager$$anonfun$org$apache$spark$sql$util$ExecutionListenerManager$$withErrorHandling$1.this."$outer " // private[this] val $outer: org.apache.spark.sql.util.ExecutionListenerManager, tree.tpe=org.apache.spark.sql.util.ExecutionListenerManager
          "$outer" // $outer: org.apache.spark.sql.util.ExecutionListenerManager, tree.tpe=org.apache.spark.sql.util.ExecutionListenerManager
        )
      )
      Assign( // tree.tpe=Unit
        ExecutionListenerManager$$anonfun$org$apache$spark$sql$util$ExecutionListenerManager$$withErrorHandling$1.this."f$1" // private[this] val f$1: Function1, tree.tpe=Function1
        "f$1" // f$1: Function1, tree.tpe=Function1
      )
      Apply( // def <init>(): scala.runtime.AbstractFunction1 in class AbstractFunction1, tree.tpe=scala.runtime.AbstractFunction1
        ExecutionListenerManager$$anonfun$org$apache$spark$sql$util$ExecutionListenerManager$$withErrorHandling$1.super."<init>" // def <init>(): scala.runtime.AbstractFunction1 in class AbstractFunction1, tree.tpe=()scala.runtime.AbstractFunction1
        Nil
      )
      ()
    )
  )
)
== Expanded type of tree ==
ConstantType(
  value = Constant(org.apache.spark.sql.test.ExamplePoint)
)
uncaught exception during compilation: java.lang.AssertionError

Error:scalac: Error: assertion failed: List(object package$DebugNode, object package$DebugNode)
java.lang.AssertionError: assertion failed: List(object package$DebugNode, object package$DebugNode)

    at scala.reflect.internal.Symbols$Symbol.suchThat(Symbols.scala:1678)

    at scala.reflect.internal.Symbols$ClassSymbol.companionModule0(Symbols.scala:2988)

    at scala.reflect.internal.Symbols$ClassSymbol.companionModule(Symbols.scala:2991)

    at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder.genClass(GenASM.scala:1371)

    at scala.tools.nsc.backend.jvm.GenASM$AsmPhase.run(GenASM.scala:120)

    at scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1583)

    at scala.tools.nsc.Global$Run.compileUnits(Global.scala:1557)

    at scala.tools.nsc.Global$Run.compileSources(Global.scala:1553)

    at scala.tools.nsc.Global$Run.compile(Global.scala:1662)

    at xsbt.CachedCompiler0.run(CompilerInterface.scala:126)

    at xsbt.CachedCompiler0.run(CompilerInterface.scala:102)

    at xsbt.CompilerInterface.run(CompilerInterface.scala:27)

    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

    at java.lang.reflect.Method.invoke(Method.java:606)

    at sbt.compiler.AnalyzingCompiler.call(AnalyzingCompiler.scala:102)

    at sbt.compiler.AnalyzingCompiler.compile(AnalyzingCompiler.scala:48)

    at sbt.compiler.AnalyzingCompiler.compile(AnalyzingCompiler.scala:41)

    at org.jetbrains.jps.incremental.scala.local.IdeaIncrementalCompiler.compile(IdeaIncrementalCompiler.scala:29)

    at org.jetbrains.jps.incremental.scala.local.LocalServer.compile(LocalServer.scala:26)

    at org.jetbrains.jps.incremental.scala.remote.Main$.make(Main.scala:62)

    at org.jetbrains.jps.incremental.scala.remote.Main$.nailMain(Main.scala:20)

    at org.jetbrains.jps.incremental.scala.remote.Main.nailMain(Main.scala)

    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)

    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

    at java.lang.reflect.Method.invoke(Method.java:606)

    at com.martiansoftware.nailgun.NGSession.run(NGSession.java:319)