Posted to issues@spark.apache.org by "Jungtaek Lim (Jira)" <ji...@apache.org> on 2019/09/18 05:33:00 UTC

[jira] [Created] (SPARK-29134) Flaky test: org.apache.spark.sql.execution.SQLExecutionSuite.concurrent query execution with fork-join pool (SPARK-13747)

Jungtaek Lim created SPARK-29134:
------------------------------------

             Summary: Flaky test: org.apache.spark.sql.execution.SQLExecutionSuite.concurrent query execution with fork-join pool (SPARK-13747)
                 Key: SPARK-29134
                 URL: https://issues.apache.org/jira/browse/SPARK-29134
             Project: Spark
          Issue Type: Bug
          Components: SQL, Tests
    Affects Versions: 3.0.0
            Reporter: Jungtaek Lim


[https://amplab.cs.berkeley.edu/jenkins/view/Spark%20QA%20Test/job/spark-master-test-maven-hadoop-2.7-jdk-11-ubuntu-testing/1544/testReport/]
{code:java}
scala.ScalaReflectionException: type T1 is not a class
      at scala.reflect.api.Symbols$SymbolApi.asClass(Symbols.scala:284)
      at scala.reflect.api.Symbols$SymbolApi.asClass$(Symbols.scala:284)
      at scala.reflect.internal.Symbols$SymbolContextApiImpl.asClass(Symbols.scala:106)
      at org.apache.spark.sql.catalyst.ScalaReflection$.getClassFromType(ScalaReflection.scala:617)
      at org.apache.spark.sql.catalyst.ScalaReflection$.$anonfun$dataTypeFor$1(ScalaReflection.scala:105)
      at scala.reflect.internal.tpe.TypeConstraints$UndoLog.undo(TypeConstraints.scala:68)
      at org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects(ScalaReflection.scala:848)
      at org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects$(ScalaReflection.scala:847)
      at org.apache.spark.sql.catalyst.ScalaReflection$.cleanUpReflectionObjects(ScalaReflection.scala:47)
      at org.apache.spark.sql.catalyst.ScalaReflection$.dataTypeFor(ScalaReflection.scala:86)
      at org.apache.spark.sql.catalyst.ScalaReflection$.$anonfun$serializerFor$6(ScalaReflection.scala:551)
      at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:237)
      at scala.collection.immutable.List.foreach(List.scala:392)
      at scala.collection.TraversableLike.map(TraversableLike.scala:237)
      at scala.collection.TraversableLike.map$(TraversableLike.scala:230)
      at scala.collection.immutable.List.map(List.scala:298)
      at org.apache.spark.sql.catalyst.ScalaReflection$.$anonfun$serializerFor$1(ScalaReflection.scala:541)
      at scala.reflect.internal.tpe.TypeConstraints$UndoLog.undo(TypeConstraints.scala:68)
      at org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects(ScalaReflection.scala:848)
      at org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects$(ScalaReflection.scala:847)
      at org.apache.spark.sql.catalyst.ScalaReflection$.cleanUpReflectionObjects(ScalaReflection.scala:47)
      at org.apache.spark.sql.catalyst.ScalaReflection$.serializerFor(ScalaReflection.scala:410)
      at org.apache.spark.sql.catalyst.ScalaReflection$.$anonfun$serializerForType$1(ScalaReflection.scala:399)
      at scala.reflect.internal.tpe.TypeConstraints$UndoLog.undo(TypeConstraints.scala:68)
      at org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects(ScalaReflection.scala:848)
      at org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects$(ScalaReflection.scala:847)
      at org.apache.spark.sql.catalyst.ScalaReflection$.cleanUpReflectionObjects(ScalaReflection.scala:47)
      at org.apache.spark.sql.catalyst.ScalaReflection$.serializerForType(ScalaReflection.scala:391)
      at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$.apply(ExpressionEncoder.scala:54)
      at org.apache.spark.sql.Encoders$.product(Encoders.scala:285)
      at org.apache.spark.sql.LowPrioritySQLImplicits.newProductEncoder(SQLImplicits.scala:251)
      at org.apache.spark.sql.LowPrioritySQLImplicits.newProductEncoder$(SQLImplicits.scala:251)
      at org.apache.spark.sql.SQLImplicits.newProductEncoder(SQLImplicits.scala:32)
      at org.apache.spark.sql.execution.SQLExecutionSuite.$anonfun$new$3(SQLExecutionSuite.scala:50)
      at scala.runtime.java8.JFunction1$mcJI$sp.apply(JFunction1$mcJI$sp.java:23)
      at scala.Function1.apply$mcVI$sp(Function1.scala:41)
      at scala.collection.parallel.immutable.ParRange$ParRangeIterator.foreach(ParRange.scala:94)
      at scala.collection.parallel.ParIterableLike$Foreach.leaf(ParIterableLike.scala:974)
      at scala.collection.parallel.Task.$anonfun$tryLeaf$1(Tasks.scala:53)
      at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
      at scala.util.control.Breaks$$anon$1.catchBreak(Breaks.scala:67)
      at scala.collection.parallel.Task.tryLeaf(Tasks.scala:56)
      at scala.collection.parallel.Task.tryLeaf$(Tasks.scala:50)
      at scala.collection.parallel.ParIterableLike$Foreach.tryLeaf(ParIterableLike.scala:971)
      at scala.collection.parallel.AdaptiveWorkStealingTasks$WrappedTask.internal(Tasks.scala:160)
      at scala.collection.parallel.AdaptiveWorkStealingTasks$WrappedTask.internal$(Tasks.scala:157)
      at scala.collection.parallel.AdaptiveWorkStealingForkJoinTasks$WrappedTask.internal(Tasks.scala:440)
      at scala.collection.parallel.AdaptiveWorkStealingTasks$WrappedTask.compute(Tasks.scala:150)
      at scala.collection.parallel.AdaptiveWorkStealingTasks$WrappedTask.compute$(Tasks.scala:149)
      at scala.collection.parallel.AdaptiveWorkStealingForkJoinTasks$WrappedTask.compute(Tasks.scala:440)
      at java.base/java.util.concurrent.RecursiveAction.exec(RecursiveAction.java:189)
      at java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:290)
      at java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1020)
      at java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1656)
      at java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1594)
      at java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:177)
{code}
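Judging from the stack trace, the failure happens while fork-join worker threads derive a product encoder concurrently (newProductEncoder -> ScalaReflection), which is the scenario SPARK-13747 exercises. Below is a minimal sketch of that call pattern, assuming a local SparkSession and Scala 2.12 parallel collections; it is not the actual SQLExecutionSuite test body, and the object and column names are hypothetical.
{code:scala}
import org.apache.spark.sql.SparkSession

// Hypothetical sketch of the call pattern seen in the trace above,
// not the real SQLExecutionSuite source.
object ConcurrentEncoderReproSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("SPARK-29134 sketch")
      .getOrCreate()
    import spark.implicits._

    try {
      // .par runs the closure on the default fork-join pool
      // (ParRange -> AdaptiveWorkStealingForkJoinTasks -> ForkJoinPool in the trace).
      (1 to 100).par.foreach { _ =>
        // The implicit newProductEncoder call executes here on the worker thread,
        // driving ScalaReflection concurrently; the flake surfaces as
        // "scala.ScalaReflectionException: type T1 is not a class".
        Seq((1, "a"), (2, "b")).toDF("id", "value").count()
      }
    } finally {
      spark.stop()
    }
  }
}
{code}
Run repeatedly (for example on JDK 11, as on the Jenkins worker linked above), this shape of workload may occasionally hit the same ScalaReflectionException path; being a race, it will not fail deterministically.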



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org