Posted to issues@spark.apache.org by "Apache Spark (Jira)" <ji...@apache.org> on 2020/08/15 13:59:00 UTC

[jira] [Assigned] (SPARK-32625) Log error message when falling back to interpreter mode

     [ https://issues.apache.org/jira/browse/SPARK-32625?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Apache Spark reassigned SPARK-32625:
------------------------------------

    Assignee:     (was: Apache Spark)

> Log error message when falling back to interpreter mode
> -------------------------------------------------------
>
>                 Key: SPARK-32625
>                 URL: https://issues.apache.org/jira/browse/SPARK-32625
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 3.0.0
>            Reporter: Yuming Wang
>            Priority: Major
>
> Current (the WARN message is logged without the underlying codegen exception, so the cause of the fallback is invisible):
> {noformat}
> 21:45:52.787 WARN org.apache.spark.sql.catalyst.expressions.Predicate: Expr codegen error and falling back to interpreter mode
> +---+----+
> | id|   k|
> +---+----+
> |  0|0.00|
> |  1|1.00|
> +---+----+
> {noformat}
> Expected (the WARN message should include the exception and its stack trace):
> {noformat}
> 21:48:44.612 WARN org.apache.spark.sql.catalyst.expressions.Predicate: Expr codegen error and falling back to interpreter mode
> java.lang.IllegalArgumentException: Can not interpolate org.apache.spark.sql.types.Decimal into code block.
> 	at org.apache.spark.sql.catalyst.expressions.codegen.Block$BlockHelper$.$anonfun$code$1(javaCode.scala:240)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.Block$BlockHelper$.$anonfun$code$1$adapted(javaCode.scala:236)
> 	at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> 	at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> 	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.Block$BlockHelper$.code$extension(javaCode.scala:236)
> 	at org.apache.spark.sql.execution.MixedFilterSubqueryExec.doGenCode(subquery.scala:284)
> 	at org.apache.spark.sql.catalyst.expressions.Expression.$anonfun$genCode$3(Expression.scala:147)
> 	at scala.Option.getOrElse(Option.scala:189)
> 	at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:142)
> 	at org.apache.spark.sql.catalyst.expressions.DynamicPruningExpression.doGenCode(DynamicPruning.scala:93)
> 	at org.apache.spark.sql.catalyst.expressions.Expression.$anonfun$genCode$3(Expression.scala:147)
> 	at scala.Option.getOrElse(Option.scala:189)
> 	at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:142)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GeneratePredicate$.create(GeneratePredicate.scala:35)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.GeneratePredicate$.create(GeneratePredicate.scala:26)
> 	at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:1263)
> 	at org.apache.spark.sql.catalyst.expressions.Predicate$.createCodeGeneratedObject(predicates.scala:76)
> 	at org.apache.spark.sql.catalyst.expressions.Predicate$.createCodeGeneratedObject(predicates.scala:73)
> 	at org.apache.spark.sql.catalyst.expressions.CodeGeneratorWithInterpretedFallback.createObject(CodeGeneratorWithInterpretedFallback.scala:52)
> 	at org.apache.spark.sql.catalyst.expressions.Predicate$.create(predicates.scala:89)
> 	at org.apache.spark.sql.execution.FileSourceScanExec.dynamicallySelectedPartitions$lzycompute(DataSourceScanExec.scala:239)
> 	at org.apache.spark.sql.execution.FileSourceScanExec.dynamicallySelectedPartitions(DataSourceScanExec.scala:227)
> 	at org.apache.spark.sql.execution.FileSourceScanExec.inputRDD$lzycompute(DataSourceScanExec.scala:411)
> 	at org.apache.spark.sql.execution.FileSourceScanExec.inputRDD(DataSourceScanExec.scala:396)
> 	at org.apache.spark.sql.execution.FileSourceScanExec.doExecuteColumnar(DataSourceScanExec.scala:491)
> 	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeColumnar$1(SparkPlan.scala:202)
> 	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:213)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
> 	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:210)
> 	at org.apache.spark.sql.execution.SparkPlan.executeColumnar(SparkPlan.scala:198)
> 	at org.apache.spark.sql.execution.InputAdapter.doExecuteColumnar(WholeStageCodegenExec.scala:520)
> 	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeColumnar$1(SparkPlan.scala:202)
> 	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:213)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
> 	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:210)
> 	at org.apache.spark.sql.execution.SparkPlan.executeColumnar(SparkPlan.scala:198)
> 	at org.apache.spark.sql.execution.ColumnarToRowExec.inputRDDs(Columnar.scala:203)
> 	at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.inputRDDs(BroadcastHashJoinExec.scala:178)
> 	at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:50)
> 	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:747)
> 	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:175)
> 	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:213)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
> 	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:210)
> 	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:171)
> 	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:316)
> 	at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:434)
> 	at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:420)
> 	at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:47)
> 	at org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3680)
> 	at org.apache.spark.sql.Dataset.$anonfun$head$1(Dataset.scala:2710)
> 	at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3671)
> 	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
> 	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
> 	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
> 	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
> 	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
> 	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3669)
> 	at org.apache.spark.sql.Dataset.head(Dataset.scala:2710)
> 	at org.apache.spark.sql.Dataset.take(Dataset.scala:2917)
> 	at org.apache.spark.sql.Dataset.getRows(Dataset.scala:300)
> 	at org.apache.spark.sql.Dataset.showString(Dataset.scala:337)
> 	at org.apache.spark.sql.Dataset.show(Dataset.scala:824)
> 	at org.apache.spark.sql.Dataset.show(Dataset.scala:783)
> 	at org.apache.spark.sql.Dataset.show(Dataset.scala:792)
> 	at org.apache.spark.sql.DataFrameSuite.$anonfun$new$543(DataFrameSuite.scala:2570)
> 	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
> 	at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1403)
> 	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:305)
> 	at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:303)
> 	at org.apache.spark.sql.DataFrameSuite.withTable(DataFrameSuite.scala:53)
> 	at org.apache.spark.sql.DataFrameSuite.$anonfun$new$542(DataFrameSuite.scala:2549)
> 	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
> 	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
> 	at org.apache.spark.sql.DataFrameSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(DataFrameSuite.scala:53)
> 	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:246)
> 	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:244)
> 	at org.apache.spark.sql.DataFrameSuite.withSQLConf(DataFrameSuite.scala:53)
> 	at org.apache.spark.sql.DataFrameSuite.$anonfun$new$541(DataFrameSuite.scala:2549)
> 	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
> 	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
> 	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
> 	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> 	at org.scalatest.Transformer.apply(Transformer.scala:22)
> 	at org.scalatest.Transformer.apply(Transformer.scala:20)
> 	at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:189)
> 	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:164)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:187)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:199)
> 	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:199)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:181)
> 	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:60)
> 	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
> 	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
> 	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:60)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:232)
> 	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
> 	at scala.collection.immutable.List.foreach(List.scala:392)
> 	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
> 	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
> 	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:232)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:231)
> 	at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1562)
> 	at org.scalatest.Suite.run(Suite.scala:1112)
> 	at org.scalatest.Suite.run$(Suite.scala:1094)
> 	at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1562)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:236)
> 	at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:236)
> 	at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:235)
> 	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:60)
> 	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
> 	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> 	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> 	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:60)
> 	at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
> 	at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1320)
> 	at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1314)
> 	at scala.collection.immutable.List.foreach(List.scala:392)
> 	at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1314)
> 	at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:993)
> 	at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:971)
> 	at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1480)
> 	at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:971)
> 	at org.scalatest.tools.Runner$.run(Runner.scala:798)
> 	at org.scalatest.tools.Runner.run(Runner.scala)
> 	at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
> 	at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)
> +---+----+
> | id|   k|
> +---+----+
> |  0|0.00|
> |  1|1.00|
> +---+----+
> {noformat}
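> A minimal sketch (not the actual Spark patch) of the requested behavior: pass the caught exception to the logger so its stack trace shows up in the WARN output, as in the expected log above. The names here ({{CodegenFallbackSketch}}, {{codegen}}, {{interpreted}}) are hypothetical stand-ins for the real fallback path in {{CodeGeneratorWithInterpretedFallback}}.
> {code:scala}
> import scala.util.control.NonFatal
> import org.slf4j.LoggerFactory
>
> // Hypothetical helper mirroring the codegen-with-interpreted-fallback pattern.
> object CodegenFallbackSketch {
>   private val log = LoggerFactory.getLogger(getClass)
>
>   def createObject[IN, OUT](in: IN)(codegen: IN => OUT, interpreted: IN => OUT): OUT = {
>     try {
>       codegen(in)
>     } catch {
>       case NonFatal(e) =>
>         // Passing `e` as the second argument makes SLF4J emit the
>         // exception and its stack trace, not just the bare message.
>         log.warn("Expr codegen error and falling back to interpreter mode", e)
>         interpreted(in)
>     }
>   }
> }
> {code}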



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org