You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Yang Jie (Jira)" <ji...@apache.org> on 2023/08/26 09:32:00 UTC

[jira] [Resolved] (SPARK-44968) Downgrade ivy to 2.5.1

     [ https://issues.apache.org/jira/browse/SPARK-44968?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Yang Jie resolved SPARK-44968.
------------------------------
    Fix Version/s: 3.5.0
                   4.0.0
       Resolution: Fixed

Issue resolved by pull request 42668
[https://github.com/apache/spark/pull/42668]

> Downgrade ivy to 2.5.1
> ----------------------
>
>                 Key: SPARK-44968
>                 URL: https://issues.apache.org/jira/browse/SPARK-44968
>             Project: Spark
>          Issue Type: Improvement
>          Components: Build
>    Affects Versions: 3.5.0, 4.0.0
>            Reporter: Yang Jie
>            Assignee: Yang Jie
>            Priority: Major
>             Fix For: 3.5.0, 4.0.0
>
>
> After upgrading Ivy to version 2.5.2, the daily tests for Java 11 and Java 17 began to experience aborts in the HiveExternalCatalogVersionsSuite.
> {code:java}
> 2023-08-23T23:00:49.6547573Z [info]   2023-08-23 16:00:48.209 - stdout> : java.lang.RuntimeException: problem during retrieve of org.apache.spark#spark-submit-parent-4c061f04-b951-4d06-8909-cde5452988d9: java.lang.RuntimeException: Multiple artifacts of the module log4j#log4j;1.2.17 are retrieved to the same file! Update the retrieve pattern to fix this error.
> 2023-08-23T23:00:49.6548745Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.core.retrieve.RetrieveEngine.retrieve(RetrieveEngine.java:238)
> 2023-08-23T23:00:49.6549572Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.core.retrieve.RetrieveEngine.retrieve(RetrieveEngine.java:89)
> 2023-08-23T23:00:49.6550334Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.Ivy.retrieve(Ivy.java:551)
> 2023-08-23T23:00:49.6551079Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.deploy.SparkSubmitUtils$.resolveMavenCoordinates(SparkSubmit.scala:1464)
> 2023-08-23T23:00:49.6552024Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.client.IsolatedClientLoader$.$anonfun$downloadVersion$2(IsolatedClientLoader.scala:138)
> 2023-08-23T23:00:49.6552884Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.util.package$.quietly(package.scala:42)
> 2023-08-23T23:00:49.6553755Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.client.IsolatedClientLoader$.downloadVersion(IsolatedClientLoader.scala:138)
> 2023-08-23T23:00:49.6554705Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.client.IsolatedClientLoader$.liftedTree1$1(IsolatedClientLoader.scala:65)
> 2023-08-23T23:00:49.6555637Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.client.IsolatedClientLoader$.forVersion(IsolatedClientLoader.scala:64)
> 2023-08-23T23:00:49.6556554Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:443)
> 2023-08-23T23:00:49.6557340Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:356)
> 2023-08-23T23:00:49.6558187Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:71)
> 2023-08-23T23:00:49.6559061Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:70)
> 2023-08-23T23:00:49.6559962Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:224)
> 2023-08-23T23:00:49.6560766Z [info]   2023-08-23 16:00:48.209 - stdout>     at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
> 2023-08-23T23:00:49.6561584Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:102)
> 2023-08-23T23:00:49.6562510Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:224)
> 2023-08-23T23:00:49.6563435Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:150)
> 2023-08-23T23:00:49.6564323Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:140)
> 2023-08-23T23:00:49.6565340Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:45)
> 2023-08-23T23:00:49.6566321Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$1(HiveSessionStateBuilder.scala:60)
> 2023-08-23T23:00:49.6567363Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:118)
> 2023-08-23T23:00:49.6568372Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:118)
> 2023-08-23T23:00:49.6569393Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.catalog.SessionCatalog.tableExists(SessionCatalog.scala:490)
> 2023-08-23T23:00:49.6570685Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand.run(createDataSourceTables.scala:155)
> 2023-08-23T23:00:49.6571842Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:113)
> 2023-08-23T23:00:49.6572932Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:111)
> 2023-08-23T23:00:49.6573996Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:125)
> 2023-08-23T23:00:49.6575045Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:97)
> 2023-08-23T23:00:49.6576066Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
> 2023-08-23T23:00:49.6576937Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
> 2023-08-23T23:00:49.6577807Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
> 2023-08-23T23:00:49.6578620Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
> 2023-08-23T23:00:49.6579432Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
> 2023-08-23T23:00:49.6580357Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:97)
> 2023-08-23T23:00:49.6581331Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:93)
> 2023-08-23T23:00:49.6582239Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)
> 2023-08-23T23:00:49.6583101Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:82)
> 2023-08-23T23:00:49.6584088Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)
> 2023-08-23T23:00:49.6585236Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
> 2023-08-23T23:00:49.6586519Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
> 2023-08-23T23:00:49.6587686Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
> 2023-08-23T23:00:49.6588898Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
> 2023-08-23T23:00:49.6590014Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
> 2023-08-23T23:00:49.6590993Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)
> 2023-08-23T23:00:49.6591930Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:93)
> 2023-08-23T23:00:49.6592914Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:80)
> 2023-08-23T23:00:49.6593856Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:78)
> 2023-08-23T23:00:49.6594687Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.Dataset.<init>(Dataset.scala:219)
> 2023-08-23T23:00:49.6595379Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
> 2023-08-23T23:00:49.6596103Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
> 2023-08-23T23:00:49.6596807Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
> 2023-08-23T23:00:49.6597520Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:618)
> 2023-08-23T23:00:49.6598276Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
> 2023-08-23T23:00:49.6599022Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:613)
> 2023-08-23T23:00:49.6599819Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 2023-08-23T23:00:49.6600723Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
> 2023-08-23T23:00:49.6601707Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 2023-08-23T23:00:49.6602513Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/java.lang.reflect.Method.invoke(Method.java:568)
> 2023-08-23T23:00:49.6603272Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
> 2023-08-23T23:00:49.6604007Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
> 2023-08-23T23:00:49.6604724Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.Gateway.invoke(Gateway.java:282)
> 2023-08-23T23:00:49.6605416Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
> 2023-08-23T23:00:49.6606209Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.commands.CallCommand.execute(CallCommand.java:79)
> 2023-08-23T23:00:49.6606969Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
> 2023-08-23T23:00:49.6607743Z [info]   2023-08-23 16:00:48.209 - stdout>     at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
> 2023-08-23T23:00:49.6608415Z [info]   2023-08-23 16:00:48.209 - stdout>     at java.base/java.lang.Thread.run(Thread.java:833)
> 2023-08-23T23:00:49.6609288Z [info]   2023-08-23 16:00:48.209 - stdout> Caused by: java.lang.RuntimeException: Multiple artifacts of the module log4j#log4j;1.2.17 are retrieved to the same file! Update the retrieve pattern to fix this error.
> 2023-08-23T23:00:49.6610288Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.core.retrieve.RetrieveEngine.determineArtifactsToCopy(RetrieveEngine.java:426)
> 2023-08-23T23:00:49.6611332Z [info]   2023-08-23 16:00:48.209 - stdout>     at org.apache.ivy.core.retrieve.RetrieveEngine.retrieve(RetrieveEngine.java:122)
> 2023-08-23T23:00:49.6612046Z [info]   2023-08-23 16:00:48.209 - stdout>     ... 66 more
> 2023-08-23T23:00:49.6612498Z [info]   2023-08-23 16:00:48.209 - stdout>  {code}
> Java 11
>  
>  * [https://github.com/apache/spark/actions/runs/5953716283/job/16148657660]
>  * [https://github.com/apache/spark/actions/runs/5966131923/job/16185159550]
>  
> Java 17
>  * [https://github.com/apache/spark/actions/runs/5956925790/job/16158714165]
>  * [https://github.com/apache/spark/actions/runs/5969348559/job/16195073478]



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org