Posted to issues@spark.apache.org by "Sandeep Katta (Jira)" <ji...@apache.org> on 2019/09/26 09:35:00 UTC

[jira] [Issue Comment Deleted] (SPARK-29254) Failed to include jars passed in through --jars when isolatedLoader is enabled

     [ https://issues.apache.org/jira/browse/SPARK-29254?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Sandeep Katta updated SPARK-29254:
----------------------------------
    Comment: was deleted

(was: [~yumwang] I would like to work on it if you have not started)

> Failed to include jars passed in through --jars when isolatedLoader is enabled
> ------------------------------------------------------------------------------
>
>                 Key: SPARK-29254
>                 URL: https://issues.apache.org/jira/browse/SPARK-29254
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 3.0.0
>            Reporter: Yuming Wang
>            Priority: Major
>
> Failed to include jars passed in through --jars when {{isolatedLoader}} is enabled ({{spark.sql.hive.metastore.jars != builtin}}). How to reproduce:
> {code:scala}
>   test("SPARK-29254: include jars passed in through --jars when isolatedLoader is enabled") {
>     val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
>     val jar1 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassA"))
>     val jar2 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassB"))
>     val jar3 = HiveTestJars.getHiveContribJar.getCanonicalPath
>     val jar4 = HiveTestJars.getHiveHcatalogCoreJar.getCanonicalPath
>     val jarsString = Seq(jar1, jar2, jar3, jar4).map(j => j.toString).mkString(",")
>     val args = Seq(
>       "--class", SparkSubmitClassLoaderTest.getClass.getName.stripSuffix("$"),
>       "--name", "SparkSubmitClassLoaderTest",
>       "--master", "local-cluster[2,1,1024]",
>       "--conf", "spark.ui.enabled=false",
>       "--conf", "spark.master.rest.enabled=false",
>       "--conf", "spark.sql.hive.metastore.version=3.1.2",
>       "--conf", "spark.sql.hive.metastore.jars=maven",
>       "--driver-java-options", "-Dderby.system.durability=test",
>       "--jars", jarsString,
>       unusedJar.toString, "SparkSubmitClassA", "SparkSubmitClassB")
>     runSparkSubmit(args)
>   }
> {code}
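> For context, a minimal sketch of the SQL-level operation that trips over the missing jar (the table name {{t_json}} is hypothetical; the SerDe class is the one from {{hive-hcatalog-core}} that the test ships via --jars):
> {code:scala}
> // Assumes a session started with --jars .../hive-hcatalog-core-<version>.jar
> // and spark.sql.hive.metastore.jars=maven, so isolatedLoader is enabled.
> // With the bug present, the isolated Hive client classloader never sees
> // the --jars entries, so the metastore-side SerDe lookup fails.
> spark.sql(
>   """CREATE TABLE t_json (a INT)
>     |ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'
>     |""".stripMargin)
> {code}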
> Logs:
> {noformat}
> 2019-09-25 22:11:42.854 - stderr> 19/09/25 22:11:42 ERROR log: error in initSerDe: java.lang.ClassNotFoundException Class org.apache.hive.hcatalog.data.JsonSerDe not found
> 2019-09-25 22:11:42.854 - stderr> java.lang.ClassNotFoundException: Class org.apache.hive.hcatalog.data.JsonSerDe not found
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2101)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreUtils.getDeserializer(HiveMetaStoreUtils.java:84)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreUtils.getDeserializer(HiveMetaStoreUtils.java:77)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:289)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:271)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.hadoop.hive.ql.metadata.Table.getColsInternal(Table.java:663)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:646)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:898)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:937)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$createTable$1(HiveClientImpl.scala:539)
> 2019-09-25 22:11:42.854 - stderr> 	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:311)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:245)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:244)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:294)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.hive.client.HiveClientImpl.createTable(HiveClientImpl.scala:537)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$createTable$1(HiveExternalCatalog.scala:284)
> 2019-09-25 22:11:42.854 - stderr> 	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:99)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.hive.HiveExternalCatalog.createTable(HiveExternalCatalog.scala:242)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.catalyst.catalog.ExternalCatalogWithListener.createTable(ExternalCatalogWithListener.scala:94)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.createTable(SessionCatalog.scala:325)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.execution.command.CreateTableCommand.run(tables.scala:132)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
> 2019-09-25 22:11:42.854 - stderr> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:225)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3372)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$4(SQLExecution.scala:100)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:87)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3368)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:225)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:87)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.hive.test.TestHiveSparkSession.sql(TestHive.scala:238)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:550)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.hive.SparkSubmitClassLoaderTest$.main(HiveSparkSubmitSuite.scala:638)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.sql.hive.SparkSubmitClassLoaderTest.main(HiveSparkSubmitSuite.scala)
> 2019-09-25 22:11:42.855 - stderr> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 2019-09-25 22:11:42.855 - stderr> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 2019-09-25 22:11:42.855 - stderr> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 2019-09-25 22:11:42.855 - stderr> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:901)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:179)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:202)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:89)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:980)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:989)
> 2019-09-25 22:11:42.855 - stderr> 	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> {noformat}
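> Note that the lookup fails inside {{Configuration.getClassByName}}, i.e. in the classloader used by the isolated Hive client, not in Spark's application classloader. A rough way to see the split from the driver (only the application-side half can be checked from user code, since {{org.apache.spark.sql.hive.client.IsolatedClientLoader}} is internal):
> {code:scala}
> // Succeeds: --jars entries are on the application/context classloader.
> Class.forName("org.apache.hive.hcatalog.data.JsonSerDe",
>   false, Thread.currentThread().getContextClassLoader)
> // The failing lookup in the trace above, by contrast, runs through the
> // isolated Hive client loader, whose classpath is built from the
> // metastore jars (maven/classpath) and does not include --jars.
> {code}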


