You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by Chopinxb <gi...@git.apache.org> on 2017/08/08 09:24:20 UTC

[GitHub] spark issue #17342: [SPARK-12868][SQL] Allow adding jars from hdfs

Github user Chopinxb commented on the issue:

    https://github.com/apache/spark/pull/17342
  
    Have you tried it in yarn-client mode? I applied this patch in v2.1.1 + Hadoop 2.6.0; when I run "add jar" through the SparkSQL CLI, it produces this error:
    ERROR thriftserver.SparkSQLDriver: Failed in [add jar  hdfs://SunshineNameNode3:8020/lib/clouddata-common-lib/chardet-0.0.1.jar]
    java.lang.ExceptionInInitializerError
    	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:662)
    	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:889)
    	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:947)
    	at java.io.DataInputStream.read(DataInputStream.java:100)
    	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:85)
    	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:59)
    	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:119)
    	at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:369)
    	at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:341)
    	at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:292)
    	at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:2107)
    	at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:2076)
    	at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:2052)
    	at org.apache.hadoop.hive.ql.session.SessionState.downloadResource(SessionState.java:1274)
    	at org.apache.hadoop.hive.ql.session.SessionState.resolveAndDownload(SessionState.java:1242)
    	at org.apache.hadoop.hive.ql.session.SessionState.add_resources(SessionState.java:1163)
    	at org.apache.hadoop.hive.ql.session.SessionState.add_resources(SessionState.java:1149)
    	at org.apache.hadoop.hive.ql.processors.AddResourceProcessor.run(AddResourceProcessor.java:67)
    	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:632)
    	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:601)
    	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:278)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:267)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.runHive(HiveClientImpl.scala:601)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.runSqlHive(HiveClientImpl.scala:591)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.addJar(HiveClientImpl.scala:738)
    	at org.apache.spark.sql.hive.HiveSessionState.addJar(HiveSessionState.scala:105)
    	at org.apache.spark.sql.execution.command.AddJarCommand.run(resources.scala:40)
    	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
    	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
    	at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
    	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
    	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
    	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
    	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
    	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
    	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
    	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
    	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
    	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
    	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
    	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:699)
    	at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:62)
    	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:335)
    	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
    	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:247)
    	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
    	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:498)
    	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:743)
    	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
    	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
    	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
    	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
    Caused by: java.lang.NullPointerException
    	at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:746)
    	at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:376)
    	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:662)
    	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:889)
    	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:947)
    	at java.io.DataInputStream.read(DataInputStream.java:100)
    	at java.nio.file.Files.copy(Files.java:2908)
    	at java.nio.file.Files.copy(Files.java:3027)
    	at sun.net.www.protocol.jar.URLJarFile$1.run(URLJarFile.java:220)
    	at sun.net.www.protocol.jar.URLJarFile$1.run(URLJarFile.java:216)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at sun.net.www.protocol.jar.URLJarFile.retrieve(URLJarFile.java:215)
    	at sun.net.www.protocol.jar.URLJarFile.getJarFile(URLJarFile.java:71)
    	at sun.net.www.protocol.jar.JarFileFactory.get(JarFileFactory.java:84)
    	at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:122)
    	at sun.net.www.protocol.jar.JarURLConnection.getJarFile(JarURLConnection.java:89)
    	at sun.misc.URLClassPath$JarLoader.getJarFile(URLClassPath.java:934)
    	at sun.misc.URLClassPath$JarLoader.access$800(URLClassPath.java:791)
    	at sun.misc.URLClassPath$JarLoader$1.run(URLClassPath.java:876)
    	at sun.misc.URLClassPath$JarLoader$1.run(URLClassPath.java:869)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at sun.misc.URLClassPath$JarLoader.ensureOpen(URLClassPath.java:868)
    	at sun.misc.URLClassPath$JarLoader.<init>(URLClassPath.java:819)
    	at sun.misc.URLClassPath$3.run(URLClassPath.java:565)
    	at sun.misc.URLClassPath$3.run(URLClassPath.java:555)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at sun.misc.URLClassPath.getLoader(URLClassPath.java:554)
    	at sun.misc.URLClassPath.getLoader(URLClassPath.java:519)
    	at sun.misc.URLClassPath.getNextLoader(URLClassPath.java:484)
    	at sun.misc.URLClassPath.access$100(URLClassPath.java:65)
    	at sun.misc.URLClassPath$1.next(URLClassPath.java:266)
    	at sun.misc.URLClassPath$1.hasMoreElements(URLClassPath.java:277)
    	at java.net.URLClassLoader$3$1.run(URLClassLoader.java:601)
    	at java.net.URLClassLoader$3$1.run(URLClassLoader.java:599)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at java.net.URLClassLoader$3.next(URLClassLoader.java:598)
    	at java.net.URLClassLoader$3.hasMoreElements(URLClassLoader.java:623)
    	at sun.misc.CompoundEnumeration.next(CompoundEnumeration.java:45)
    	at sun.misc.CompoundEnumeration.hasMoreElements(CompoundEnumeration.java:54)
    	at org.apache.commons.logging.LogFactory.getConfigurationFile(LogFactory.java:1409)
    	at org.apache.commons.logging.LogFactory.getFactory(LogFactory.java:455)
    	at org.apache.commons.logging.LogFactory.getLog(LogFactory.java:657)
    	at org.apache.hadoop.hdfs.BlockReaderFactory.<clinit>(BlockReaderFactory.java:77)
    	... 58 more
    java.lang.ExceptionInInitializerError
    	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:662)
    	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:889)
    	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:947)
    	at java.io.DataInputStream.read(DataInputStream.java:100)
    	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:85)
    	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:59)
    	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:119)
    	at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:369)
    	at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:341)
    	at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:292)
    	at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:2107)
    	at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:2076)
    	at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:2052)
    	at org.apache.hadoop.hive.ql.session.SessionState.downloadResource(SessionState.java:1274)
    	at org.apache.hadoop.hive.ql.session.SessionState.resolveAndDownload(SessionState.java:1242)
    	at org.apache.hadoop.hive.ql.session.SessionState.add_resources(SessionState.java:1163)
    	at org.apache.hadoop.hive.ql.session.SessionState.add_resources(SessionState.java:1149)
    	at org.apache.hadoop.hive.ql.processors.AddResourceProcessor.run(AddResourceProcessor.java:67)
    	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:632)
    	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:601)
    	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:278)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:267)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.runHive(HiveClientImpl.scala:601)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.runSqlHive(HiveClientImpl.scala:591)
    	at org.apache.spark.sql.hive.client.HiveClientImpl.addJar(HiveClientImpl.scala:738)
    	at org.apache.spark.sql.hive.HiveSessionState.addJar(HiveSessionState.scala:105)
    	at org.apache.spark.sql.execution.command.AddJarCommand.run(resources.scala:40)
    	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
    	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
    	at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
    	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
    	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
    	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
    	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
    	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
    	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
    	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
    	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
    	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
    	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
    	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
    	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:699)
    	at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:62)
    	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:335)
    	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
    	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:247)
    	at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
    	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    	at java.lang.reflect.Method.invoke(Method.java:498)
    	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:743)
    	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
    	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
    	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
    	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
    Caused by: java.lang.NullPointerException
    	at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:746)
    	at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:376)
    	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:662)
    	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:889)
    	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:947)
    	at java.io.DataInputStream.read(DataInputStream.java:100)
    	at java.nio.file.Files.copy(Files.java:2908)
    	at java.nio.file.Files.copy(Files.java:3027)
    	at sun.net.www.protocol.jar.URLJarFile$1.run(URLJarFile.java:220)
    	at sun.net.www.protocol.jar.URLJarFile$1.run(URLJarFile.java:216)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at sun.net.www.protocol.jar.URLJarFile.retrieve(URLJarFile.java:215)
    	at sun.net.www.protocol.jar.URLJarFile.getJarFile(URLJarFile.java:71)
    	at sun.net.www.protocol.jar.JarFileFactory.get(JarFileFactory.java:84)
    	at sun.net.www.protocol.jar.JarURLConnection.connect(JarURLConnection.java:122)
    	at sun.net.www.protocol.jar.JarURLConnection.getJarFile(JarURLConnection.java:89)
    	at sun.misc.URLClassPath$JarLoader.getJarFile(URLClassPath.java:934)
    	at sun.misc.URLClassPath$JarLoader.access$800(URLClassPath.java:791)
    	at sun.misc.URLClassPath$JarLoader$1.run(URLClassPath.java:876)
    	at sun.misc.URLClassPath$JarLoader$1.run(URLClassPath.java:869)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at sun.misc.URLClassPath$JarLoader.ensureOpen(URLClassPath.java:868)
    	at sun.misc.URLClassPath$JarLoader.<init>(URLClassPath.java:819)
    	at sun.misc.URLClassPath$3.run(URLClassPath.java:565)
    	at sun.misc.URLClassPath$3.run(URLClassPath.java:555)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at sun.misc.URLClassPath.getLoader(URLClassPath.java:554)
    	at sun.misc.URLClassPath.getLoader(URLClassPath.java:519)
    	at sun.misc.URLClassPath.getNextLoader(URLClassPath.java:484)
    	at sun.misc.URLClassPath.access$100(URLClassPath.java:65)
    	at sun.misc.URLClassPath$1.next(URLClassPath.java:266)
    	at sun.misc.URLClassPath$1.hasMoreElements(URLClassPath.java:277)
    	at java.net.URLClassLoader$3$1.run(URLClassLoader.java:601)
    	at java.net.URLClassLoader$3$1.run(URLClassLoader.java:599)
    	at java.security.AccessController.doPrivileged(Native Method)
    	at java.net.URLClassLoader$3.next(URLClassLoader.java:598)
    	at java.net.URLClassLoader$3.hasMoreElements(URLClassLoader.java:623)
    	at sun.misc.CompoundEnumeration.next(CompoundEnumeration.java:45)
    	at sun.misc.CompoundEnumeration.hasMoreElements(CompoundEnumeration.java:54)
    	at org.apache.commons.logging.LogFactory.getConfigurationFile(LogFactory.java:1409)
    	at org.apache.commons.logging.LogFactory.getFactory(LogFactory.java:455)
    	at org.apache.commons.logging.LogFactory.getLog(LogFactory.java:657)
    	at org.apache.hadoop.hdfs.BlockReaderFactory.<clinit>(BlockReaderFactory.java:77)


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org