Posted to issues@spark.apache.org by "Gayathri Murali (JIRA)" <ji...@apache.org> on 2015/10/21 03:12:27 UTC

[jira] [Commented] (SPARK-5818) unable to use "add jar" in hql

    [ https://issues.apache.org/jira/browse/SPARK-5818?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14966065#comment-14966065 ] 

Gayathri Murali commented on SPARK-5818:
----------------------------------------

I am trying to use the ADD JAR command on Spark 1.5 and am running into issues. The jar is installed in the local Maven repo.

hivecontext.sql("ADD JAR '/Users/xxxxx/.m2/repository/org/apache/hadoop/hive/serde2/TestSerDe/1.0/TestSerDe-1.0.jar'")

The path is correct and the jar file exists at that location, but I am getting the following error:

ERROR SparkContext: Jar not found at '/Users/xxxxx/.m2/repository/org/apache/hadoop/hive/serde2/TestSerDe/1.0/TestSerDe-1.0.jar'

However, when I add this jar with --jars during spark-submit, it works fine.
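
One thing that might be worth checking (not verified here): Hive's ADD JAR usually takes the path without surrounding quotes, and the error message above shows the path wrapped in quotes, which may hint that the quotes are being treated as part of the file path. A minimal sketch of two things to try, reusing the same path and the hivecontext variable from above, and assuming the SparkContext is available as sc; SparkContext.addJar is only a fallback and may not cover the SerDe case:

{code}
// Assumption: the single quotes inside the SQL string may be treated as part
// of the file path, so first try the bare, unquoted path.
hivecontext.sql("ADD JAR /Users/xxxxx/.m2/repository/org/apache/hadoop/hive/serde2/TestSerDe/1.0/TestSerDe-1.0.jar")

// Fallback: register the jar directly with the SparkContext. This ships the
// jar to the executors, but it may not put it on the driver classpath the
// way --jars does at submit time.
sc.addJar("/Users/xxxxx/.m2/repository/org/apache/hadoop/hive/serde2/TestSerDe/1.0/TestSerDe-1.0.jar")
{code}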




> unable to use "add jar" in hql
> ------------------------------
>
>                 Key: SPARK-5818
>                 URL: https://issues.apache.org/jira/browse/SPARK-5818
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 1.2.0, 1.2.1
>            Reporter: pengxu
>
> In Spark 1.2.0 and 1.2.1, it is not possible to use the Hive command "add jar" in hql.
> It seems that the problem from SPARK-2219 still exists.
> The problem can be reproduced as described below. Suppose the jar file is named brickhouse-0.6.0.jar and is placed in the /tmp directory.
> {code}
> spark-shell>import org.apache.spark.sql.hive._
> spark-shell>val sqlContext = new HiveContext(sc)
> spark-shell>import sqlContext._
> spark-shell>hql("add jar /tmp/brickhouse-0.6.0.jar")
> {code}
> The error message is shown below:
> {code:title=Error Log}
> 15/02/15 01:36:31 ERROR SessionState: Unable to register /tmp/brickhouse-0.6.0.jar
> Exception: org.apache.spark.repl.SparkIMain$TranslatingClassLoader cannot be cast to java.net.URLClassLoader
> java.lang.ClassCastException: org.apache.spark.repl.SparkIMain$TranslatingClassLoader cannot be cast to java.net.URLClassLoader
> 	at org.apache.hadoop.hive.ql.exec.Utilities.addToClassPath(Utilities.java:1921)
> 	at org.apache.hadoop.hive.ql.session.SessionState.registerJar(SessionState.java:599)
> 	at org.apache.hadoop.hive.ql.session.SessionState$ResourceType$2.preHook(SessionState.java:658)
> 	at org.apache.hadoop.hive.ql.session.SessionState.add_resource(SessionState.java:732)
> 	at org.apache.hadoop.hive.ql.session.SessionState.add_resource(SessionState.java:717)
> 	at org.apache.hadoop.hive.ql.processors.AddResourceProcessor.run(AddResourceProcessor.java:54)
> 	at org.apache.spark.sql.hive.HiveContext.runHive(HiveContext.scala:319)
> 	at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:276)
> 	at org.apache.spark.sql.hive.execution.AddJar.sideEffectResult$lzycompute(commands.scala:74)
> 	at org.apache.spark.sql.hive.execution.AddJar.sideEffectResult(commands.scala:73)
> 	at org.apache.spark.sql.execution.Command$class.execute(commands.scala:46)
> 	at org.apache.spark.sql.hive.execution.AddJar.execute(commands.scala:68)
> 	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:425)
> 	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:425)
> 	at org.apache.spark.sql.SchemaRDDLike$class.$init$(SchemaRDDLike.scala:58)
> 	at org.apache.spark.sql.SchemaRDD.<init>(SchemaRDD.scala:108)
> 	at org.apache.spark.sql.hive.HiveContext.hiveql(HiveContext.scala:102)
> 	at org.apache.spark.sql.hive.HiveContext.hql(HiveContext.scala:106)
> 	at $line30.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:24)
> 	at $line30.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:29)
> 	at $line30.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
> 	at $line30.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:33)
> 	at $line30.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:35)
> 	at $line30.$read$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:37)
> 	at $line30.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:39)
> 	at $line30.$read$$iwC$$iwC$$iwC.<init>(<console>:41)
> 	at $line30.$read$$iwC$$iwC.<init>(<console>:43)
> 	at $line30.$read$$iwC.<init>(<console>:45)
> 	at $line30.$read.<init>(<console>:47)
> 	at $line30.$read$.<init>(<console>:51)
> 	at $line30.$read$.<clinit>(<console>)
> 	at $line30.$eval$.<init>(<console>:7)
> 	at $line30.$eval$.<clinit>(<console>)
> 	at $line30.$eval.$print(<console>)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:606)
> 	at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)
> 	at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)
> 	at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)
> 	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)
> 	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)
> 	at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:828)
> 	at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:873)
> 	at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:785)
> 	at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:628)
> 	at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:636)
> 	at org.apache.spark.repl.SparkILoop.loop(SparkILoop.scala:641)
> 	at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:968)
> 	at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
> 	at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
> 	at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
> 	at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:916)
> 	at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1011)
> 	at org.apache.spark.repl.Main$.main(Main.scala:31)
> 	at org.apache.spark.repl.Main.main(Main.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:606)
> 	at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:358)
> 	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
> 	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> {code}


