Posted to user@phoenix.apache.org by Divya Gehlot <di...@gmail.com> on 2016/02/18 09:33:30 UTC
Fwd: Error : starting spark-shell with phoenix client jar
Hi,
I am getting the following error while starting the Spark shell with the Phoenix client jar:

spark-shell --jars /usr/hdp/current/phoenix-client/phoenix-4.4.0.2.3.4.0-3485-client.jar \
  --driver-class-path /usr/hdp/current/phoenix-client/phoenix-4.4.0.2.3.4.0-3485-client.jar \
  --master yarn-client

Stack trace:
> INFO TimelineClientImpl: Timeline service address: http://ip-xxx-xx-xx-xxx.ap-southeast-1.compute.internal:8188/ws/v1/timeline/
> java.lang.NoSuchMethodError: org.codehaus.jackson.map.ObjectMapper.setSerializationInclusion(Lorg/codehaus/jackson/map/annotate/JsonSerialize$Inclusion;)Lorg/codehaus/jackson/map/ObjectMapper;
>   at org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider.configObjectMapper(YarnJacksonJaxbJsonProvider.java:59)
>   at org.apache.hadoop.yarn.util.timeline.TimelineUtils.<clinit>(TimelineUtils.java:50)
>   at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.serviceInit(YarnClientImpl.java:172)
>   at org.apache.hadoop.service.AbstractService.init(AbstractService.java:163)
>   at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:108)
>   at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:57)
>   at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:144)
>   at org.apache.spark.SparkContext.<init>(SparkContext.scala:523)
>   at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1017)
>   at $iwC$$iwC.<init>(<console>:9)
>   at $iwC.<init>(<console>:18)
>   at <init>(<console>:20)
>   at .<init>(<console>:24)
>   at .<clinit>(<console>)
>   at .<init>(<console>:7)
>   at .<clinit>(<console>)
>   at $print(<console>)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
>   at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
>   at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
>   at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
>   at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
>   at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
>   at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
>   at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
>   at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:125)
>   at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
>   at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
>   at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
>   at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
>   at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
>   at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
>   at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
>   at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
>   at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
>   at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
>   at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
>   at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
>   at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
>   at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
>   at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
>   at org.apache.spark.repl.Main$.main(Main.scala:31)
>   at org.apache.spark.repl.Main.main(Main.scala)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:685)
>   at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
>   at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
>   at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
>   at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> java.lang.NullPointerException
>   at org.apache.spark.sql.execution.ui.SQLListener.<init>(SQLListener.scala:34)
>   at org.apache.spark.sql.SQLContext.<init>(SQLContext.scala:77)
>   at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:74)
>   at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>   at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
>   at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>   at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>   at org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1028)
>   at $iwC$$iwC.<init>(<console>:9)
>   at $iwC.<init>(<console>:18)
>   at <init>(<console>:20)
>   at .<init>(<console>:24)
>   at .<clinit>(<console>)
>   at .<init>(<console>:7)
>   at .<clinit>(<console>)
>   at $print(<console>)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
>   at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
>   at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
>   at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
>   at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
>   at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
>   at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
>   at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
>   at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:132)
>   at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
>   at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
>   at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
>   at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
>   at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
>   at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
>   at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
>   at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
>   at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
>   at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
>   at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
>   at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
>   at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
>   at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
>   at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
>   at org.apache.spark.repl.Main$.main(Main.scala:31)
>   at org.apache.spark.repl.Main.main(Main.scala)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:685)
>   at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
>   at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
>   at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
>   at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> <console>:10: error: not found: value sqlContext
>        import sqlContext.implicits._
>               ^
> <console>:10: error: not found: value sqlContext
>        import sqlContext.sql
I googled and found that the required Jackson dependency is not available for the Hadoop 2.x version (SPARK-5108
<https://issues.apache.org/jira/browse/SPARK-5108?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel>).
Are the above errors related to that issue?
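One way to check this (just a shell sketch; the jar path is the same one passed to --jars above) is to see whether
the client jar bundles its own org.codehaus.jackson classes:

unzip -l /usr/hdp/current/phoenix-client/phoenix-4.4.0.2.3.4.0-3485-client.jar | grep org/codehaus/jackson/map/ObjectMapper

If ObjectMapper shows up inside the Phoenix client jar, the NoSuchMethodError above would point to an older bundled
Jackson shadowing the version that YARN's timeline client was compiled against.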
Thanks
Divya
Re: Error : starting spark-shell with phoenix client jar
Posted by Ankit Singhal <an...@gmail.com>.
The Phoenix team is in the process of releasing 4.7 soon, so in the meantime you can
ask your distribution provider for a hotfix.
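Until you get that, one stopgap that is sometimes used for this particular failure (untested on your cluster, so
treat it as an assumption) is to keep Spark from initialising YARN's timeline client, since that is the code path
that trips over the old Jackson in your stack trace:

spark-shell --conf spark.hadoop.yarn.timeline-service.enabled=false \
  --jars /usr/hdp/current/phoenix-client/phoenix-4.4.0.2.3.4.0-3485-client.jar \
  --driver-class-path /usr/hdp/current/phoenix-client/phoenix-4.4.0.2.3.4.0-3485-client.jar \
  --master yarn-client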
On Thu, Feb 18, 2016 at 2:35 PM, Divya Gehlot <di...@gmail.com>
wrote:
> Thank you very much Ankit for guiding me in the right direction.
>
> How can I upgrade to Phoenix 4.7?
> I couldn't find it here <http://apache.arvixe.com/phoenix/>
Re: Error : starting spark-shell with phoenix client jar
Posted by Divya Gehlot <di...@gmail.com>.
Thank you very much Ankit for guiding me in the right direction.
How can I upgrade to Phoenix 4.7?
I couldn't find it here <http://apache.arvixe.com/phoenix/>
On 18 February 2016 at 16:43, Ankit Singhal <an...@gmail.com>
wrote:
> Hi Divya,
>
> It is fixed in 4.7 , please find a jira for the same.
> https://issues.apache.org/jira/browse/PHOENIX-2608
>
> Regards,
> Ankit Singhal
Re: Error : starting spark-shell with phoenix client jar
Posted by Ankit Singhal <an...@gmail.com>.
Hi Divya,
It is fixed in 4.7; please see the JIRA for it:
https://issues.apache.org/jira/browse/PHOENIX-2608
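Once a 4.7 client jar is available, you can pass it to spark-shell the same way as before; the file name below is
only illustrative and will depend on the build you end up with:

spark-shell --jars /path/to/phoenix-4.7.0-client.jar \
  --driver-class-path /path/to/phoenix-4.7.0-client.jar \
  --master yarn-client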
Regards,
Ankit Singhal