Posted to user@hive.apache.org by Divya Gehlot <di...@gmail.com> on 2015/12/21 11:05:14 UTC

configure spark for hive context

Hi,
I am trying to configure Spark to use a Hive context (please don't confuse this with Hive on Spark).
I placed hive-site.xml in Spark's conf directory ($SPARK_HOME/conf).
Now when I run spark-shell I get the error below.

Versions I am using: Hadoop 2.6.2, Spark 1.5.2, Hive 1.2.1


Welcome to
>       ____              __
>      / __/__  ___ _____/ /__
>     _\ \/ _ \/ _ `/ __/  '_/
>    /___/ .__/\_,_/_/ /_/\_\   version 1.5.2
>       /_/
>
> Using Scala version 2.10.4 (Java HotSpot(TM) 64-Bit Server VM, Java
> 1.8.0_66)
> Type in expressions to have them evaluated.
> Type :help for more information.
> Spark context available as sc.
> java.lang.RuntimeException: java.lang.IllegalArgumentException: java.net.URISyntaxException: Relative path in absolute URI: ${system:java.io.tmpdir%7D/$%7Bsystem:user.name%7D
>     at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
>     at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
>     at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:162)
>     at org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:160)
>     at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:167)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
>     at org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1028)
>     at $iwC$$iwC.<init>(<console>:9)
>     at $iwC.<init>(<console>:18)
>     at <init>(<console>:20)
>     at .<init>(<console>:24)
>     at .<clinit>(<console>)
>     at .<init>(<console>:7)
>     at .<clinit>(<console>)
>     at $print(<console>)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:497)
>     at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
>     at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
>     at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
>     at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
>     at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
>     at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
>     at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
>     at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
>     at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:132)
>     at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
>     at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
>     at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
>     at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
>     at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
>     at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
>     at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
>     at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
>     at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
>     at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
>     at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
>     at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
>     at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
>     at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
>     at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
>     at org.apache.spark.repl.Main$.main(Main.scala:31)
>     at org.apache.spark.repl.Main.main(Main.scala)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:497)
>     at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:674)
>     at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
>     at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
>     at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
>     at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> Caused by: java.lang.IllegalArgumentException: java.net.URISyntaxException: Relative path in absolute URI: ${system:java.io.tmpdir%7D/$%7Bsystem:user.name%7D
>     at org.apache.hadoop.fs.Path.initialize(Path.java:206)
>     at org.apache.hadoop.fs.Path.<init>(Path.java:172)
>     at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:563)
>     at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
>     ... 56 more
> Caused by: java.net.URISyntaxException: Relative path in absolute URI: ${system:java.io.tmpdir%7D/$%7Bsystem:user.name%7D
>     at java.net.URI.checkPath(URI.java:1823)
>     at java.net.URI.<init>(URI.java:745)
>     at org.apache.hadoop.fs.Path.initialize(Path.java:203)
>     ... 59 more
>
> <console>:10: error: not found: value sqlContext
>        import sqlContext.implicits._
>               ^
> <console>:10: error: not found: value sqlContext
>        import sqlContext.sql
>

Re: configure spark for hive context

Posted by Akhil Das <ak...@sigmoidanalytics.com>.
It looks like the hive-site.xml you copied contains configuration values that
Spark cannot parse; that is what crashes the shell at startup.
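
The giveaway is the literal, unexpanded ${system:java.io.tmpdir}/${system:user.name}
in the URISyntaxException (the braces show up percent-encoded as %7B/%7D).
Hive 1.2.1's hive-default.xml.template uses that placeholder as the default for
hive.exec.local.scratchdir, and Spark evidently does not expand the system:
variables when it creates the session directories (createSessionDirs in the
trace), so the raw string reaches org.apache.hadoop.fs.Path and fails URI
parsing. If your hive-site.xml was copied from that template, replacing the
placeholders with concrete paths should clear the error; a minimal sketch
(the /tmp/hive paths are only example locations):

<!-- In hive-site.xml: replace every ${system:...} placeholder with a
     concrete absolute path; /tmp/hive is just an example. -->
<property>
  <name>hive.exec.local.scratchdir</name>
  <value>/tmp/hive</value>
</property>
<property>
  <name>hive.downloaded.resources.dir</name>
  <value>/tmp/hive/resources</value>
</property>

The same substitution applies to hive.querylog.location and
hive.server2.logging.operation.log.location if they still carry the template
defaults. Once spark-shell starts cleanly, sqlContext should come up as a
HiveContext, and sqlContext.sql("show databases") is a quick sanity check.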

Thanks
Best Regards


RE: configure spark for hive context

Posted by Mich Talebzadeh <mi...@peridale.co.uk>.
Hi,

Mine works fine and connects to the Hive metastore (same component versions as yours):

spark-sql --master spark://50.140.197.217:7077
15/12/21 10:31:58 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
15/12/21 10:31:58 INFO metastore: Trying to connect to metastore with URI thrift://localhost:9083
15/12/21 10:31:58 INFO metastore: Connected to metastore.
15/12/21 10:31:58 INFO SessionState: Created local directory: /tmp/hive/5105cd07-9461-4251-a3e9-864c887e5d7f_resources
15/12/21 10:31:58 INFO SessionState: Created HDFS directory: /tmp/hive/hduser/5105cd07-9461-4251-a3e9-864c887e5d7f
15/12/21 10:31:58 INFO SessionState: Created local directory: /tmp/hive/5105cd07-9461-4251-a3e9-864c887e5d7f
15/12/21 10:31:58 INFO SessionState: Created HDFS directory: /tmp/hive/hduser/5105cd07-9461-4251-a3e9-864c887e5d7f/_tmp_space.db
15/12/21 10:31:58 INFO SparkContext: Running Spark version 1.5.2
15/12/21 10:31:58 INFO SecurityManager: Changing view acls to: hduser
15/12/21 10:31:58 INFO SecurityManager: Changing modify acls to: hduser
15/12/21 10:31:58 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(hduser); users with modify permissions: Set(hduser)
15/12/21 10:31:59 INFO Slf4jLogger: Slf4jLogger started
15/12/21 10:31:59 INFO Remoting: Starting remoting
15/12/21 10:31:59 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@50.140.197.217:29266]
15/12/21 10:31:59 INFO Utils: Successfully started service 'sparkDriver' on port 29266.
15/12/21 10:31:59 INFO SparkEnv: Registering MapOutputTracker
15/12/21 10:31:59 INFO SparkEnv: Registering BlockManagerMaster
15/12/21 10:31:59 INFO DiskBlockManager: Created local directory at /work/tmp/blockmgr-be06a770-2eb3-4fa6-ac98-470b073892ef
15/12/21 10:31:59 INFO MemoryStore: MemoryStore started with capacity 529.9 MB
15/12/21 10:31:59 INFO HttpFileServer: HTTP File server directory is /work/tmp/spark-b4e8a049-5149-40aa-98cb-82429228e3a4/httpd-a8520eb1-b901-49cb-a0de-20e7d05c361b
15/12/21 10:31:59 INFO HttpServer: Starting HTTP Server
15/12/21 10:31:59 INFO Utils: Successfully started service 'HTTP file server' on port 9495.
15/12/21 10:31:59 INFO SparkEnv: Registering OutputCommitCoordinator
15/12/21 10:32:04 INFO Utils: Successfully started service 'SparkUI' on port 4040.
15/12/21 10:32:04 INFO SparkUI: Started SparkUI at http://50.140.197.217:4040
15/12/21 10:32:04 WARN MetricsSystem: Using default name DAGScheduler for source because spark.app.id is not set.
15/12/21 10:32:04 INFO AppClient$ClientEndpoint: Connecting to master spark://50.140.197.217:7077...
15/12/21 10:32:05 INFO SparkDeploySchedulerBackend: Connected to Spark cluster with app ID app-20151221103205-0002
15/12/21 10:32:05 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 43021.
15/12/21 10:32:05 INFO NettyBlockTransferService: Server created on 43021
15/12/21 10:32:05 INFO BlockManagerMaster: Trying to register BlockManager
15/12/21 10:32:05 INFO BlockManagerMasterEndpoint: Registering block manager 50.140.197.217:43021 with 529.9 MB RAM, BlockManagerId(driver, 50.140.197.217, 43021)
15/12/21 10:32:05 INFO BlockManagerMaster: Registered BlockManager
15/12/21 10:32:05 INFO SparkDeploySchedulerBackend: SchedulerBackend is ready for scheduling beginning after reached minRegisteredResourcesRatio: 0.0
15/12/21 10:32:05 INFO HiveContext: Initializing execution hive, version 1.2.1
15/12/21 10:32:05 INFO ClientWrapper: Inspected Hadoop version: 2.6.0
15/12/21 10:32:05 INFO ClientWrapper: Loaded org.apache.hadoop.hive.shims.Hadoop23Shims for Hadoop version 2.6.0
SET hive.support.sql11.reserved.keywords=false
15/12/21 10:32:05 INFO HiveContext: default warehouse location is /user/hive/warehouse
15/12/21 10:32:05 INFO HiveContext: Initializing HiveMetastoreConnection version 1.2.1 using Spark classes.
15/12/21 10:32:05 INFO ClientWrapper: Inspected Hadoop version: 2.6.0
15/12/21 10:32:05 INFO ClientWrapper: Loaded org.apache.hadoop.hive.shims.Hadoop23Shims for Hadoop version 2.6.0
15/12/21 10:32:06 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
15/12/21 10:32:06 INFO metastore: Trying to connect to metastore with URI thrift://localhost:9083
15/12/21 10:32:06 INFO metastore: Connected to metastore.
15/12/21 10:32:06 INFO SessionState: Created local directory: /tmp/hive/def69f76-1377-4646-a72c-6ee264e54ffc_resources
15/12/21 10:32:06 INFO SessionState: Created HDFS directory: /tmp/hive/hduser/def69f76-1377-4646-a72c-6ee264e54ffc
15/12/21 10:32:06 INFO SessionState: Created local directory: /tmp/hive/def69f76-1377-4646-a72c-6ee264e54ffc
15/12/21 10:32:06 INFO SessionState: Created HDFS directory: /tmp/hive/hduser/def69f76-1377-4646-a72c-6ee264e54ffc/_tmp_space.db
SET spark.sql.hive.version=1.2.1
SET spark.sql.hive.version=1.2.1
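
For reference, probably the only Hive-specific setting the session above
really takes from hive-site.xml is the metastore URI (the log shows it
connecting to thrift://localhost:9083). A minimal sketch of the hive-site.xml
Spark reads from its conf directory, assuming a metastore service is already
listening on that address:

<?xml version="1.0"?>
<configuration>
  <!-- Point Spark's HiveContext at an already-running Hive metastore.
       thrift://localhost:9083 matches the log above; adjust the host
       and port to your environment. -->
  <property>
    <name>hive.metastore.uris</name>
    <value>thrift://localhost:9083</value>
  </property>
</configuration>

With only this in place there are no ${system:...} placeholders left for
Spark to trip over, and the warehouse location falls back to the default
/user/hive/warehouse, as in the log above.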

 

Mich Talebzadeh

 

Sybase ASE 15 Gold Medal Award 2008

A Winning Strategy: Running the most Critical Financial Data on ASE 15

http://login.sybase.com/files/Product_Overviews/ASE-Winning-Strategy-091908.pdf

Author of the books "A Practitioner’s Guide to Upgrading to Sybase ASE 15", ISBN 978-0-9563693-0-7. 

co-author "Sybase Transact SQL Guidelines Best Practices", ISBN 978-0-9759693-0-4

Publications due shortly:

Complex Event Processing in Heterogeneous Environments, ISBN: 978-0-9563693-3-8

Oracle and Sybase, Concepts and Contrasts, ISBN: 978-0-9563693-1-4, volume one out shortly

 

http://talebzadehmich.wordpress.com

 


 
