Posted to user@spark.apache.org by Marco Mistroni <mm...@gmail.com> on 2015/10/13 00:11:58 UTC
Problem installing Spark on Windows 8
Hi all,
I have downloaded spark-1.5.1-bin-hadoop2.4 and extracted it on my machine, but when I go to the \bin directory and invoke spark-shell I get the following exception.
Could anyone assist, please?
I followed the instructions in the ebook Learning Spark, but maybe the instructions are out of date?
kr
marco
15/10/12 23:10:29 WARN ObjectStore: Failed to get database default, returning NoSuchObjectException
15/10/12 23:10:30 WARN : Your hostname, MarcoLaptop resolves to a loopback/non-reachable address: fe80:0:0:0:c5ed:a66d:9d95:5caa%wlan2, but we couldn't find any external IP address!
java.lang.RuntimeException: java.lang.NullPointerException
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
        at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
        at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:162)
        at org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:160)
        at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:167)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
        at org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1028)
        at $iwC$$iwC.<init>(<console>:9)
        at $iwC.<init>(<console>:18)
        at <init>(<console>:20)
        at .<init>(<console>:24)
        at .<clinit>(<console>)
        at .<init>(<console>:7)
        at .<clinit>(<console>)
        at $print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
        at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:132)
        at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
        at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
        at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
        at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
        at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:672)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.NullPointerException
        at java.lang.ProcessBuilder.start(ProcessBuilder.java:1012)
        at org.apache.hadoop.util.Shell.runCommand(Shell.java:445)
        at org.apache.hadoop.util.Shell.run(Shell.java:418)
        at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:650)
        at org.apache.hadoop.util.Shell.execCommand(Shell.java:739)
        at org.apache.hadoop.util.Shell.execCommand(Shell.java:722)
        at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:559)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:534)
        at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:599)
        at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
        ... 56 more
<console>:10: error: not found: value sqlContext
       import sqlContext.implicits._
              ^
<console>:10: error: not found: value sqlContext
       import sqlContext.sql
              ^
Re: Problem installing Spark on Windows 8
Posted by Marco Mistroni <mm...@gmail.com>.
Hi,
I am still having issues installing Spark on Windows 8. The Spark web console runs successfully and I can run the SparkPi example; however, when I run spark-shell I am getting the following exception:
java.lang.RuntimeException: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: ---------
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
        at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
        at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:162)
        at org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:160)
        at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:167)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
I have amended the permissions to give my account full access on Windows 8. I am trying to understand whether this exception can be a blocker if I decide to submit tasks to Spark.
Given that I can run the Spark examples, this does not look like a blocker, but seeing exceptions when I launch the Spark shell does not make me feel comfortable, especially if I don't understand why I am getting them.
Doesn't Spark like Windows 8?
Any suggestions appreciated.
kind regards
marco
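A note on the "---------" permission string above: on Windows, Hadoop reads those bits back by shelling out to winutils.exe, not by looking at the NTFS ACLs edited through Explorer, so the two can disagree. A quick diagnostic sketch, assuming HADOOP_HOME already points at a winutils.exe install:
    rem Show the permission bits Hadoop (and therefore Hive) actually sees
    %HADOOP_HOME%\bin\winutils.exe ls \tmp\hive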
On Thu, Oct 15, 2015 at 11:40 PM, Marco Mistroni <mm...@gmail.com>
wrote:
> Hi
> I tried to set this variable in my Windows environment variables but got the same result.
> This is the result of calling set in my command prompt; have I amended it in the wrong place?
>
> kr
> marco
> ......
> USERDOMAIN=MarcoLaptop
> USERDOMAIN_ROAMINGPROFILE=MarcoLaptop
> USERNAME=marco
> USERPROFILE=C:\Users\marco
> windir=C:\Windows
> _JAVA_OPTIONS=-Djava.net.preferIPv4Stack=true
>
>
> On Thu, Oct 15, 2015 at 1:25 AM, Raghavendra Pandey <
> raghavendra.pandey@gmail.com> wrote:
>
>> Looks like you are facing an IPv6 issue. Can you try turning the preferIPv4
>> property on?
>> On Oct 15, 2015 2:10 AM, "Steve Loughran" <st...@hortonworks.com> wrote:
>>
>>>
>>> On 14 Oct 2015, at 20:56, Marco Mistroni <mm...@gmail.com> wrote:
>>>
>>>
>>> 15/10/14 20:52:35 WARN : Your hostname, MarcoLaptop resolves to a loopback/non-reachable address: fe80:0:0:0:c5ed:a66d:9d95:5caa%wlan2, but we couldn't find any external IP address!
>>> java.lang.RuntimeException: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: ---------
>>>         at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
>>>         at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
>>>         at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:162)
>>>
>>>
>>> now, that I haven't seen. Looks like it thinks the permissions are
>>> wrong, doesn't it?
>>>
>>
>
Re: Problem installing Spark on Windows 8
Posted by Marco Mistroni <mm...@gmail.com>.
Hi,
I tried to set this variable in my Windows environment variables but got the same result.
This is the result of calling set in my command prompt; have I amended it in the wrong place?
kr
marco
......
USERDOMAIN=MarcoLaptop
USERDOMAIN_ROAMINGPROFILE=MarcoLaptop
USERNAME=marco
USERPROFILE=C:\Users\marco
windir=C:\Windows
_JAVA_OPTIONS=-Djava.net.preferIPv4Stack=true
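For what it's worth, one quick way to confirm the variable is actually reaching the JVM: HotSpot prints a "Picked up _JAVA_OPTIONS: ..." banner whenever any Java process starts with that variable set, so running any Java command from the same prompt shows whether it took effect. A sketch:
    rem HotSpot echoes _JAVA_OPTIONS back on startup when it is in effect
    java -version
    rem expected first line (approximately):
    rem   Picked up _JAVA_OPTIONS: -Djava.net.preferIPv4Stack=true
Note that the hostname warning and the /tmp/hive permissions failure are separate problems, so fixing one may not change the other.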
On Thu, Oct 15, 2015 at 1:25 AM, Raghavendra Pandey <
raghavendra.pandey@gmail.com> wrote:
> Looks like you are facing an IPv6 issue. Can you try turning the preferIPv4
> property on?
> On Oct 15, 2015 2:10 AM, "Steve Loughran" <st...@hortonworks.com> wrote:
>
>>
>> On 14 Oct 2015, at 20:56, Marco Mistroni <mm...@gmail.com> wrote:
>>
>>
>> 15/10/14 20:52:35 WARN : Your hostname, MarcoLaptop resolves to a loopback/non-reachable address: fe80:0:0:0:c5ed:a66d:9d95:5caa%wlan2, but we couldn't find any external IP address!
>> java.lang.RuntimeException: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: ---------
>>         at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
>>         at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
>>         at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:162)
>>
>>
>> now, that I haven't seen. Looks like it thinks the permissions are wrong,
>> doesn't it?
>>
>
Re: Problem installing Spark on Windows 8
Posted by Raghavendra Pandey <ra...@gmail.com>.
Looks like you are facing an IPv6 issue. Can you try turning the preferIPv4
property on?
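One way to turn that property on for just the shell's driver JVM, rather than machine-wide, is spark-submit's --driver-java-options flag, which spark-shell also accepts; a minimal sketch (the property value is the one suggested above):
    rem Prefer IPv4 so the driver does not bind to the fe80:... link-local IPv6 address
    spark-shell --driver-java-options "-Djava.net.preferIPv4Stack=true"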
On Oct 15, 2015 2:10 AM, "Steve Loughran" <st...@hortonworks.com> wrote:
>
> On 14 Oct 2015, at 20:56, Marco Mistroni <mm...@gmail.com> wrote:
>
>
> 15/10/14 20:52:35 WARN : Your hostname, MarcoLaptop resolves to a loopback/non-reachable address: fe80:0:0:0:c5ed:a66d:9d95:5caa%wlan2, but we couldn't find any external IP address!
> java.lang.RuntimeException: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: ---------
>         at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
>         at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
>         at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:162)
>
>
> now, that I haven't seen. Looks like it thinks the permissions are wrong,
> doesn't it?
>
Re: Problem installing Spark on Windows 8
Posted by Steve Loughran <st...@hortonworks.com>.
On 14 Oct 2015, at 20:56, Marco Mistroni <mm...@gmail.com> wrote:
15/10/14 20:52:35 WARN : Your hostname, MarcoLaptop resolves to a loopback/non-reachable address: fe80:0:0:0:c5ed:a66d:9d95:5caa%wlan2, but we couldn't find any external IP address!
java.lang.RuntimeException: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: ---------
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
        at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
        at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:162)
now, that I haven't seen. Looks like it thinks the permissions are wrong, doesn't it?
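The permissions Hive is complaining about are the ones winutils.exe reports back to Hadoop, and the commonly cited fix is to open the scratch directory up with winutils itself rather than through Explorer. A hedged sketch, assuming HADOOP_HOME points at a directory containing bin\winutils.exe:
    rem Grant rwx to everyone on the Hive scratch dir, as Hadoop sees it
    rem (\tmp\hive resolves on the drive spark-shell is started from)
    %HADOOP_HOME%\bin\winutils.exe chmod 777 \tmp\hive
    rem Re-check: the listing should now show drwxrwxrwx rather than ---------
    %HADOOP_HOME%\bin\winutils.exe ls \tmp\hive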
Re: Problem installing Spark on Windows 8
Posted by Marco Mistroni <mm...@gmail.com>.
Thanks Steve,
I followed the instructions; Spark is started and I can see the web UI. However, after launching spark-shell I am getting another exception. Is this preventing me from actually using Spark?
kind regards
marco
15/10/14 20:52:35 WARN : Your hostname, MarcoLaptop resolves to a loopback/non-reachable address: fe80:0:0:0:c5ed:a66d:9d95:5caa%wlan2, but we couldn't find any external IP address!
java.lang.RuntimeException: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: ---------
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
        at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171)
        at org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:162)
        at org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:160)
        at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:167)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
        at org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1028)
        at $iwC$$iwC.<init>(<console>:9)
        at $iwC.<init>(<console>:18)
        at <init>(<console>:20)
        at .<init>(<console>:24)
        at .<clinit>(<console>)
        at .<init>(<console>:7)
        at .<clinit>(<console>)
        at $print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
        at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:132)
        at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
        at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
        at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
        at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
        at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
        at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:672)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.RuntimeException: The root scratch dir: /tmp/hive on HDFS should be writable. Current permissions are: ---------
        at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:612)
        at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
        ... 56 more
<console>:10: error: not found: value sqlContext
       import sqlContext.implicits._
              ^
<console>:10: error: not found: value sqlContext
       import sqlContext.sql
              ^
scala> sc
res0: org.apache.spark.SparkContext = org.apache.spark.SparkContext@5fc7255c
scala>
On Tue, Oct 13, 2015 at 5:02 PM, Steve Loughran <st...@hortonworks.com>
wrote:
>
> On 12 Oct 2015, at 23:11, Marco Mistroni <mm...@gmail.com> wrote:
>
> Hi all,
> I have downloaded spark-1.5.1-bin-hadoop2.4 and extracted it on my machine,
> but when I go to the \bin directory and invoke spark-shell I get the
> following exception.
>
> Could anyone assist, please?
>
>
> you've hit this https://wiki.apache.org/hadoop/WindowsProblems
>
> I followed the instructions in the ebook Learning Spark, but maybe the
> instructions are out of date?
> kr
> marco
>
>
> 15/10/12 23:10:29 WARN ObjectStore: Failed to get database default, returning NoSuchObjectException
> 15/10/12 23:10:30 WARN : Your hostname, MarcoLaptop resolves to a loopback/non-reachable address: fe80:0:0:0:c5ed:a66d:9d95:5caa%wlan2, but we couldn't find any external IP address!
> java.lang.RuntimeException: java.lang.NullPointerException
>         at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
> [...]
> Caused by: java.lang.NullPointerException
>         at java.lang.ProcessBuilder.start(ProcessBuilder.java:1012)
> [...]
> <console>:10: error: not found: value sqlContext
>        import sqlContext.implicits._
>               ^
> <console>:10: error: not found: value sqlContext
>        import sqlContext.sql
>               ^
Re: Problem installing Spark on Windows 8
Posted by Steve Loughran <st...@hortonworks.com>.
On 12 Oct 2015, at 23:11, Marco Mistroni <mm...@gmail.com> wrote:
Hi all,
I have downloaded spark-1.5.1-bin-hadoop2.4 and extracted it on my machine, but when I go to the \bin directory and invoke spark-shell I get the following exception.
Could anyone assist, please?
you've hit this https://wiki.apache.org/hadoop/WindowsProblems
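For reference, the workaround described on that wiki page is to install the Hadoop winutils.exe binaries and point HADOOP_HOME at them before launching the shell. A minimal sketch for a Windows command prompt, assuming a winutils.exe built for Hadoop 2.4.x has been placed in C:\hadoop\bin (the paths here are illustrative):
    rem Point Hadoop's native shim at the directory whose bin\ contains winutils.exe
    set HADOOP_HOME=C:\hadoop
    set PATH=%HADOOP_HOME%\bin;%PATH%
    rem Relaunch the shell from the Spark distribution
    cd C:\spark-1.5.1-bin-hadoop2.4\bin
    spark-shell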
I followed the instructions in the ebook Learning Spark, but maybe the instructions are out of date?
kr
marco
15/10/12 23:10:29 WARN ObjectStore: Failed to get database default, returning NoSuchObjectException
15/10/12 23:10:30 WARN : Your hostname, MarcoLaptop resolves to a loopback/non-reachable address: fe80:0:0:0:c5ed:a66d:9d95:5caa%wlan2, but we couldn't find any external IP address!
java.lang.RuntimeException: java.lang.NullPointerException
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
[...]
Caused by: java.lang.NullPointerException
        at java.lang.ProcessBuilder.start(ProcessBuilder.java:1012)
[...]
<console>:10: error: not found: value sqlContext
       import sqlContext.implicits._
              ^
<console>:10: error: not found: value sqlContext
       import sqlContext.sql
              ^