Posted to user@spark.apache.org by Paolo Villaflores <pb...@gmail.com> on 2016/02/16 04:09:13 UTC
IllegalArgumentException UnsatisfiedLinkError snappy-1.1.2
spark-shell error
Hi,
I am trying to run Spark 1.6.0.
I have just installed fresh instances of Hadoop 2.6.0 and Hive 0.14.
Hadoop, MapReduce, Hive and Beeline are all working.
However, as soon as I run `sc.textFile()` within spark-shell, it returns
an error:
$ spark-shell
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/  '_/
   /___/ .__/\_,_/_/ /_/\_\   version 1.6.0
      /_/
Using Scala version 2.10.5 (Java HotSpot(TM) 64-Bit Server VM, Java
1.7.0_67)
Type in expressions to have them evaluated.
Type :help for more information.
Spark context available as sc.
SQL context available as sqlContext.
scala> val textFile = sc.textFile("README.md")
java.lang.IllegalArgumentException: java.lang.UnsatisfiedLinkError:
/tmp/snappy-1.1.2-2ccaf764-c7c4-4ff1-a68e-bbfdec0a3aa1-libsnappyjava.so:
/tmp/snappy-1.1.2-2ccaf764-c7c4-4ff1-a68e-bbfdec0a3aa1-libsnappyjava.so:
failed to map segment from shared object: Operation not permitted
        at org.apache.spark.io.SnappyCompressionCodec.<init>(CompressionCodec.scala:156)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at org.apache.spark.io.CompressionCodec$.createCodec(CompressionCodec.scala:72)
        at org.apache.spark.io.CompressionCodec$.createCodec(CompressionCodec.scala:65)
        at org.apache.spark.broadcast.TorrentBroadcast.org$apache$spark$broadcast$TorrentBroadcast$$setConf(TorrentBroadcast.scala:73)
        at org.apache.spark.broadcast.TorrentBroadcast.<init>(TorrentBroadcast.scala:80)
        at org.apache.spark.broadcast.TorrentBroadcastFactory.newBroadcast(TorrentBroadcastFactory.scala:34)
        at org.apache.spark.broadcast.BroadcastManager.newBroadcast(BroadcastManager.scala:63)
        at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1326)
        at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:1014)
        at org.apache.spark.SparkContext$$anonfun$hadoopFile$1.apply(SparkContext.scala:1011)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
        at org.apache.spark.SparkContext.withScope(SparkContext.scala:714)
        at org.apache.spark.SparkContext.hadoopFile(SparkContext.scala:1011)
        at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:832)
        at org.apache.spark.SparkContext$$anonfun$textFile$1.apply(SparkContext.scala:830)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
        at org.apache.spark.SparkContext.withScope(SparkContext.scala:714)
        at org.apache.spark.SparkContext.textFile(SparkContext.scala:830)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:27)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:32)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:34)
        at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:36)
        at $iwC$$iwC$$iwC$$iwC.<init>(<console>:38)
        at $iwC$$iwC$$iwC.<init>(<console>:40)
        at $iwC$$iwC.<init>(<console>:42)
        at $iwC.<init>(<console>:44)
        at <init>(<console>:46)
        at .<init>(<console>:50)
        at .<clinit>(<console>)
        at .<init>(<console>:7)
        at .<clinit>(<console>)
        at $print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
        at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
        at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.UnsatisfiedLinkError:
/tmp/snappy-1.1.2-2ccaf764-c7c4-4ff1-a68e-bbfdec0a3aa1-libsnappyjava.so:
/tmp/snappy-1.1.2-2ccaf764-c7c4-4ff1-a68e-bbfdec0a3aa1-libsnappyjava.so:
failed to map segment from shared object: Operation not permitted
        at java.lang.ClassLoader$NativeLibrary.load(Native Method)
        at java.lang.ClassLoader.loadLibrary1(ClassLoader.java:1965)
        at java.lang.ClassLoader.loadLibrary0(ClassLoader.java:1890)
        at java.lang.ClassLoader.loadLibrary(ClassLoader.java:1851)
        at java.lang.Runtime.load0(Runtime.java:795)
        at java.lang.System.load(System.java:1062)
        at org.xerial.snappy.SnappyLoader.loadNativeLibrary(SnappyLoader.java:174)
        at org.xerial.snappy.SnappyLoader.load(SnappyLoader.java:152)
        at org.xerial.snappy.Snappy.<clinit>(Snappy.java:46)
        at org.apache.spark.io.SnappyCompressionCodec.<init>(CompressionCodec.scala:154)
        ... 69 more
There is not a lot in Spark's conf. The setup for Spark was done as per:
http://www.tutorialspoint.com/apache_spark/apache_spark_installation.htm
I have already made certain that everything in /tmp is world accessible
(ugo+rwx).
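One thing I have not ruled out is /tmp being mounted noexec, which would
explain "failed to map segment from shared object" even when the
permissions are wide open, since the JVM has to mmap the extracted .so
with execute permission. A quick check (just a sketch, assuming a Linux
shell and root access for the remount):

$ mount | grep /tmp                   # look for "noexec" among the mount options
$ sudo mount -o remount,exec /tmp     # if noexec is set, re-enable exec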
Any advice/ideas would be much appreciated.
Cheers,
Paolo
Re: IllegalArgumentException UnsatisfiedLinkError snappy-1.1.2
spark-shell error
Posted by Arul Ramachandran <ar...@gmail.com>.
Hi Paolo,
Were you able to get this resolved? I am hitting the same issue; could
you please share what your solution was?
Thanks
On Mon, Feb 15, 2016 at 7:49 PM, Paolo Villaflores <pb...@gmail.com>
wrote:
>
> Yes, I have seen that. But java.io.tmpdir has a default definition on
> Linux: it is /tmp.
>
>
>
> On Tue, Feb 16, 2016 at 2:17 PM, Ted Yu <yu...@gmail.com> wrote:
>
>> Have you seen this thread?
>>
>>
>> http://search-hadoop.com/m/q3RTtW43zT1e2nfb&subj=Re+ibsnappyjava+so+failed+to+map+segment+from+shared+object
>>
>> On Mon, Feb 15, 2016 at 7:09 PM, Paolo Villaflores <
>> pbvillaflores@gmail.com> wrote:
>>
>>>
>>> Hi,
>>>
>>> I am trying to run Spark 1.6.0. [original message and stack trace
>>> quoted in full above; trimmed here]
>>>
>>
>
Re: IllegalArgumentException UnsatisfiedLinkError snappy-1.1.2
spark-shell error
Posted by Paolo Villaflores <pb...@gmail.com>.
Yes, I have seen that. But java.io.tmpdir has a default definition on
Linux: it is /tmp.
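That default can be overridden per JVM, though, so one workaround I may
try is pointing the driver's temp directory at a filesystem that allows
exec. A sketch (the directory name is just an example; snappy-java also
appears to honor its own org.xerial.snappy.tempdir property, which could
be set the same way):

$ mkdir -p $HOME/sparktmp
$ spark-shell --driver-java-options "-Djava.io.tmpdir=$HOME/sparktmp"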
On Tue, Feb 16, 2016 at 2:17 PM, Ted Yu <yu...@gmail.com> wrote:
> Have you seen this thread?
>
>
> http://search-hadoop.com/m/q3RTtW43zT1e2nfb&subj=Re+ibsnappyjava+so+failed+to+map+segment+from+shared+object
>
> On Mon, Feb 15, 2016 at 7:09 PM, Paolo Villaflores <
> pbvillaflores@gmail.com> wrote:
>
>>
>> Hi,
>>
>> I am trying to run Spark 1.6.0. [original message and stack trace
>> quoted in full above; trimmed here]
>>
>
Re: IllegalArgumentException UnsatisfiedLinkError snappy-1.1.2
spark-shell error
Posted by Ted Yu <yu...@gmail.com>.
Have you seen this thread?
http://search-hadoop.com/m/q3RTtW43zT1e2nfb&subj=Re+ibsnappyjava+so+failed+to+map+segment+from+shared+object
On Mon, Feb 15, 2016 at 7:09 PM, Paolo Villaflores <pb...@gmail.com>
wrote:
>
> Hi,
>
> I am trying to run Spark 1.6.0. [original message and stack trace
> quoted in full above; trimmed here]
>