Posted to dev@zeppelin.apache.org by Fengdong Yu <fe...@everstring.com> on 2015/11/06 13:42:38 UTC

Zeppelin-0.5.5 test results

Hi, 

I upgraded Zeppelin from 0.5.1 to 0.5.5, but it now complains “cannot find lzo.” The same setup works fine on 0.5.1.

I’ve specified the following in zeppelin-env.sh:
export ZEPPELIN_JAVA_OPTS="-Dspark.jars=xxxxxx/lzo/0.6.0/lib/hadoop-lzo-0.6.0.jar"
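
One way to check whether the jar actually reaches the interpreter JVM is to inspect the running interpreter process. This is a generic diagnostic sketch, assuming the Spark interpreter runs as a separate RemoteInterpreterServer process (as the stack trace below suggests):

# List the interpreter process's command-line tokens and look for the lzo jar.
# The [R] bracket keeps grep from matching its own process entry.
ps aux | grep '[R]emoteInterpreterServer' | tr ' ' '\n' | grep -i lzo

If nothing comes back, the jar never made it onto the interpreter's classpath. The failing paragraph produces: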


java.lang.RuntimeException: Error in configuring object
	at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:109)
	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:75)
	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
	at org.apache.spark.rdd.HadoopRDD.getInputFormat(HadoopRDD.scala:190)
	at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:203)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
	at org.apache.spark.rdd.RDD$$anonfun$treeAggregate$1.apply(RDD.scala:1093)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
	at org.apache.spark.rdd.RDD.treeAggregate(RDD.scala:1091)
	at org.apache.spark.sql.execution.datasources.json.InferSchema$.apply(InferSchema.scala:58)
	at org.apache.spark.sql.execution.datasources.json.JSONRelation$$anonfun$6.apply(JSONRelation.scala:105)
	at org.apache.spark.sql.execution.datasources.json.JSONRelation$$anonfun$6.apply(JSONRelation.scala:100)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.sql.execution.datasources.json.JSONRelation.dataSchema$lzycompute(JSONRelation.scala:100)
	at org.apache.spark.sql.execution.datasources.json.JSONRelation.dataSchema(JSONRelation.scala:99)
	at org.apache.spark.sql.sources.HadoopFsRelation.schema$lzycompute(interfaces.scala:561)
	at org.apache.spark.sql.sources.HadoopFsRelation.schema(interfaces.scala:560)
	at org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(LogicalRelation.scala:31)
	at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:120)
	at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:104)
	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:21)
	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:26)
	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:28)
	at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:30)
	at $iwC$$iwC$$iwC$$iwC.<init>(<console>:32)
	at $iwC$$iwC$$iwC.<init>(<console>:34)
	at $iwC$$iwC.<init>(<console>:36)
	at $iwC.<init>(<console>:38)
	at <init>(<console>:40)
	at .<init>(<console>:44)
	at .<clinit>(<console>)
	at .<init>(<console>:7)
	at .<clinit>(<console>)
	at $print(<console>)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
	at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
	at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
	at org.apache.zeppelin.spark.SparkInterpreter.interpretInput(SparkInterpreter.java:658)
	at org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:623)
	at org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:616)
	at org.apache.zeppelin.interpreter.ClassloaderInterpreter.interpret(ClassloaderInterpreter.java:57)
	at org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:93)
	at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:276)
	at org.apache.zeppelin.scheduler.Job.run(Job.java:170)
	at org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:118)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:178)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:292)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.reflect.InvocationTargetException
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:106)
	... 72 more
Caused by: java.lang.IllegalArgumentException: Compression codec com.hadoop.compression.lzo.LzoCodec not found.
	at org.apache.hadoop.io.compress.CompressionCodecFactory.getCodecClasses(CompressionCodecFactory.java:135)
	at org.apache.hadoop.io.compress.CompressionCodecFactory.<init>(CompressionCodecFactory.java:175)
	at org.apache.hadoop.mapred.TextInputFormat.configure(TextInputFormat.java:45)
	... 77 more
Caused by: java.lang.ClassNotFoundException: Class com.hadoop.compression.lzo.LzoCodec not found
	at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:1980)
	at org.apache.hadoop.io.compress.CompressionCodecFactory.getCodecClasses(CompressionCodecFactory.java:128)
	... 79 more



Re: Zeppelin-0.5.5 test results

Posted by Fengdong Yu <fe...@everstring.com>.
Thanks, Moon.

I'll try that.

Azuryy Yu
Sr. Infrastructure Engineer

cell: 158-0164-9103
wechat: azuryy


On Fri, Nov 6, 2015 at 10:39 PM, moon soo Lee <mo...@apache.org> wrote:

> Thanks for testing.
>
> With 0.5.5, in conf/zeppelin-env.sh:
>
> export SPARK_HOME=...
> export SPARK_SUBMIT_OPTIONS="--jars /your/path.jar,/another/path.jar"
>
> should work. Could you try it this way?
>
> Thanks,
> moon
>
> On Fri, Nov 6, 2015 at 9:43 PM Fengdong Yu <fe...@everstring.com>
> wrote:
>
> > [original message and stack trace quoted in full; trimmed — identical to the first post above]
>

Re: Zeppelin-0.5.5 test results

Posted by moon soo Lee <mo...@apache.org>.
Thanks for testing.

With 0.5.5, in conf/zeppelin-env.sh:

export SPARK_HOME=...
export SPARK_SUBMIT_OPTIONS="--jars /your/path.jar,/another/path.jar"

should work. Could you try it this way?
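
For the LZO case in the original post, that would look roughly like this. This is a sketch only: the SPARK_HOME path is a placeholder, and the jar path keeps the redacted xxxxxx prefix from the original message.

# conf/zeppelin-env.sh
# Placeholder; point this at the Spark installation Zeppelin should use.
export SPARK_HOME=/path/to/spark
# Pass the hadoop-lzo jar to the interpreter through spark-submit's --jars flag.
export SPARK_SUBMIT_OPTIONS="--jars xxxxxx/lzo/0.6.0/lib/hadoop-lzo-0.6.0.jar"

When SPARK_HOME is set, 0.5.5 launches the Spark interpreter through spark-submit, so jars listed in SPARK_SUBMIT_OPTIONS land on the interpreter classpath; a bare -Dspark.jars in ZEPPELIN_JAVA_OPTS no longer reaches that process, which is presumably why the 0.5.1 configuration stopped working.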

Thanks,
moon

On Fri, Nov 6, 2015 at 9:43 PM Fengdong Yu <fe...@everstring.com> wrote:

> [original message and stack trace quoted in full; trimmed — identical to the first post above]