Posted to user@spark.apache.org by Mingyu Kim <mk...@palantir.com> on 2014/06/24 23:44:53 UTC

JavaRDD.mapToPair throws NPE

Hi all,

I'm trying to use JavaRDD.mapToPair(), but it fails with an NPE on the
executor. The PairFunction used in the call is null for some reason. Any
comments/help would be appreciated!

My setup is,
* Java 7
* Spark 1.0.0
* Hadoop 2.0.0-mr1-cdh4.6.0
Here's the code snippet.

> import org.apache.spark.SparkConf;
> import org.apache.spark.api.java.JavaPairRDD;
> import org.apache.spark.api.java.JavaRDD;
> import org.apache.spark.api.java.JavaSparkContext;
> import org.apache.spark.api.java.function.PairFunction;
>
> import scala.Tuple2;
>
> public class Test {
>
>     public static void main(String[] args) {
>         SparkConf conf = new SparkConf()
>             .setMaster("spark://mymaster")
>             .setAppName("MyApp")
>             .setSparkHome("/my/spark/home");
>
>         JavaSparkContext sc = new JavaSparkContext(conf);
>         sc.addJar("/path/to/jar"); // ship the jar of this class
>         JavaRDD<String> rdd = sc.textFile("/path/to/nums.csv"); // nums.csv simply has one integer per line
>         JavaPairRDD<Integer, Integer> pairRdd = rdd.mapToPair(new MyPairFunction());
>
>         System.out.println(pairRdd.collect());
>     }
>
>     private static final class MyPairFunction implements PairFunction<String, Integer, Integer> {
>         private static final long serialVersionUID = 1L;
>
>         @Override
>         public Tuple2<Integer, Integer> call(String s) throws Exception {
>             return new Tuple2<Integer, Integer>(Integer.parseInt(s), Integer.parseInt(s));
>         }
>     }
> }
>
Here's the stack trace.
>
> Exception in thread "main" 14/06/24 14:39:01 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:0 failed 4 times, most recent failure: Exception failure in TID 6 on host 10.160.24.216: java.lang.NullPointerException
>         org.apache.spark.api.java.JavaPairRDD$$anonfun$pairFunToScalaFun$1.apply(JavaPairRDD.scala:750)
>         org.apache.spark.api.java.JavaPairRDD$$anonfun$pairFunToScalaFun$1.apply(JavaPairRDD.scala:750)
>         scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
>         scala.collection.Iterator$class.foreach(Iterator.scala:727)
>         scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>         scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
>         scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
>         scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
>         scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
>         scala.collection.AbstractIterator.to(Iterator.scala:1157)
>         scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
>         scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
>         scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
>         scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
>         org.apache.spark.rdd.RDD$$anonfun$15.apply(RDD.scala:717)
>         org.apache.spark.rdd.RDD$$anonfun$15.apply(RDD.scala:717)
>         org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1080)
>         org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1080)
>         org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:111)
>         org.apache.spark.scheduler.Task.run(Task.scala:51)
>         org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:187)
>         java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>         java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>         java.lang.Thread.run(Thread.java:722)
>
> Driver stacktrace:
> at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1033)
> at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1017)
> at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1015)
> at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
> at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
> at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1015)
> at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:633)
> at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:633)
> at scala.Option.foreach(Option.scala:236)
> at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:633)
> at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1207)
> at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)


Mingyu



Re: JavaRDD.mapToPair throws NPE

Posted by Andrew Ash <an...@andrewash.com>.
I think this may be similar to
https://issues.apache.org/jira/browse/SPARK-2292, so follow that ticket to
see how it gets resolved.
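
In the meantime, one thing that might be worth a quick experiment is expressing
the same transformation with mapPartitionsToPair instead of mapToPair, since
that takes a PairFlatMapFunction and doesn't go through the pairFunToScalaFun
wrapper that shows up at JavaPairRDD.scala:750 in your trace. This is only a
rough sketch against the Spark 1.0 Java API, untested on your setup; it keeps
your rdd variable as posted and additionally needs
org.apache.spark.api.java.function.PairFlatMapFunction, java.util.Iterator,
java.util.List and java.util.ArrayList imported.

    JavaPairRDD<Integer, Integer> pairRdd = rdd.mapPartitionsToPair(
        new PairFlatMapFunction<Iterator<String>, Integer, Integer>() {
            @Override
            public Iterable<Tuple2<Integer, Integer>> call(Iterator<String> lines) throws Exception {
                // Same logic as MyPairFunction: parse each line and emit an (n, n) pair,
                // but for a whole partition at a time.
                List<Tuple2<Integer, Integer>> out = new ArrayList<Tuple2<Integer, Integer>>();
                while (lines.hasNext()) {
                    int n = Integer.parseInt(lines.next());
                    out.add(new Tuple2<Integer, Integer>(n, n));
                }
                return out;
            }
        });

If the NPE goes away, that points at something specific to how the PairFunction
gets wrapped; if it doesn't, the function object itself is probably not making
it to the executors intact. Either way it may be worth confirming that the jar
you pass to addJar actually contains MyPairFunction (for example with
jar tf /path/to/jar | grep MyPairFunction) and that it was built against the
same Spark 1.0.0 build the cluster is running.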

Andrew

