Posted to user@spark.apache.org by Ping Tang <pt...@aerohive.com> on 2014/11/10 09:31:01 UTC

"-Error stopping receiver" in running Spark+Flume sample code "FlumeEventCount.scala"

Hi,

Can somebody help me understand why the error below occurs when I run the Spark+Flume example FlumeEventCount.scala?
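
For context, I am running the stock FlumeEventCount example more or less unmodified. Condensed, the driver code boils down to the sketch below (the object name here is mine; host and port come from the command line, and the 2-second batch interval and replicated serialized storage level are what the shipped example uses):

  import org.apache.spark.SparkConf
  import org.apache.spark.storage.StorageLevel
  import org.apache.spark.streaming.{Milliseconds, StreamingContext}
  import org.apache.spark.streaming.flume.FlumeUtils

  object FlumeEventCountSketch {
    def main(args: Array[String]): Unit = {
      val Array(host, port) = args  // e.g. "localhost" and "41414"

      val sparkConf = new SparkConf().setAppName("FlumeEventCount")
      val ssc = new StreamingContext(sparkConf, Milliseconds(2000))

      // Push-based Flume receiver listening on host:port
      val stream = FlumeUtils.createStream(ssc, host, port.toInt, StorageLevel.MEMORY_ONLY_SER_2)

      // Print how many Flume events arrived in each 2-second batch
      stream.count().map(cnt => "Received " + cnt + " flume events.").print()

      ssc.start()
      ssc.awaitTermination()
    }
  }

The failure shows up as soon as the receiver task is launched on the executor: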


2014-11-10 00:17:44,512 INFO  [Executor task launch worker-0] receiver.BlockGenerator (Logging.scala:logInfo(59)) - Started BlockGenerator
2014-11-10 00:17:44,513 INFO  [Executor task launch worker-0] receiver.ReceiverSupervisorImpl (Logging.scala:logInfo(59)) - Starting receiver
2014-11-10 00:17:44,513 INFO  [Thread-31] receiver.BlockGenerator (Logging.scala:logInfo(59)) - Started block pushing thread
2014-11-10 00:17:44,789 INFO  [Executor task launch worker-0] receiver.ReceiverSupervisorImpl (Logging.scala:logInfo(59)) - Stopping receiver with message: Error starting receiver 0: java.lang.AbstractMethodError

2014-11-10 00:17:44,796 ERROR [Executor task launch worker-0] receiver.ReceiverSupervisorImpl (Logging.scala:logError(75)) - Error stopping receiver 0
org.apache.spark.Logging$class.log(Logging.scala:52)
org.apache.spark.streaming.flume.FlumeReceiver.log(FlumeInputDStream.scala:134)
org.apache.spark.Logging$class.logInfo(Logging.scala:59)
org.apache.spark.streaming.flume.FlumeReceiver.logInfo(FlumeInputDStream.scala:134)
org.apache.spark.streaming.flume.FlumeReceiver.onStop(FlumeInputDStream.scala:151)
org.apache.spark.streaming.receiver.ReceiverSupervisor.stopReceiver(ReceiverSupervisor.scala:136)
org.apache.spark.streaming.receiver.ReceiverSupervisor.stop(ReceiverSupervisor.scala:112)
org.apache.spark.streaming.receiver.ReceiverSupervisor.startReceiver(ReceiverSupervisor.scala:127)
org.apache.spark.streaming.receiver.ReceiverSupervisor.start(ReceiverSupervisor.scala:106)
org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverLauncher$$anonfun$9.apply(ReceiverTracker.scala:264)
org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverLauncher$$anonfun$9.apply(ReceiverTracker.scala:257)
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1121)
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1121)
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
org.apache.spark.scheduler.Task.run(Task.scala:54)
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:180)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
java.lang.Thread.run(Thread.java:744)

2014-11-10 00:17:44,797 INFO  [Executor task launch worker-0] receiver.BlockGenerator (Logging.scala:logInfo(59)) - Stopping BlockGenerator
2014-11-10 00:17:44,800 INFO  [Executor task launch worker-0] util.RecurringTimer (Logging.scala:logInfo(59)) - Stopped timer for BlockGenerator after time 1415607464800
2014-11-10 00:17:44,801 INFO  [Executor task launch worker-0] receiver.BlockGenerator (Logging.scala:logInfo(59)) - Waiting for block pushing thread
2014-11-10 00:17:44,815 INFO  [Thread-31] receiver.BlockGenerator (Logging.scala:logInfo(59)) - Pushing out the last 0 blocks
2014-11-10 00:17:44,816 INFO  [Thread-31] receiver.BlockGenerator (Logging.scala:logInfo(59)) - Stopped block pushing thread
2014-11-10 00:17:44,816 INFO  [Executor task launch worker-0] receiver.BlockGenerator (Logging.scala:logInfo(59)) - Stopped BlockGenerator
2014-11-10 00:17:44,817 INFO  [Executor task launch worker-0] receiver.ReceiverSupervisorImpl (Logging.scala:logInfo(59)) - Waiting for executor stop is over
2014-11-10 00:17:44,818 ERROR [Executor task launch worker-0] receiver.ReceiverSupervisorImpl (Logging.scala:logError(75)) - Stopped executor with error: java.lang.AbstractMethodError

2014-11-10 00:17:44,820 ERROR [Executor task launch worker-0] executor.Executor (Logging.scala:logError(96)) - Exception in task 0.0 in stage 0.0 (TID 0)
java.lang.AbstractMethodError
at org.apache.spark.Logging$class.log(Logging.scala:52)
at org.apache.spark.streaming.flume.FlumeReceiver.log(FlumeInputDStream.scala:134)
at org.apache.spark.Logging$class.logInfo(Logging.scala:59)
at org.apache.spark.streaming.flume.FlumeReceiver.logInfo(FlumeInputDStream.scala:134)
at org.apache.spark.streaming.flume.FlumeReceiver.onStart(FlumeInputDStream.scala:146)
at org.apache.spark.streaming.receiver.ReceiverSupervisor.startReceiver(ReceiverSupervisor.scala:121)
at org.apache.spark.streaming.receiver.ReceiverSupervisor.start(ReceiverSupervisor.scala:106)
at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverLauncher$$anonfun$9.apply(ReceiverTracker.scala:264)
at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverLauncher$$anonfun$9.apply(ReceiverTracker.scala:257)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1121)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1121)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
at org.apache.spark.scheduler.Task.run(Task.scala:54)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:180)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:744)

2014-11-10 00:17:44,832 ERROR [Executor task launch worker-0] executor.ExecutorUncaughtExceptionHandler (Logging.scala:logError(96)) - Uncaught exception in thread Thread[Executor task launch worker-0,5,main]
java.lang.AbstractMethodError
at org.apache.spark.Logging$class.log(Logging.scala:52)
at org.apache.spark.streaming.flume.FlumeReceiver.log(FlumeInputDStream.scala:134)
at org.apache.spark.Logging$class.logInfo(Logging.scala:59)
at org.apache.spark.streaming.flume.FlumeReceiver.logInfo(FlumeInputDStream.scala:134)
at org.apache.spark.streaming.flume.FlumeReceiver.onStart(FlumeInputDStream.scala:146)
at org.apache.spark.streaming.receiver.ReceiverSupervisor.startReceiver(ReceiverSupervisor.scala:121)
at org.apache.spark.streaming.receiver.ReceiverSupervisor.start(ReceiverSupervisor.scala:106)
at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverLauncher$$anonfun$9.apply(ReceiverTracker.scala:264)
at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverLauncher$$anonfun$9.apply(ReceiverTracker.scala:257)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1121)
at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1121)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
at org.apache.spark.scheduler.Task.run(Task.scala:54)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:180)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:744)
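
In case the build setup matters: my test project pulls in spark-streaming and spark-streaming-flume roughly as sketched below. The exact Spark and Scala version numbers are my assumption of what is deployed on the cluster, so a mismatch there is entirely possible and I would appreciate a pointer if a different combination is expected.

  // build.sbt (sketch) -- the Spark/Scala versions below are assumptions,
  // not confirmed against what the cluster is actually running
  scalaVersion := "2.10.4"

  libraryDependencies ++= Seq(
    // Spark streaming is provided by the cluster at runtime
    "org.apache.spark" %% "spark-streaming"       % "1.1.0" % "provided",
    // The Flume connector is bundled with the application jar
    "org.apache.spark" %% "spark-streaming-flume" % "1.1.0"
  )

Thanks in advance for any hints.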