Posted to issues@spark.apache.org by "Sean Owen (JIRA)" <ji...@apache.org> on 2016/08/02 14:03:20 UTC

[jira] [Commented] (SPARK-16852) RejectedExecutionException when exit at some times

    [ https://issues.apache.org/jira/browse/SPARK-16852?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15404020#comment-15404020 ] 

Sean Owen commented on SPARK-16852:
-----------------------------------

This is an effect rather than a cause. Can you say more about what triggers it or what the bug is? I don't think this is actionable.

> RejectedExecutionException when exit at some times
> --------------------------------------------------
>
>                 Key: SPARK-16852
>                 URL: https://issues.apache.org/jira/browse/SPARK-16852
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>            Reporter: Weizhong
>            Priority: Minor
>
> If we run a huge job, a RejectedExecutionException is sometimes printed when the application exits (a minimal sketch of the failure mode follows the trace):
> {noformat}
> 16/05/27 08:30:40 ERROR client.TransportResponseHandler: Still have 3 requests outstanding when connection from HGH1000017808/10.184.66.104:41980 is closed
> java.util.concurrent.RejectedExecutionException: Task scala.concurrent.impl.CallbackRunnable@6b66dba rejected from java.util.concurrent.ThreadPoolExecutor@60725736[Terminated, pool size = 0, active threads = 0, queued tasks = 0, completed tasks = 269]
> 	at java.util.concurrent.ThreadPoolExecutor$AbortPolicy.rejectedExecution(ThreadPoolExecutor.java:2047)
> 	at java.util.concurrent.ThreadPoolExecutor.reject(ThreadPoolExecutor.java:823)
> 	at java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1369)
> 	at scala.concurrent.impl.ExecutionContextImpl$$anon$1.execute(ExecutionContextImpl.scala:133)
> 	at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:40)
> 	at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:248)
> 	at scala.concurrent.Promise$class.complete(Promise.scala:55)
> 	at scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:153)
> 	at scala.concurrent.Future$$anonfun$recover$1.apply(Future.scala:324)
> 	at scala.concurrent.Future$$anonfun$recover$1.apply(Future.scala:324)
> 	at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:32)
> 	at org.spark-project.guava.util.concurrent.MoreExecutors$SameThreadExecutorService.execute(MoreExecutors.java:293)
> 	at scala.concurrent.impl.ExecutionContextImpl$$anon$1.execute(ExecutionContextImpl.scala:133)
> 	at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:40)
> 	at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:248)
> 	at scala.concurrent.Promise$class.complete(Promise.scala:55)
> 	at scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:153)
> 	at scala.concurrent.Future$$anonfun$map$1.apply(Future.scala:235)
> 	at scala.concurrent.Future$$anonfun$map$1.apply(Future.scala:235)
> 	at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:32)
> 	at scala.concurrent.Future$InternalCallbackExecutor$Batch$$anonfun$run$1.processBatch$1(Future.scala:643)
> 	at scala.concurrent.Future$InternalCallbackExecutor$Batch$$anonfun$run$1.apply$mcV$sp(Future.scala:658)
> 	at scala.concurrent.Future$InternalCallbackExecutor$Batch$$anonfun$run$1.apply(Future.scala:635)
> 	at scala.concurrent.Future$InternalCallbackExecutor$Batch$$anonfun$run$1.apply(Future.scala:635)
> 	at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:72)
> 	at scala.concurrent.Future$InternalCallbackExecutor$Batch.run(Future.scala:634)
> 	at scala.concurrent.Future$InternalCallbackExecutor$.scala$concurrent$Future$InternalCallbackExecutor$$unbatchedExecute(Future.scala:694)
> 	at scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:685)
> 	at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:40)
> 	at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:248)
> 	at scala.concurrent.Promise$class.tryFailure(Promise.scala:115)
> 	at scala.concurrent.impl.Promise$DefaultPromise.tryFailure(Promise.scala:153)
> 	at org.apache.spark.rpc.netty.NettyRpcEnv.org$apache$spark$rpc$netty$NettyRpcEnv$$onFailure$1(NettyRpcEnv.scala:192)
> 	at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$1.apply(NettyRpcEnv.scala:214)
> 	at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$1.apply(NettyRpcEnv.scala:214)
> 	at org.apache.spark.rpc.netty.RpcOutboxMessage.onFailure(Outbox.scala:74)
> 	at org.apache.spark.network.client.TransportResponseHandler.failOutstandingRequests(TransportResponseHandler.java:90)
> 	at org.apache.spark.network.client.TransportResponseHandler.channelUnregistered(TransportResponseHandler.java:104)
> 	at org.apache.spark.network.server.TransportChannelHandler.channelUnregistered(TransportChannelHandler.java:94)
> 	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
> 	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
> 	at io.netty.channel.ChannelInboundHandlerAdapter.channelUnregistered(ChannelInboundHandlerAdapter.java:53)
> 	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
> 	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
> 	at io.netty.channel.ChannelInboundHandlerAdapter.channelUnregistered(ChannelInboundHandlerAdapter.java:53)
> 	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
> 	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
> 	at io.netty.channel.ChannelInboundHandlerAdapter.channelUnregistered(ChannelInboundHandlerAdapter.java:53)
> 	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelUnregistered(AbstractChannelHandlerContext.java:158)
> 	at io.netty.channel.AbstractChannelHandlerContext.fireChannelUnregistered(AbstractChannelHandlerContext.java:144)
> 	at io.netty.channel.DefaultChannelPipeline.fireChannelUnregistered(DefaultChannelPipeline.java:739)
> 	at io.netty.channel.AbstractChannel$AbstractUnsafe$8.run(AbstractChannel.java:659)
> 	at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:328)
> 	at io.netty.util.concurrent.SingleThreadEventExecutor.confirmShutdown(SingleThreadEventExecutor.java:627)
> 	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:362)
> 	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
> 	at java.lang.Thread.run(Thread.java:745)
> {noformat}
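> The trace shows the RPC layer failing its outstanding requests during shutdown: completing the promise tries to schedule the failure callback on a ThreadPoolExecutor that has already terminated, so the submission is rejected. Below is a minimal, self-contained sketch (not Spark code; the names are illustrative) of that mechanism, assuming a callback registered on an ExecutionContext whose backing pool is shut down before the promise completes. The rejection surfaces through the ExecutionContext's failure reporter and is printed as a stack trace, much like the log above.
> {noformat}
> import java.util.concurrent.{Executors, TimeUnit}
> import scala.concurrent.{ExecutionContext, Promise}
>
> // Minimal sketch (not Spark code): once the backing ThreadPoolExecutor has
> // terminated, the callback registered on the promise can no longer be
> // scheduled, and the ExecutionContext reports a RejectedExecutionException.
> object RejectedOnShutdownSketch {
>   def main(args: Array[String]): Unit = {
>     val pool = Executors.newFixedThreadPool(1)
>     implicit val ec: ExecutionContext = ExecutionContext.fromExecutorService(pool)
>
>     val promise = Promise[Int]()
>     // The callback must be scheduled on `pool` when the promise completes.
>     promise.future.onComplete(result => println(s"callback ran: $result"))
>
>     // Shut the pool down first, as happens while the application is exiting.
>     pool.shutdown()
>     pool.awaitTermination(5, TimeUnit.SECONDS)
>
>     // Completing the promise now tries to submit the callback to the
>     // terminated executor; the rejection is printed by the default reporter.
>     promise.tryFailure(new RuntimeException("connection closed"))
>   }
> }
> {noformat}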



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org