Posted to issues@spark.apache.org by "wuyi (Jira)" <ji...@apache.org> on 2021/08/02 14:20:00 UTC

[jira] [Updated] (SPARK-36383) NullPointerException thrown during executor shutdown

     [ https://issues.apache.org/jira/browse/SPARK-36383?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

wuyi updated SPARK-36383:
-------------------------
    Summary: NullPointerException thrown during executor shutdown  (was: Avoid NullPointerException during executor shutdown)

> NullPointerException thrown during executor shutdown
> ----------------------------------------------------
>
>                 Key: SPARK-36383
>                 URL: https://issues.apache.org/jira/browse/SPARK-36383
>             Project: Spark
>          Issue Type: Improvement
>          Components: Spark Core
>    Affects Versions: 3.0.3, 3.1.2, 3.2.0, 3.3.0
>            Reporter: wuyi
>            Priority: Major
>
> {code:java}
> 21/07/23 16:04:10 WARN Executor: Unable to stop executor metrics poller
> java.lang.NullPointerException
>         at org.apache.spark.executor.Executor.stop(Executor.scala:318)
>         at org.apache.spark.executor.Executor.$anonfun$stopHookReference$1(Executor.scala:76)
>         at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:214)
>         at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$2(ShutdownHookManager.scala:188)
>         at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>         at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2025)
>         at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$1(ShutdownHookManager.scala:188)
>         at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>         at scala.util.Try$.apply(Try.scala:213)
>         at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:188)
>         at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:178)
>         at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>         at java.lang.Thread.run(Thread.java:748)
> 21/07/23 16:04:10 WARN Executor: Unable to stop heartbeater
> java.lang.NullPointerException
>         at org.apache.spark.executor.Executor.stop(Executor.scala:324)
>         at org.apache.spark.executor.Executor.$anonfun$stopHookReference$1(Executor.scala:76)
>         at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:214)
>         at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$2(ShutdownHookManager.scala:188)
>         at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>         at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2025)
>         at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$1(ShutdownHookManager.scala:188)
>         at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>         at scala.util.Try$.apply(Try.scala:213)
>         at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:188)
>         at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:178)
>         at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>         at java.lang.Thread.run(Thread.java:748)
> 21/07/23 16:04:10 ERROR Utils: Uncaught exception in thread shutdown-hook-0
> java.lang.NullPointerException
>         at org.apache.spark.executor.Executor.$anonfun$stop$3(Executor.scala:334)
>         at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>         at org.apache.spark.util.Utils$.withContextClassLoader(Utils.scala:231)
>         at org.apache.spark.executor.Executor.stop(Executor.scala:334)
>         at org.apache.spark.executor.Executor.$anonfun$stopHookReference$1(Executor.scala:76)
>         at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:214)
>         at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$2(ShutdownHookManager.scala:188)
>         at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>         at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:2025)
>         at org.apache.spark.util.SparkShutdownHookManager.$anonfun$runAll$1(ShutdownHookManager.scala:188)
>         at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>         at scala.util.Try$.apply(Try.scala:213)
>         at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:188)
>         at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:178)
>         at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>         at java.lang.Thread.run(Thread.java:748)
> {code}
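
The warnings point at the components Executor.stop() touches (the executor metrics poller, the heartbeater, and, judging by Utils.withContextClassLoader in the last trace, the plugin shutdown), which suggests those fields are still null when the shutdown hook fires, e.g. because the executor never finished constructing. The sketch below illustrates a null-guard pattern that tolerates such a race; the names (ExecutorLike, MetricsPoller, Heartbeater) are hypothetical stand-ins, not the actual Spark patch.

{code:scala}
import java.util.concurrent.atomic.AtomicBoolean

// Hypothetical stand-ins for the components Executor.stop() touches.
class MetricsPoller { def stop(): Unit = println("metrics poller stopped") }
class Heartbeater   { def stop(): Unit = println("heartbeater stopped") }

class ExecutorLike {
  private val stopped = new AtomicBoolean(false)

  // stop() may run from a shutdown hook; if construction failed part-way,
  // these fields can still be null at that point.
  @volatile private var metricsPoller: MetricsPoller = _
  @volatile private var heartbeater: Heartbeater = _

  def init(): Unit = {
    metricsPoller = new MetricsPoller
    heartbeater = new Heartbeater
  }

  def stop(): Unit = {
    if (!stopped.getAndSet(true)) {
      // Option(x) is None when x is null, so a half-constructed instance
      // skips the missing component instead of throwing NullPointerException.
      Option(metricsPoller).foreach(_.stop())
      Option(heartbeater).foreach(_.stop())
    }
  }
}

object ShutdownRace {
  def main(args: Array[String]): Unit = {
    val executor = new ExecutorLike
    // Simulate the shutdown hook firing before init() ever ran:
    executor.stop() // no NPE; the guards simply skip the null components
  }
}
{code}

The same effect could be achieved with explicit != null checks; Option(...) just keeps each guard to a single line.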


