Posted to issues@spark.apache.org by "Dongjoon Hyun (Jira)" <ji...@apache.org> on 2023/08/24 03:14:00 UTC

[jira] [Resolved] (SPARK-44936) Simplify the log when Spark HybridStore hits the memory limit

     [ https://issues.apache.org/jira/browse/SPARK-44936?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Dongjoon Hyun resolved SPARK-44936.
-----------------------------------
    Fix Version/s: 4.0.0
       Resolution: Fixed

Issue resolved by pull request 42638
[https://github.com/apache/spark/pull/42638]

> Simplify the log when Spark HybridStore hits the memory limit
> -------------------------------------------------------------
>
>                 Key: SPARK-44936
>                 URL: https://issues.apache.org/jira/browse/SPARK-44936
>             Project: Spark
>          Issue Type: Improvement
>          Components: Spark Core
>    Affects Versions: 4.0.0
>            Reporter: Dongjoon Hyun
>            Assignee: Dongjoon Hyun
>            Priority: Minor
>             Fix For: 4.0.0
>
>
> *BEFORE*
> {code}
> 23/08/23 22:40:34 INFO FsHistoryProvider: Failed to create HybridStore for spark-1692805262618-xbiqs4fjqysv62d6708nx424qb0d4-driver-job/None. Using ROCKSDB.
> java.lang.RuntimeException: Not enough memory to create hybrid store for app spark-1692805262618-xbiqs4fjqysv62d6708nx424qb0d4-driver-job / None.
> 	at org.apache.spark.deploy.history.HistoryServerMemoryManager.lease(HistoryServerMemoryManager.scala:54)
> 	at org.apache.spark.deploy.history.FsHistoryProvider.createHybridStore(FsHistoryProvider.scala:1256)
> 	at org.apache.spark.deploy.history.FsHistoryProvider.loadDiskStore(FsHistoryProvider.scala:1231)
> 	at org.apache.spark.deploy.history.FsHistoryProvider.getAppUI(FsHistoryProvider.scala:342)
> 	at org.apache.spark.deploy.history.HistoryServer.getAppUI(HistoryServer.scala:199)
> 	at org.apache.spark.deploy.history.ApplicationCache.$anonfun$loadApplicationEntry$2(ApplicationCache.scala:163)
> 	at org.apache.spark.deploy.history.ApplicationCache.time(ApplicationCache.scala:134)
> 	at org.apache.spark.deploy.history.ApplicationCache.org$apache$spark$deploy$history$ApplicationCache$$loadApplicationEntry(ApplicationCache.scala:161)
> 	at org.apache.spark.deploy.history.ApplicationCache$$anon$1.load(ApplicationCache.scala:55)
> 	at org.apache.spark.deploy.history.ApplicationCache$$anon$1.load(ApplicationCache.scala:51)
> 	at org.sparkproject.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> 	at org.sparkproject.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> 	at org.sparkproject.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> 	at org.sparkproject.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
> 	at org.sparkproject.guava.cache.LocalCache.get(LocalCache.java:4000)
> 	at org.sparkproject.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> 	at org.sparkproject.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> 	at org.apache.spark.deploy.history.ApplicationCache.get(ApplicationCache.scala:88)
> 	at org.apache.spark.deploy.history.ApplicationCache.withSparkUI(ApplicationCache.scala:100)
> 	at org.apache.spark.deploy.history.HistoryServer.org$apache$spark$deploy$history$HistoryServer$$loadAppUi(HistoryServer.scala:256)
> 	at org.apache.spark.deploy.history.HistoryServer$$anon$1.doGet(HistoryServer.scala:104)
> 	at javax.servlet.http.HttpServlet.service(HttpServlet.java:503)
> 	at javax.servlet.http.HttpServlet.service(HttpServlet.java:590)
> 	at org.sparkproject.jetty.servlet.ServletHolder.handle(ServletHolder.java:799)
> 	at org.sparkproject.jetty.servlet.ServletHandler$ChainEnd.doFilter(ServletHandler.java:1656)
> 	at org.apache.spark.ui.HttpSecurityFilter.doFilter(HttpSecurityFilter.scala:95)
> 	at org.sparkproject.jetty.servlet.FilterHolder.doFilter(FilterHolder.java:193)
> 	at org.sparkproject.jetty.servlet.ServletHandler$Chain.doFilter(ServletHandler.java:1626)
> 	at org.sparkproject.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:552)
> 	at org.sparkproject.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
> 	at org.sparkproject.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1440)
> 	at org.sparkproject.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
> 	at org.sparkproject.jetty.servlet.ServletHandler.doScope(ServletHandler.java:505)
> 	at org.sparkproject.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
> 	at org.sparkproject.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1355)
> 	at org.sparkproject.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
> 	at org.sparkproject.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:772)
> 	at org.sparkproject.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:234)
> 	at org.sparkproject.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
> 	at org.sparkproject.jetty.server.Server.handle(Server.java:516)
> 	at org.sparkproject.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:487)
> 	at org.sparkproject.jetty.server.HttpChannel.dispatch(HttpChannel.java:732)
> 	at org.sparkproject.jetty.server.HttpChannel.handle(HttpChannel.java:479)
> 	at org.sparkproject.jetty.server.HttpConnection.onFillable(HttpConnection.java:277)
> 	at org.sparkproject.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
> 	at org.sparkproject.jetty.io.FillInterest.fillable(FillInterest.java:105)
> 	at org.sparkproject.jetty.io.ChannelEndPoint$1.run(ChannelEndPoint.java:104)
> 	at org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:338)
> 	at org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:315)
> 	at org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:173)
> 	at org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:131)
> 	at org.sparkproject.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:409)
> 	at org.sparkproject.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:883)
> 	at org.sparkproject.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:1034)
> 	at java.base/java.lang.Thread.run(Thread.java:833)
> {code}
> *AFTER*
> {code}
> 23/08/23 15:49:45 INFO FsHistoryProvider: Failed to create HybridStore for spark-1692813258617-4ami0jfq05ev2yyp0dxwdgg4jq3oj-driver-job/None. Using ROCKSDB. Not enough memory to create hybrid store for app spark-1692813258617-4ami0jfq05ev2yyp0dxwdgg4jq3oj-driver-job / None.
> {code}
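> The change itself is small: the memory-limit RuntimeException thrown by HistoryServerMemoryManager.lease is an expected condition during fallback, so the handler can append the exception's message to the single INFO line instead of handing the throwable to the logger. A minimal Scala sketch of the pattern follows; the object, method bodies, and app id are hypothetical stand-ins, not the actual FsHistoryProvider internals or the exact code from pull request 42638:
> {code}
> import scala.util.control.NonFatal
>
> // Hypothetical sketch of the logging change, not the real FsHistoryProvider.
> object HybridStoreFallbackSketch {
>   def logInfo(msg: String): Unit = println(s"INFO FsHistoryProvider: $msg")
>
>   // Stand-in: the real method throws when HistoryServerMemoryManager
>   // cannot lease enough memory for the in-memory store.
>   def createHybridStore(appId: String): Unit =
>     throw new RuntimeException(
>       s"Not enough memory to create hybrid store for app $appId.")
>
>   def createDiskStore(appId: String): Unit = ()  // fall back to RocksDB
>
>   def loadDiskStore(appId: String): Unit = {
>     try {
>       createHybridStore(appId)
>     } catch {
>       case NonFatal(e) =>
>         // BEFORE: the throwable was passed to the logger, printing a
>         // ~50-frame stack trace for an expected, recoverable condition.
>         // AFTER: only the message is appended, keeping the log to one line.
>         logInfo(s"Failed to create HybridStore for $appId. " +
>           s"Using ROCKSDB. ${e.getMessage}")
>         createDiskStore(appId)
>     }
>   }
>
>   def main(args: Array[String]): Unit =
>     loadDiskStore("spark-example-app/None")
> }
> {code}
> Running the sketch prints a single line in the shape of the *AFTER* output above, which is the point of the fix: the fallback to RocksDB is routine, so the stack trace adds noise without diagnostic value.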


