You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by ankuriitg <gi...@git.apache.org> on 2018/10/09 20:53:56 UTC
[GitHub] spark pull request #22504: [SPARK-25118][Submit] Persist Driver Logs in Client mode
Github user ankuriitg commented on a diff in the pull request:
https://github.com/apache/spark/pull/22504#discussion_r223859868
--- Diff: core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala ---
@@ -806,6 +806,22 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
}
// Clean the blacklist from the expired entries.
clearBlacklist(CLEAN_INTERVAL_S)
+
+ // Delete driver logs from the configured spark dfs dir that exceed the configured max age
+ try {
+ val hdfsDir = conf.get("spark.driver.log.dfsDir")
+ val appDirs = fs.listLocatedStatus(new Path(hdfsDir))
+ while (appDirs.hasNext()) {
+ val appDirStatus = appDirs.next()
+ if (appDirStatus.getModificationTime() < maxTime) {
+ logInfo(s"Deleting expired driver log for: ${appDirStatus.getPath().getName()}")
+ deleteLog(appDirStatus.getPath())
+ }
+ }
+ } catch {
+ case nse: NoSuchElementException => // no-op
--- End diff --
conf.get("spark.driver.log.dfsDir")
---
---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org