Posted to reviews@spark.apache.org by vanzin <gi...@git.apache.org> on 2018/11/02 18:28:18 UTC

[GitHub] spark pull request #22504: [SPARK-25118][Submit] Persist Driver Logs in Clie...

Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22504#discussion_r230466226
  
    --- Diff: core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala ---
    @@ -413,6 +417,66 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
         }
       }
     
    +  test("driver log cleaner") {
    +    val firstFileModifiedTime = TimeUnit.SECONDS.toMillis(10)
    +    val secondFileModifiedTime = TimeUnit.SECONDS.toMillis(20)
    +    val maxAge = TimeUnit.SECONDS.toSeconds(40)
    +    val clock = new ManualClock(0)
    +    val testConf = new SparkConf()
    +    testConf.set("spark.history.fs.logDirectory",
    +      Utils.createTempDir(namePrefix = "eventLog").getAbsolutePath())
    +    testConf.set(DRIVER_LOG_DFS_DIR, testDir.getAbsolutePath())
    +    testConf.set(DRIVER_LOG_CLEANER_ENABLED, true)
    +    testConf.set(DRIVER_LOG_CLEANER_INTERVAL, maxAge / 4)
    +    testConf.set(MAX_DRIVER_LOG_AGE_S, maxAge)
    +    val provider = new FsHistoryProvider(testConf, clock)
    +
    +    val log1 = FileUtils.getFile(testDir, "1" + DriverLogger.DRIVER_LOG_FILE_SUFFIX)
    +    createEmptyFile(log1)
    +    clock.setTime(firstFileModifiedTime)
    +    log1.setLastModified(clock.getTimeMillis())
    +    provider.cleanDriverLogs()
    +
    +    val log2 = FileUtils.getFile(testDir, "2" + DriverLogger.DRIVER_LOG_FILE_SUFFIX)
    +    createEmptyFile(log2)
    +    val log3 = FileUtils.getFile(testDir, "3" + DriverLogger.DRIVER_LOG_FILE_SUFFIX)
    +    createEmptyFile(log3)
    +    clock.setTime(secondFileModifiedTime)
    +    log2.setLastModified(clock.getTimeMillis())
    +    log3.setLastModified(clock.getTimeMillis())
    +    provider.cleanDriverLogs()
    +
    +    // This should not trigger any cleanup
    +    provider.listing.view(classOf[LogInfo]).iterator().asScala.toSeq.size should be(3)
    +
    +    // Should trigger cleanup for first file but not second one
    +    clock.setTime(firstFileModifiedTime + TimeUnit.SECONDS.toMillis(maxAge) + 1)
    +    provider.cleanDriverLogs()
    +    provider.listing.view(classOf[LogInfo]).iterator().asScala.toSeq.size should be(2)
    +    assert(!log1.exists())
    +    assert(log2.exists())
    +    assert(log3.exists())
    +
    +    // Update the third file length while keeping the original modified time
    +    Utils.tryLogNonFatalError {
    --- End diff --
    
    You don't want to catch the error here, do you? Otherwise, if it does happen, it is just logged and swallowed, and the test won't be doing what you want it to do.
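
    To illustrate the point, here is a sketch, not code from the PR: it assumes Utils.tryLogNonFatalError catches non-fatal exceptions and only logs them (as org.apache.spark.util.Utils does), and it uses a hypothetical touchLogLength helper standing in for the file update in the test above.

        import java.nio.charset.StandardCharsets
        import java.nio.file.{Files, Path}

        import org.apache.spark.util.Utils

        // Hypothetical helper standing in for the step under review:
        // grow the driver log while keeping its original modification time.
        def touchLogLength(log: Path): Unit = {
          val mtime = Files.getLastModifiedTime(log)
          Files.write(log, "padding".getBytes(StandardCharsets.UTF_8))
          Files.setLastModifiedTime(log, mtime)
        }

        // Wrapped: an IOException thrown here is caught and only logged, so the
        // test keeps running against a file that was never actually updated, and
        // the later assertions no longer check what the test intends.
        Utils.tryLogNonFatalError {
          touchLogLength(log3.toPath) // log3 is the java.io.File created in the test
        }

        // Unwrapped: the same IOException propagates and fails the test right at
        // the real cause, which is what the comment above is asking for.
        touchLogLength(log3.toPath)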


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org