You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by pw...@apache.org on 2014/06/19 08:25:07 UTC

git commit: Minor fix

Repository: spark
Updated Branches:
  refs/heads/master 640c29436 -> 67fca189c


Minor fix

The value "env" is never used in SparkContext.scala.
Replace the vague comment on the method setDelaySeconds in MetadataCleaner.scala with a detailed one.

Author: WangTao <ba...@aliyun.com>

Closes #1105 from WangTaoTheTonic/master and squashes the following commits:

688358e [WangTao] Minor fix


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/67fca189
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/67fca189
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/67fca189

Branch: refs/heads/master
Commit: 67fca189c944b8f8ba222bb471e343893031bd7b
Parents: 640c294
Author: WangTao <ba...@aliyun.com>
Authored: Wed Jun 18 23:24:57 2014 -0700
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Wed Jun 18 23:24:57 2014 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkContext.scala       | 1 -
 .../main/scala/org/apache/spark/util/MetadataCleaner.scala    | 7 ++++++-
 2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/67fca189/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 0678bdd..f9476ff 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -224,7 +224,6 @@ class SparkContext(config: SparkConf) extends Logging {
 
   /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse. */
   val hadoopConfiguration: Configuration = {
-    val env = SparkEnv.get
     val hadoopConf = SparkHadoopUtil.get.newConfiguration()
     // Explicitly check for S3 environment variables
     if (System.getenv("AWS_ACCESS_KEY_ID") != null &&

http://git-wip-us.apache.org/repos/asf/spark/blob/67fca189/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
index 7ebed51..2889e17 100644
--- a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
@@ -91,8 +91,13 @@ private[spark] object MetadataCleaner {
     conf.set(MetadataCleanerType.systemProperty(cleanerType),  delay.toString)
   }
 
+  /**
+   * Set the default delay time (in seconds).
+   * @param conf SparkConf instance
+   * @param delay default delay time to set
+   * @param resetAll whether to reset all to default
+   */
   def setDelaySeconds(conf: SparkConf, delay: Int, resetAll: Boolean = true) {
-    // override for all ?
     conf.set("spark.cleaner.ttl", delay.toString)
     if (resetAll) {
       for (cleanerType <- MetadataCleanerType.values) {