Posted to commits@spark.apache.org by we...@apache.org on 2017/04/06 00:33:19 UTC

spark git commit: [SPARK-20204][SQL][FOLLOWUP] SQLConf should react to change in default timezone settings

Repository: spark
Updated Branches:
  refs/heads/master 9543fc0e0 -> 9d68c6723


[SPARK-20204][SQL][FOLLOWUP] SQLConf should react to change in default timezone settings

## What changes were proposed in this pull request?
Make sure SESSION_LOCAL_TIMEZONE reflects changes to the JVM's default timezone setting. Currently several timezone-related tests fail because a change to the default timezone is not picked up by SQLConf: the default was captured once, when SQLConf was initialized, instead of being re-read on access.

## How was this patch tested?
Added a unit test in ConfigEntrySuite.
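
To illustrate the underlying issue, here is a minimal standalone sketch (the object name is made up, not part of this patch) of why an eagerly captured default misses later changes to the JVM default timezone:

```scala
import java.util.TimeZone

object TimezoneDefaultDemo {
  // Eager capture: evaluated once at initialization, which is what
  // createWithDefault(TimeZone.getDefault().getID()) effectively did.
  val eagerDefault: String = TimeZone.getDefault.getID

  // Lazy capture: re-evaluated on every access, which is what this patch
  // enables via createWithDefaultFunction.
  val lazyDefault: () => String = () => TimeZone.getDefault.getID

  def main(args: Array[String]): Unit = {
    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
    println(eagerDefault)   // still the default captured at startup
    println(lazyDefault())  // America/Los_Angeles
  }
}
```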

Author: Dilip Biswal <db...@us.ibm.com>

Closes #17537 from dilipbiswal/timezone_debug.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9d68c672
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9d68c672
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9d68c672

Branch: refs/heads/master
Commit: 9d68c67235481fa33983afb766916b791ca8212a
Parents: 9543fc0
Author: Dilip Biswal <db...@us.ibm.com>
Authored: Thu Apr 6 08:33:14 2017 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Thu Apr 6 08:33:14 2017 +0800

----------------------------------------------------------------------
 .../spark/internal/config/ConfigBuilder.scala      |  8 ++++++++
 .../apache/spark/internal/config/ConfigEntry.scala | 17 +++++++++++++++++
 .../spark/internal/config/ConfigEntrySuite.scala   |  9 +++++++++
 .../org/apache/spark/sql/internal/SQLConf.scala    |  2 +-
 4 files changed, 35 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9d68c672/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
index b992113..e5d60a7 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
@@ -147,6 +147,14 @@ private[spark] class TypedConfigBuilder[T](
     }
   }
 
+  /** Creates a [[ConfigEntry]] with a function to determine the default value */
+  def createWithDefaultFunction(defaultFunc: () => T): ConfigEntry[T] = {
+    val entry = new ConfigEntryWithDefaultFunction[T](parent.key, defaultFunc, converter,
+      stringConverter, parent._doc, parent._public)
+    parent._onCreate.foreach(_ (entry))
+    entry
+  }
+
   /**
    * Creates a [[ConfigEntry]] that has a default value. The default value is provided as a
    * [[String]] and must be a valid value for the entry.
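
A hedged usage sketch of the new builder method (usable only from Spark-internal code, since ConfigBuilder is private[spark]; the key below is made up): the function itself is stored rather than its result, so the default is recomputed on every read:

```scala
val dynamicTz = ConfigBuilder("spark.test.dynamicTimeZone")
  .stringConf
  .createWithDefaultFunction(() => java.util.TimeZone.getDefault.getID)
```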

http://git-wip-us.apache.org/repos/asf/spark/blob/9d68c672/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
index 4f3e42b..e86712e 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
@@ -78,7 +78,24 @@ private class ConfigEntryWithDefault[T] (
   def readFrom(reader: ConfigReader): T = {
     reader.get(key).map(valueConverter).getOrElse(_defaultValue)
   }
+}
+
+private class ConfigEntryWithDefaultFunction[T] (
+     key: String,
+     _defaultFunction: () => T,
+     valueConverter: String => T,
+     stringConverter: T => String,
+     doc: String,
+     isPublic: Boolean)
+  extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {
+
+  override def defaultValue: Option[T] = Some(_defaultFunction())
 
+  override def defaultValueString: String = stringConverter(_defaultFunction())
+
+  def readFrom(reader: ConfigReader): T = {
+    reader.get(key).map(valueConverter).getOrElse(_defaultFunction())
+  }
 }
 
 private class ConfigEntryWithDefaultString[T] (
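
A rough sketch of the resulting contract (assuming code in the org.apache.spark.internal.config package; the key is made up): every accessor re-invokes the function, so the default tracks mutable external state instead of freezing it:

```scala
var data = 0
val iConf = ConfigBuilder("spark.test.intval").intConf
  .createWithDefaultFunction(() => data)
assert(iConf.defaultValue == Some(0))
data = 2
assert(iConf.defaultValue == Some(2))  // re-evaluated, not cached
```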

http://git-wip-us.apache.org/repos/asf/spark/blob/9d68c672/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index 3ff7e84..e2ba0d2 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -251,4 +251,13 @@ class ConfigEntrySuite extends SparkFunSuite {
       .createWithDefault(null)
     testEntryRef(nullConf, ref(nullConf))
   }
+
+  test("conf entry : default function") {
+    var data = 0
+    val conf = new SparkConf()
+    val iConf = ConfigBuilder(testKey("intval")).intConf.createWithDefaultFunction(() => data)
+    assert(conf.get(iConf) === 0)
+    data = 2
+    assert(conf.get(iConf) === 2)
+  }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/9d68c672/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 5b5d547..e685c2b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -752,7 +752,7 @@ object SQLConf {
     buildConf("spark.sql.session.timeZone")
       .doc("""The ID of session local timezone, e.g. "GMT", "America/Los_Angeles", etc.""")
       .stringConf
-      .createWithDefault(TimeZone.getDefault().getID())
+      .createWithDefaultFunction(() => TimeZone.getDefault.getID)
 
   val WINDOW_EXEC_BUFFER_SPILL_THRESHOLD =
     buildConf("spark.sql.windowExec.buffer.spill.threshold")

