Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2020/03/09 06:47:01 UTC

[GitHub] [spark] beliefer opened a new pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

URL: https://github.com/apache/spark/pull/27852
 
 
   ### What changes were proposed in this pull request?
   This PR follows up https://github.com/apache/spark/pull/27847.
   I sorted out the version information for these configurations, shown in the table below. A sketch of the pattern applied follows the table.
   
   Item | Version | JIRA ID | Commit ID | Note
   -- | -- | -- | -- | --
   spark.yarn.isPython | 1.5.0 | SPARK-5479 | 38112905bc3b33f2ae75274afba1c30e116f6e46#diff-4d2ab44195558d5a9d5f15b8803ef39d | 
   spark.task.cpus | 0.5.0 | None | e5c4cd8a5e188592f8786a265c0cd073c69ac886#diff-391214d132a0fb4478f4f9c2313d8966 |  
   spark.dynamicAllocation.enabled | 1.2.0 | SPARK-3795 | 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d |  
   spark.dynamicAllocation.testing | 1.2.0 | SPARK-3795 | 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d |  
   spark.dynamicAllocation.minExecutors | 1.2.0 | SPARK-3795 | 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d |  
   spark.dynamicAllocation.initialExecutors | 1.3.0 | SPARK-4585 | b2047b55c5fc85de6b63276d8ab9610d2496e08b#diff-b096353602813e47074ace09a3890d56 |  
   spark.dynamicAllocation.maxExecutors | 1.2.0 | SPARK-3795 | 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d |  
   spark.dynamicAllocation.executorAllocationRatio | 2.4.0 | SPARK-22683 | 55c4ca88a3b093ee197a8689631be8d1fac1f10f#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.dynamicAllocation.cachedExecutorIdleTimeout | 1.4.0 | SPARK-7955 | 6faaf15ba311bc3a79aae40a6c9c4befabb6889f#diff-b096353602813e47074ace09a3890d56 |  
   spark.dynamicAllocation.executorIdleTimeout | 1.2.0 | SPARK-3795 | 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d |  
   spark.dynamicAllocation.shuffleTracking.enabled | 3.0.0 | SPARK-27963 | 2ddeff97d7329942a98ef363991eeabc3fa71a76#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.dynamicAllocation.shuffleTimeout | 3.0.0 | SPARK-27963 | 2ddeff97d7329942a98ef363991eeabc3fa71a76#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.dynamicAllocation.schedulerBacklogTimeout | 1.2.0 | SPARK-3795 | 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d |  
   spark.dynamicAllocation.sustainedSchedulerBacklogTimeout | 1.2.0 | SPARK-3795 | 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d |  
   spark.locality.wait | 0.5.0 | None | e5c4cd8a5e188592f8786a265c0cd073c69ac886#diff-391214d132a0fb4478f4f9c2313d8966 |  
   spark.shuffle.service.enabled | 1.2.0 | SPARK-3796 | f55218aeb1e9d638df6229b36a59a15ce5363482#diff-2b643ea78c1add0381754b1f47eec132 |  
   Constants.SHUFFLE_SERVICE_FETCH_RDD_ENABLED | 3.0.0 | SPARK-27677 | e9f3f62b2c0f521f3cc23fef381fc6754853ad4f#diff-6bdad48cfc34314e89599655442ff210 | spark.shuffle.service.fetch.rdd.enabled
   spark.shuffle.service.db.enabled | 3.0.0 | SPARK-26288 | 8b0aa59218c209d39cbba5959302d8668b885cf6#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.shuffle.service.port | 1.2.0 | SPARK-3796 | f55218aeb1e9d638df6229b36a59a15ce5363482#diff-2b643ea78c1add0381754b1f47eec132 |  
   spark.kerberos.keytab | 3.0.0 | SPARK-25372 | 51540c2fa677658be954c820bc18ba748e4c8583#diff-6bdad48cfc34314e89599655442ff210 | 
   spark.kerberos.principal | 3.0.0 | SPARK-25372 | 51540c2fa677658be954c820bc18ba748e4c8583#diff-6bdad48cfc34314e89599655442ff210 | 
   spark.kerberos.relogin.period | 3.0.0 | SPARK-23781 | 68dde3481ea458b0b8deeec2f99233c2d4c1e056#diff-6bdad48cfc34314e89599655442ff210 | 
   spark.kerberos.renewal.credentials | 3.0.0 | SPARK-26595 | 2a67dbfbd341af166b1c85904875f26a6dea5ba8#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.kerberos.access.hadoopFileSystems | 3.0.0 | SPARK-26766 | d0443a74d185ec72b747fa39994fa9a40ce974cf#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.executor.instances | 1.0.0 | SPARK-1126 | 1617816090e7b20124a512a43860a21232ebf511#diff-4d2ab44195558d5a9d5f15b8803ef39d |  
   spark.yarn.dist.pyFiles | 2.2.1 | SPARK-21714 | d10c9dc3f631a26dbbbd8f5c601ca2001a5d7c80#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.task.maxDirectResultSize | 2.0.0 | SPARK-13830 | 2ef4c5963bff3574fe17e669d703b25ddd064e5d#diff-5a0de266c82b95adb47d9bca714e1f1b |  
   spark.task.maxFailures | 0.8.0 | None | 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef#diff-264da78fe625d594eae59d1adabc8ae9 |  
   spark.task.reaper.enabled | 2.0.3 | SPARK-18761 | 678d91c1d2283d9965a39656af9d383bad093ba8#diff-5a0de266c82b95adb47d9bca714e1f1b | 
   spark.task.reaper.killTimeout | 2.0.3 | SPARK-18761 | 678d91c1d2283d9965a39656af9d383bad093ba8#diff-5a0de266c82b95adb47d9bca714e1f1b | 
   spark.task.reaper.pollingInterval | 2.0.3 | SPARK-18761 | 678d91c1d2283d9965a39656af9d383bad093ba8#diff-5a0de266c82b95adb47d9bca714e1f1b | 
   spark.task.reaper.threadDump | 2.0.3 | SPARK-18761 | 678d91c1d2283d9965a39656af9d383bad093ba8#diff-5a0de266c82b95adb47d9bca714e1f1b | 
   spark.blacklist.enabled | 2.1.0 | SPARK-17675 | 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.blacklist.task.maxTaskAttemptsPerExecutor | 2.1.0 | SPARK-17675 | 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.blacklist.task.maxTaskAttemptsPerNode | 2.1.0 | SPARK-17675 | 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.blacklist.application.maxFailedTasksPerExecutor | 2.2.0 | SPARK-8425 | 93cdb8a7d0f124b4db069fd8242207c82e263c52#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.blacklist.stage.maxFailedTasksPerExecutor | 2.1.0 | SPARK-17675 | 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.blacklist.application.maxFailedExecutorsPerNode | 2.2.0 | SPARK-8425 | 93cdb8a7d0f124b4db069fd8242207c82e263c52#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.blacklist.stage.maxFailedExecutorsPerNode | 2.1.0 | SPARK-17675 | 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.blacklist.timeout | 2.1.0 | SPARK-17675 | 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.blacklist.killBlacklistedExecutors | 2.2.0 | SPARK-16554 | 6287c94f08200d548df5cc0a401b73b84f9968c4#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.scheduler.executorTaskBlacklistTime | 1.0.0 | None | ab747d39ddc7c8a314ed2fb26548fc5652af0d74#diff-bad3987c83bd22d46416d3dd9d208e76 | 
   spark.blacklist.application.fetchFailure.enabled | 2.3.0 | SPARK-13669 and SPARK-20898 | 9e50a1d37a4cf0c34e20a7c1a910ceaff41535a2#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.files.fetchFailure.unRegisterOutputOnHost | 2.3.0 | SPARK-19753 | dccc0aa3cf957c8eceac598ac81ac82f03b52105#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.scheduler.listenerbus.eventqueue.capacity | 2.3.0 | SPARK-20887 | 629f38e171409da614fd635bd8dd951b7fde17a4#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.scheduler.listenerbus.metrics.maxListenerClassesTimed | 2.3.0 | SPARK-20863 | 2a23cdd078a7409d0bb92cf27718995766c41b1d#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.scheduler.listenerbus.logSlowEvent | 3.0.0 | SPARK-30812 | 68d7edf9497bea2f73707d32ab55dd8e53088e7c#diff-6bdad48cfc34314e89599655442ff210 |  
   spark.scheduler.listenerbus.logSlowEvent.threshold | 3.0.0 | SPARK-29001 | 0346afa8fc348aa1b3f5110df747a64e3b2da388#diff-6bdad48cfc34314e89599655442ff210 |  
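   
   For reference, a minimal sketch of the pattern this PR applies to each entry in `core/src/main/scala/org/apache/spark/internal/config/package.scala` (the key `spark.example.flag` is hypothetical, used only for illustration; it is not one of the configurations touched here):
   
   ```scala
   // Inside `package object config` in org.apache.spark.internal:
   private[spark] val EXAMPLE_FLAG =
     ConfigBuilder("spark.example.flag")        // configuration key
       .doc("An illustrative boolean setting.") // user-facing description
       .version("3.0.0")                        // release that introduced the key
       .booleanConf                             // value type
       .createWithDefault(false)                // default value
   ```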
   
   
   ### Why are the changes needed?
   Supplements the version information for the Core configuration entries.
   
   
   ### Does this PR introduce any user-facing change?
   No
   
   
   ### How was this patch tested?
   Existing UTs.
   

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


[GitHub] [spark] AmplabJenkins removed a comment on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#issuecomment-596358912
 
 
   Merged build finished. Test PASSed.



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389490261
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -685,13 +743,15 @@ package object config {
         "an event queue using capacity specified by `spark.scheduler.listenerbus" +
         ".eventqueue.queueName.capacity` first. If it's not configured, Spark will " +
         "use the default capacity specified by this config.")
+      .version("2.3.0")
 
 Review comment:
   SPARK-20887, commit ID: 629f38e171409da614fd635bd8dd951b7fde17a4#diff-6bdad48cfc34314e89599655442ff210
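 
   As a usage illustration of the fallback described in the doc above, a hedged sketch (the values and the `appStatus` queue name are assumptions for the example, not something introduced by this PR):
 
   ```scala
   import org.apache.spark.SparkConf
 
   // The per-queue key, when set, takes precedence over the shared default.
   val conf = new SparkConf()
     .set("spark.scheduler.listenerbus.eventqueue.capacity", "20000")           // default for all queues
     .set("spark.scheduler.listenerbus.eventqueue.appStatus.capacity", "30000") // override for one queue
   ```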



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389486225
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
 
 Review comment:
   SPARK-3795, commit ID: 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389487823
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Long.MaxValue)
 
   private[spark] val DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.schedulerBacklogTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS).createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_SUSTAINED_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.sustainedSchedulerBacklogTimeout")
+      .version("1.2.0")
       .fallbackConf(DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT)
 
   private[spark] val LOCALITY_WAIT = ConfigBuilder("spark.locality.wait")
+    .version("0.5.0")
     .timeConf(TimeUnit.MILLISECONDS)
     .createWithDefaultString("3s")
 
   private[spark] val SHUFFLE_SERVICE_ENABLED =
-    ConfigBuilder("spark.shuffle.service.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.shuffle.service.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val SHUFFLE_SERVICE_FETCH_RDD_ENABLED =
     ConfigBuilder(Constants.SHUFFLE_SERVICE_FETCH_RDD_ENABLED)
       .doc("Whether to use the ExternalShuffleService for fetching disk persisted RDD blocks. " +
         "In case of dynamic allocation if this feature is enabled executors having only disk " +
         "persisted blocks are considered idle after " +
         "'spark.dynamicAllocation.executorIdleTimeout' and will be released accordingly.")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val SHUFFLE_SERVICE_DB_ENABLED =
     ConfigBuilder("spark.shuffle.service.db.enabled")
       .doc("Whether to use db in ExternalShuffleService. Note that this only affects " +
         "standalone mode.")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(true)
 
   private[spark] val SHUFFLE_SERVICE_PORT =
-    ConfigBuilder("spark.shuffle.service.port").intConf.createWithDefault(7337)
+    ConfigBuilder("spark.shuffle.service.port").version("1.2.0").intConf.createWithDefault(7337)
 
   private[spark] val KEYTAB = ConfigBuilder("spark.kerberos.keytab")
     .doc("Location of user's keytab.")
+    .version("3.0.0")
     .stringConf.createOptional
 
   private[spark] val PRINCIPAL = ConfigBuilder("spark.kerberos.principal")
     .doc("Name of the Kerberos principal.")
-    .stringConf.createOptional
+    .version("3.0.0")
 
 Review comment:
   SPARK-25372, commit ID: 51540c2fa677658be954c820bc18ba748e4c8583#diff-6bdad48cfc34314e89599655442ff210



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389487701
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Long.MaxValue)
 
   private[spark] val DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.schedulerBacklogTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS).createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_SUSTAINED_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.sustainedSchedulerBacklogTimeout")
+      .version("1.2.0")
       .fallbackConf(DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT)
 
   private[spark] val LOCALITY_WAIT = ConfigBuilder("spark.locality.wait")
+    .version("0.5.0")
     .timeConf(TimeUnit.MILLISECONDS)
     .createWithDefaultString("3s")
 
   private[spark] val SHUFFLE_SERVICE_ENABLED =
-    ConfigBuilder("spark.shuffle.service.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.shuffle.service.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val SHUFFLE_SERVICE_FETCH_RDD_ENABLED =
     ConfigBuilder(Constants.SHUFFLE_SERVICE_FETCH_RDD_ENABLED)
       .doc("Whether to use the ExternalShuffleService for fetching disk persisted RDD blocks. " +
         "In case of dynamic allocation if this feature is enabled executors having only disk " +
         "persisted blocks are considered idle after " +
         "'spark.dynamicAllocation.executorIdleTimeout' and will be released accordingly.")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val SHUFFLE_SERVICE_DB_ENABLED =
     ConfigBuilder("spark.shuffle.service.db.enabled")
       .doc("Whether to use db in ExternalShuffleService. Note that this only affects " +
         "standalone mode.")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(true)
 
   private[spark] val SHUFFLE_SERVICE_PORT =
-    ConfigBuilder("spark.shuffle.service.port").intConf.createWithDefault(7337)
+    ConfigBuilder("spark.shuffle.service.port").version("1.2.0").intConf.createWithDefault(7337)
 
 Review comment:
   SPARK-3796, commit ID: f55218aeb1e9d638df6229b36a59a15ce5363482#diff-2b643ea78c1add0381754b1f47eec132



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389489654
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
       .intConf
       .createWithDefault(4)
 
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val TASK_REAPER_KILL_TIMEOUT =
     ConfigBuilder("spark.task.reaper.killTimeout")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefault(-1)
 
   private[spark] val TASK_REAPER_POLLING_INTERVAL =
     ConfigBuilder("spark.task.reaper.pollingInterval")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefaultString("10s")
 
   private[spark] val TASK_REAPER_THREAD_DUMP =
     ConfigBuilder("spark.task.reaper.threadDump")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(true)
 
   // Blacklist confs
   private[spark] val BLACKLIST_ENABLED =
     ConfigBuilder("spark.blacklist.enabled")
+      .version("2.1.0")
       .booleanConf
       .createOptional
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_EXECUTOR =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerExecutor")
+      .version("2.1.0")
       .intConf
       .createWithDefault(1)
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_NODE =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerNode")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC =
     ConfigBuilder("spark.blacklist.application.maxFailedTasksPerExecutor")
+      .version("2.2.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC_STAGE =
     ConfigBuilder("spark.blacklist.stage.maxFailedTasksPerExecutor")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILED_EXEC_PER_NODE =
     ConfigBuilder("spark.blacklist.application.maxFailedExecutorsPerNode")
+      .version("2.2.0")
 
 Review comment:
   SPARK-8425, commit ID: 93cdb8a7d0f124b4db069fd8242207c82e263c52#diff-6bdad48cfc34314e89599655442ff210
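 
   For context, these blacklist settings are typically enabled together; a hedged illustration (the values are arbitrary, not defaults recommended by this PR):
 
   ```scala
   import org.apache.spark.SparkConf
 
   // Turn blacklisting on and tighten the per-executor/per-node thresholds.
   val conf = new SparkConf()
     .set("spark.blacklist.enabled", "true")
     .set("spark.blacklist.task.maxTaskAttemptsPerExecutor", "1")
     .set("spark.blacklist.application.maxFailedExecutorsPerNode", "2")
   ```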



[GitHub] [spark] HyukjinKwon commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#issuecomment-610367472
 
 
   Merged to master and branch-3.0.



[GitHub] [spark] SparkQA commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#issuecomment-596358446
 
 
   **[Test build #119552 has started](https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/119552/testReport)** for PR 27852 at commit [`dc79da0`](https://github.com/apache/spark/commit/dc79da0cbcfb885820e57167b471ba9a8aea3036).



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389488532
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
 
 Review comment:
   No JIRA ID, commit ID: 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef#diff-264da78fe625d594eae59d1adabc8ae9



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389485859
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
 
 Review comment:
   SPARK-5479, commit ID: 38112905bc3b33f2ae75274afba1c30e116f6e46#diff-4d2ab44195558d5a9d5f15b8803ef39d



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389487166
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Long.MaxValue)
 
   private[spark] val DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.schedulerBacklogTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS).createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_SUSTAINED_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.sustainedSchedulerBacklogTimeout")
+      .version("1.2.0")
 
 Review comment:
   SPARK-3795, commit ID: 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d
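 
   As the earlier hunks in this thread show, this config is defined with `.fallbackConf(DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT)`, so the sustained timeout inherits whatever the base key resolves to. A hedged illustration (the `5s` value is arbitrary):
 
   ```scala
   import org.apache.spark.SparkConf
 
   // Only the base key is set; the sustained key resolves to the same value
   // via fallbackConf unless it is set explicitly.
   val conf = new SparkConf()
     .set("spark.dynamicAllocation.schedulerBacklogTimeout", "5s")
   ```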



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389487142
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Long.MaxValue)
 
   private[spark] val DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.schedulerBacklogTimeout")
+      .version("1.2.0")
 
 Review comment:
   SPARK-3795, commit ID: 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389486500
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
 
 Review comment:
   SPARK-22683, commit ID: 55c4ca88a3b093ee197a8689631be8d1fac1f10f#diff-6bdad48cfc34314e89599655442ff210



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389487374
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Long.MaxValue)
 
   private[spark] val DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.schedulerBacklogTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS).createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_SUSTAINED_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.sustainedSchedulerBacklogTimeout")
+      .version("1.2.0")
       .fallbackConf(DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT)
 
   private[spark] val LOCALITY_WAIT = ConfigBuilder("spark.locality.wait")
+    .version("0.5.0")
     .timeConf(TimeUnit.MILLISECONDS)
     .createWithDefaultString("3s")
 
   private[spark] val SHUFFLE_SERVICE_ENABLED =
-    ConfigBuilder("spark.shuffle.service.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.shuffle.service.enabled")
+      .version("1.2.0")
 
 Review comment:
   SPARK-3796, commit ID: f55218aeb1e9d638df6229b36a59a15ce5363482#diff-2b643ea78c1add0381754b1f47eec132
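 
   For context, this flag is commonly paired with dynamic allocation, which relies on the external shuffle service (or shuffle tracking) to release executors safely; a hedged illustration:
 
   ```scala
   import org.apache.spark.SparkConf
 
   // Enable dynamic allocation backed by the external shuffle service.
   val conf = new SparkConf()
     .set("spark.dynamicAllocation.enabled", "true")
     .set("spark.shuffle.service.enabled", "true")
   ```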



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
URL: https://github.com/apache/spark/pull/27852#discussion_r389487912
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Long.MaxValue)
 
   private[spark] val DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.schedulerBacklogTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS).createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_SUSTAINED_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.sustainedSchedulerBacklogTimeout")
+      .version("1.2.0")
       .fallbackConf(DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT)
 
   private[spark] val LOCALITY_WAIT = ConfigBuilder("spark.locality.wait")
+    .version("0.5.0")
     .timeConf(TimeUnit.MILLISECONDS)
     .createWithDefaultString("3s")
 
   private[spark] val SHUFFLE_SERVICE_ENABLED =
-    ConfigBuilder("spark.shuffle.service.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.shuffle.service.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val SHUFFLE_SERVICE_FETCH_RDD_ENABLED =
     ConfigBuilder(Constants.SHUFFLE_SERVICE_FETCH_RDD_ENABLED)
       .doc("Whether to use the ExternalShuffleService for fetching disk persisted RDD blocks. " +
         "In case of dynamic allocation if this feature is enabled executors having only disk " +
         "persisted blocks are considered idle after " +
         "'spark.dynamicAllocation.executorIdleTimeout' and will be released accordingly.")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val SHUFFLE_SERVICE_DB_ENABLED =
     ConfigBuilder("spark.shuffle.service.db.enabled")
       .doc("Whether to use db in ExternalShuffleService. Note that this only affects " +
         "standalone mode.")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(true)
 
   private[spark] val SHUFFLE_SERVICE_PORT =
-    ConfigBuilder("spark.shuffle.service.port").intConf.createWithDefault(7337)
+    ConfigBuilder("spark.shuffle.service.port").version("1.2.0").intConf.createWithDefault(7337)
 
   private[spark] val KEYTAB = ConfigBuilder("spark.kerberos.keytab")
     .doc("Location of user's keytab.")
+    .version("3.0.0")
     .stringConf.createOptional
 
   private[spark] val PRINCIPAL = ConfigBuilder("spark.kerberos.principal")
     .doc("Name of the Kerberos principal.")
-    .stringConf.createOptional
+    .version("3.0.0")
+    .stringConf
+    .createOptional
 
   private[spark] val KERBEROS_RELOGIN_PERIOD = ConfigBuilder("spark.kerberos.relogin.period")
+    .version("3.0.0")
 
 Review comment:
   SPARK-23781, commit ID: 68dde3481ea458b0b8deeec2f99233c2d4c1e056#diff-6bdad48cfc34314e89599655442ff210
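
  One motivation for filling in every version: it makes a registry-wide
  audit possible. A hypothetical check along these lines (the Seq below is
  hand-rolled for illustration, not Spark's real registry):

    object VersionAudit extends App {
      final case class Entry(key: String, version: String)
      val entries = Seq(
        Entry("spark.kerberos.keytab", "3.0.0"),
        Entry("spark.kerberos.relogin.period", "3.0.0"),
        Entry("spark.shuffle.service.port", "1.2.0"))
      // Any entry with an empty version string fails the audit.
      val missing = entries.filter(_.version.isEmpty)
      require(missing.isEmpty, s"entries without a version: ${missing.map(_.key)}")
      println("all entries carry a version")
    }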



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488912
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
       .intConf
       .createWithDefault(4)
 
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val TASK_REAPER_KILL_TIMEOUT =
     ConfigBuilder("spark.task.reaper.killTimeout")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefault(-1)
 
   private[spark] val TASK_REAPER_POLLING_INTERVAL =
     ConfigBuilder("spark.task.reaper.pollingInterval")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefaultString("10s")
 
   private[spark] val TASK_REAPER_THREAD_DUMP =
     ConfigBuilder("spark.task.reaper.threadDump")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(true)
 
   // Blacklist confs
   private[spark] val BLACKLIST_ENABLED =
     ConfigBuilder("spark.blacklist.enabled")
+      .version("2.1.0")
       .booleanConf
       .createOptional
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_EXECUTOR =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerExecutor")
+      .version("2.1.0")
 
 Review comment:
   SPARK-17675, commit ID: 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210
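
  For intuition on the threshold quoted above (default 1), a hedged sketch
  of how a scheduler might consult it; the tracker class and counters are
  invented for illustration, not Spark's BlacklistTracker:

    final class ToyBlacklist(maxTaskAttemptsPerExecutor: Int) {
      private val failures = scala.collection.mutable.Map.empty[(String, Long), Int]
      // Returns true once a task has failed often enough on one executor
      // that it should no longer be scheduled there.
      def recordFailure(executorId: String, taskId: Long): Boolean = {
        val key = (executorId, taskId)
        val n = failures.getOrElse(key, 0) + 1
        failures(key) = n
        n >= maxTaskAttemptsPerExecutor
      }
    }

    object BlacklistDemo extends App {
      val tracker = new ToyBlacklist(maxTaskAttemptsPerExecutor = 1) // default above
      println(tracker.recordFailure("exec-1", taskId = 7L)) // true after one failure
    }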



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488721
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
       .intConf
       .createWithDefault(4)
 
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val TASK_REAPER_KILL_TIMEOUT =
     ConfigBuilder("spark.task.reaper.killTimeout")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefault(-1)
 
   private[spark] val TASK_REAPER_POLLING_INTERVAL =
     ConfigBuilder("spark.task.reaper.pollingInterval")
+      .version("2.0.3")
 
 Review comment:
   SPARK-18761, commit ID: 678d91c1d2283d9965a39656af9d383bad093ba8#diff-5a0de266c82b95adb47d9bca714e1f1b
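
  Note the two default styles in this hunk: createWithDefault(-1) stores a
  raw number in the builder's unit, while createWithDefaultString("10s")
  goes through time-string parsing. A toy parser showing the idea (not
  Spark's actual JavaUtils time-string helpers):

    import java.util.concurrent.TimeUnit

    object TimeConfDemo extends App {
      def parseTime(s: String, targetUnit: TimeUnit): Long = {
        val (digits, suffix) = s.span(_.isDigit)
        val sourceUnit = suffix match {
          case "ms"  => TimeUnit.MILLISECONDS
          case "s"   => TimeUnit.SECONDS
          case "m"   => TimeUnit.MINUTES
          case ""    => targetUnit // bare numbers use the config's own unit
          case other => throw new IllegalArgumentException(s"unknown suffix: $other")
        }
        targetUnit.convert(digits.toLong, sourceUnit)
      }
      println(parseTime("10s", TimeUnit.MILLISECONDS)) // 10000
    }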



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488021
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -561,6 +597,7 @@ package object config {
         "Which credentials to use when renewing delegation tokens for executors. Can be either " +
         "'keytab', the default, which requires a keytab to be provided, or 'ccache', which uses " +
         "the local credentials cache.")
+      .version("3.0.0")
 
 Review comment:
   SPARK-26595, commit ID: 2a67dbfbd341af166b1c85904875f26a6dea5ba8#diff-6bdad48cfc34314e89599655442ff210
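
  The doc above describes an enumerated string config ('keytab' or
  'ccache'). A sketch of enforcing such a constraint; the validate helper
  is illustrative rather than Spark's checkValues:

    object RenewerDemo extends App {
      val allowed = Set("keytab", "ccache")
      def validate(value: String): String = {
        require(allowed.contains(value), s"'$value' must be one of $allowed")
        value
      }
      println(validate("keytab")) // ok
      // validate("token")        // would throw IllegalArgumentException
    }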



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488429
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
 
 Review comment:
   SPARK-13830, commit ID: 2ef4c5963bff3574fe17e669d703b25ddd064e5d#diff-5a0de266c82b95adb47d9bca714e1f1b
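
  The default above, 1L << 20, is 2^20 bytes, i.e. exactly 1 MiB:

    object DirectResultDemo extends App {
      val defaultBytes = 1L << 20
      println(defaultBytes)          // 1048576
      println(defaultBytes / 1024.0) // 1024.0 KiB, i.e. 1 MiB
    }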



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389487281
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Long.MaxValue)
 
   private[spark] val DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.schedulerBacklogTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS).createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_SUSTAINED_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.sustainedSchedulerBacklogTimeout")
+      .version("1.2.0")
       .fallbackConf(DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT)
 
   private[spark] val LOCALITY_WAIT = ConfigBuilder("spark.locality.wait")
+    .version("0.5.0")
 
 Review comment:
   No JIRA ID, commit ID: e5c4cd8a5e188592f8786a265c0cd073c69ac886#diff-391214d132a0fb4478f4f9c2313d8966
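
  spark.locality.wait bounds how long the scheduler holds a task hoping for
  a better locality level before degrading. A hedged sketch of that
  wait-then-degrade progression; the levels and timing logic here are
  simplified for illustration:

    object LocalityWaitDemo extends App {
      val localityWaitMs = 3000L // the "3s" default above
      val levels = Seq("PROCESS_LOCAL", "NODE_LOCAL", "RACK_LOCAL", "ANY")
      def levelFor(waitedMs: Long): String = {
        val hops = (waitedMs / localityWaitMs).toInt
        levels(math.min(hops, levels.size - 1))
      }
      println(levelFor(0L))    // PROCESS_LOCAL
      println(levelFor(6500L)) // RACK_LOCAL after two full waits
    }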



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389490452
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -701,6 +761,7 @@ package object config {
       .doc("When enabled, log the event that takes too much time to process. This helps us " +
         "discover the event types that cause performance bottlenecks. The time threshold is " +
         "controlled by spark.scheduler.listenerbus.logSlowEvent.threshold.")
+      .version("3.0.0")
 
 Review comment:
   SPARK-30812, commit ID: 68d7edf9497bea2f73707d32ab55dd8e53088e7c#diff-6bdad48cfc34314e89599655442ff210
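
  A hedged sketch of the behavior the doc describes: time each event and
  log it when handling exceeds a threshold. The names below are invented
  for illustration:

    object SlowEventDemo extends App {
      val thresholdNs = 1000000L // would come from ...logSlowEvent.threshold
      def post(event: String)(handler: String => Unit): Unit = {
        val start = System.nanoTime()
        handler(event)
        val elapsed = System.nanoTime() - start
        if (elapsed > thresholdNs) {
          println(s"slow event $event took ${elapsed / 1e6} ms")
        }
      }
      post("jobStart") { _ => Thread.sleep(5) } // logged as slow
    }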



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488703
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
       .intConf
       .createWithDefault(4)
 
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val TASK_REAPER_KILL_TIMEOUT =
     ConfigBuilder("spark.task.reaper.killTimeout")
+      .version("2.0.3")
 
 Review comment:
   SPARK-18761, commit ID: 678d91c1d2283d9965a39656af9d383bad093ba8#diff-5a0de266c82b95adb47d9bca714e1f1b



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389490341
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -685,13 +743,15 @@ package object config {
         "an event queue using capacity specified by `spark.scheduler.listenerbus" +
         ".eventqueue.queueName.capacity` first. If it's not configured, Spark will " +
         "use the default capacity specified by this config.")
+      .version("2.3.0")
       .intConf
       .checkValue(_ > 0, "The capacity of listener bus event queue must be positive")
       .createWithDefault(10000)
 
   private[spark] val LISTENER_BUS_METRICS_MAX_LISTENER_CLASSES_TIMED =
     ConfigBuilder("spark.scheduler.listenerbus.metrics.maxListenerClassesTimed")
       .internal()
+      .version("2.3.0")
 
 Review comment:
   SPARK-20863, commit ID: 2a23cdd078a7409d0bb92cf27718995766c41b1d#diff-6bdad48cfc34314e89599655442ff210
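
  The doc in this hunk describes per-queue capacity lookup with a global
  fallback. Minimal sketch of that resolution order, with a Map standing in
  for SparkConf:

    object QueueCapacityDemo extends App {
      val conf = Map(
        "spark.scheduler.listenerbus.eventqueue.capacity" -> "10000",
        "spark.scheduler.listenerbus.eventqueue.appStatus.capacity" -> "20000")
      def capacityFor(queue: String): Int =
        conf.get(s"spark.scheduler.listenerbus.eventqueue.$queue.capacity")
          .orElse(conf.get("spark.scheduler.listenerbus.eventqueue.capacity"))
          .fold(10000)(_.toInt)
      println(capacityFor("appStatus")) // 20000, per-queue override wins
      println(capacityFor("shared"))    // 10000, global default
    }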



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389487043
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
 
 Review comment:
   SPARK-27963, commit ID: 2ddeff97d7329942a98ef363991eeabc3fa71a76#diff-6bdad48cfc34314e89599655442ff210
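
  Several entries in this hunk use fallbackConf (initialExecutors falls
  back to minExecutors; sustainedSchedulerBacklogTimeout to
  schedulerBacklogTimeout). Sketch of that lookup order, with a Map
  standing in for SparkConf:

    object FallbackDemo extends App {
      val conf = Map("spark.dynamicAllocation.minExecutors" -> "2")
      def get(key: String, fallbackKey: String, default: Int): Int =
        conf.get(key).orElse(conf.get(fallbackKey)).fold(default)(_.toInt)
      // initialExecutors is unset, so the minExecutors value wins:
      println(get("spark.dynamicAllocation.initialExecutors",
                  "spark.dynamicAllocation.minExecutors", default = 0)) // 2
    }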



[GitHub] [spark] AmplabJenkins commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
AmplabJenkins commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#issuecomment-596358912
 
 
   Merged build finished. Test PASSed.



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389486297
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
 
 Review comment:
   SPARK-3795, commit ID: 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488751
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
       .intConf
       .createWithDefault(4)
 
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val TASK_REAPER_KILL_TIMEOUT =
     ConfigBuilder("spark.task.reaper.killTimeout")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefault(-1)
 
   private[spark] val TASK_REAPER_POLLING_INTERVAL =
     ConfigBuilder("spark.task.reaper.pollingInterval")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefaultString("10s")
 
   private[spark] val TASK_REAPER_THREAD_DUMP =
     ConfigBuilder("spark.task.reaper.threadDump")
+      .version("2.0.3")
 
 Review comment:
   SPARK-18761, commit ID: 678d91c1d2283d9965a39656af9d383bad093ba8#diff-5a0de266c82b95adb47d9bca714e1f1b



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389489325
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
       .intConf
       .createWithDefault(4)
 
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val TASK_REAPER_KILL_TIMEOUT =
     ConfigBuilder("spark.task.reaper.killTimeout")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefault(-1)
 
   private[spark] val TASK_REAPER_POLLING_INTERVAL =
     ConfigBuilder("spark.task.reaper.pollingInterval")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefaultString("10s")
 
   private[spark] val TASK_REAPER_THREAD_DUMP =
     ConfigBuilder("spark.task.reaper.threadDump")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(true)
 
   // Blacklist confs
   private[spark] val BLACKLIST_ENABLED =
     ConfigBuilder("spark.blacklist.enabled")
+      .version("2.1.0")
       .booleanConf
       .createOptional
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_EXECUTOR =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerExecutor")
+      .version("2.1.0")
       .intConf
       .createWithDefault(1)
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_NODE =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerNode")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC =
     ConfigBuilder("spark.blacklist.application.maxFailedTasksPerExecutor")
+      .version("2.2.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC_STAGE =
     ConfigBuilder("spark.blacklist.stage.maxFailedTasksPerExecutor")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILED_EXEC_PER_NODE =
     ConfigBuilder("spark.blacklist.application.maxFailedExecutorsPerNode")
+      .version("2.2.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILED_EXEC_PER_NODE_STAGE =
     ConfigBuilder("spark.blacklist.stage.maxFailedExecutorsPerNode")
+      .version("2.1.0")
 
 Review comment:
   SPARK-17675, commit ID: 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210



[GitHub] [spark] AmplabJenkins commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
AmplabJenkins commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#issuecomment-596362301
 
 
   Merged build finished. Test FAILed.



[GitHub] [spark] AmplabJenkins commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
AmplabJenkins commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#issuecomment-596362304
 
 
   Test FAILed.
   Refer to this link for build results (access rights to CI server needed): 
   https://amplab.cs.berkeley.edu/jenkins//job/SparkPullRequestBuilder/119552/
   Test FAILed.



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488141
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
 
 Review comment:
   SPARK-26766, commit ID: d0443a74d185ec72b747fa39994fa9a40ce974cf#diff-6bdad48cfc34314e89599655442ff210
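
  This entry is a sequence config (.stringConf.toSequence), so a
  comma-separated value surfaces as a Seq. Toy illustration of the shape;
  the split here is a stand-in for Spark's parsing:

    object SeqConfDemo extends App {
      val raw = "hdfs://nn1:8020,webhdfs://nn2:50070"
      val fileSystems: Seq[String] =
        raw.split(",").toSeq.map(_.trim).filter(_.nonEmpty)
      println(fileSystems) // List(hdfs://nn1:8020, webhdfs://nn2:50070)
    }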



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389486734
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
 
 Review comment:
   SPARK-3795, commit ID: 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389486576
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
 
 Review comment:
   SPARK-7955, commit ID: 6faaf15ba311bc3a79aae40a6c9c4befabb6889f#diff-b096353602813e47074ace09a3890d56
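
  The timeout entries in this hunk attach checkValue(_ >= 0L, "Timeout must
  be >= 0."), which rejects out-of-range values when the config is
  resolved. The guard reduces to a simple require; sketched here outside
  the builder for clarity:

    object CheckValueDemo extends App {
      def withCheck(value: Long): Long = {
        require(value >= 0L, "Timeout must be >= 0.")
        value
      }
      println(withCheck(60L)) // ok, the executorIdleTimeout default
      // withCheck(-1L)       // would throw IllegalArgumentException
    }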



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389487513
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Long.MaxValue)
 
   private[spark] val DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.schedulerBacklogTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS).createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_SUSTAINED_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.sustainedSchedulerBacklogTimeout")
+      .version("1.2.0")
       .fallbackConf(DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT)
 
   private[spark] val LOCALITY_WAIT = ConfigBuilder("spark.locality.wait")
+    .version("0.5.0")
     .timeConf(TimeUnit.MILLISECONDS)
     .createWithDefaultString("3s")
 
   private[spark] val SHUFFLE_SERVICE_ENABLED =
-    ConfigBuilder("spark.shuffle.service.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.shuffle.service.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val SHUFFLE_SERVICE_FETCH_RDD_ENABLED =
     ConfigBuilder(Constants.SHUFFLE_SERVICE_FETCH_RDD_ENABLED)
       .doc("Whether to use the ExternalShuffleService for fetching disk persisted RDD blocks. " +
         "In case of dynamic allocation if this feature is enabled executors having only disk " +
         "persisted blocks are considered idle after " +
         "'spark.dynamicAllocation.executorIdleTimeout' and will be released accordingly.")
+      .version("3.0.0")
 
 Review comment:
   SPARK-27677, commit ID: e9f3f62b2c0f521f3cc23fef381fc6754853ad4f#diff-6bdad48cfc34314e89599655442ff210
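
   For readers skimming the hunks in this thread: every entry gets the same treatment, a single `.version()` call inserted into the existing builder chain. A minimal sketch of the pattern, assuming the internal ConfigBuilder API shown in the diffs (the entry itself is made up for illustration and does not exist in Spark):

       // Hypothetical config entry, illustrating where .version() sits in the chain.
       private[spark] val EXAMPLE_FLAG =
         ConfigBuilder("spark.example.flag")   // made-up key
           .doc("Illustration only, not a real Spark config.")
           .version("3.0.0")                   // release that introduced the key
           .booleanConf
           .createWithDefault(false)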



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389486098
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
 
 Review comment:
   SPARK-3795, commit ID: 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d
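
   As a usage sketch (not part of the PR): the dynamic-allocation switch above is typically set at submit time together with an executor-reclamation mechanism — before 3.0.0 that meant the external shuffle service, while 3.0.0 adds spark.dynamicAllocation.shuffleTracking.enabled as an alternative. Values below are illustrative:

       import org.apache.spark.sql.SparkSession

       val spark = SparkSession.builder()
         .appName("dyn-alloc-sketch")                          // made-up app name
         .config("spark.dynamicAllocation.enabled", "true")
         .config("spark.shuffle.service.enabled", "true")      // executor reclamation path
         .config("spark.dynamicAllocation.minExecutors", "2")
         .config("spark.dynamicAllocation.maxExecutors", "20")
         .getOrCreate()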



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488872
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   // Blacklist confs
   private[spark] val BLACKLIST_ENABLED =
     ConfigBuilder("spark.blacklist.enabled")
+      .version("2.1.0")
 
 Review comment:
   SPARK-17675, commit ID: 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210
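
   A brief usage sketch of the blacklist family annotated here (illustrative values; the keys are exactly those in the hunk):

       import org.apache.spark.SparkConf

       val conf = new SparkConf()
         .set("spark.blacklist.enabled", "true")
         .set("spark.blacklist.task.maxTaskAttemptsPerExecutor", "1")
         .set("spark.blacklist.timeout", "1h") // how long an executor/node stays blacklisted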



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488328
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
 
 Review comment:
   SPARK-21714, commit ID: d10c9dc3f631a26dbbbd8f5c601ca2001a5d7c80#diff-6bdad48cfc34314e89599655442ff210



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389490044
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val BLACKLIST_FETCH_FAILURE_ENABLED =
     ConfigBuilder("spark.blacklist.application.fetchFailure.enabled")
+      .version("2.3.0")
 
 Review comment:
   SPARK-13669 and SPARK-20898, commit ID: 9e50a1d37a4cf0c34e20a7c1a910ceaff41535a2#diff-6bdad48cfc34314e89599655442ff210



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389489549
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val MAX_FAILURES_PER_EXEC =
     ConfigBuilder("spark.blacklist.application.maxFailedTasksPerExecutor")
+      .version("2.2.0")
 
 Review comment:
   SPARK-8425, commit ID: 93cdb8a7d0f124b4db069fd8242207c82e263c52#diff-6bdad48cfc34314e89599655442ff210



[GitHub] [spark] HyukjinKwon closed pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
HyukjinKwon closed pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852
 
 
   



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389487774
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val KEYTAB = ConfigBuilder("spark.kerberos.keytab")
     .doc("Location of user's keytab.")
+    .version("3.0.0")
 
 Review comment:
   SPARK-25372, commit ID: 51540c2fa677658be954c820bc18ba748e4c8583#diff-6bdad48cfc34314e89599655442ff210
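
   As a hedged usage sketch: SPARK-25372 moved these keys to the spark.kerberos.* namespace (previously under spark.yarn.*), so an application supplying credentials might set, with a made-up path and principal (spark.kerberos.principal is assumed here as the companion key renamed by the same change):

       import org.apache.spark.SparkConf

       val conf = new SparkConf()
         .set("spark.kerberos.keytab", "/etc/security/keytabs/alice.keytab") // made-up path
         .set("spark.kerberos.principal", "alice@EXAMPLE.COM")               // made-up principal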



[GitHub] [spark] SparkQA commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
SparkQA commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#issuecomment-596362272
 
 
   **[Test build #119552 has finished](https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/119552/testReport)** for PR 27852 at commit [`dc79da0`](https://github.com/apache/spark/commit/dc79da0cbcfb885820e57167b471ba9a8aea3036).
    * This patch **fails due to an unknown error code, -9**.
    * This patch merges cleanly.
    * This patch adds no public classes.



[GitHub] [spark] AmplabJenkins removed a comment on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
AmplabJenkins removed a comment on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#issuecomment-596362301
 
 
   Merged build finished. Test FAILed.



[GitHub] [spark] SparkQA removed a comment on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
SparkQA removed a comment on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#issuecomment-596358446
 
 
   **[Test build #119552 has started](https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/119552/testReport)** for PR 27852 at commit [`dc79da0`](https://github.com/apache/spark/commit/dc79da0cbcfb885820e57167b471ba9a8aea3036).



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389487618
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val SHUFFLE_SERVICE_DB_ENABLED =
     ConfigBuilder("spark.shuffle.service.db.enabled")
       .doc("Whether to use db in ExternalShuffleService. Note that this only affects " +
         "standalone mode.")
+      .version("3.0.0")
 
 Review comment:
   SPARK-26288, commit ID: 8b0aa59218c209d39cbba5959302d8668b885cf6#diff-6bdad48cfc34314e89599655442ff210
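
   A short sketch tying together the shuffle-service keys in this hunk (standalone mode, per the doc string above; values illustrative — the port shown is simply the default from the hunk):

       import org.apache.spark.SparkConf

       val conf = new SparkConf()
         .set("spark.shuffle.service.enabled", "true")
         .set("spark.shuffle.service.db.enabled", "true") // default true; kept explicit here
         .set("spark.shuffle.service.port", "7337")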



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488659
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
 
 Review comment:
   SPARK-18761, commit ID: 678d91c1d2283d9965a39656af9d383bad093ba8#diff-5a0de266c82b95adb47d9bca714e1f1b
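
   Usage sketch for the task-reaper knobs above (illustrative values; per the hunk the defaults are pollingInterval=10s and killTimeout=-1, i.e. never force-kill):

       import org.apache.spark.SparkConf

       val conf = new SparkConf()
         .set("spark.task.reaper.enabled", "true")
         .set("spark.task.reaper.pollingInterval", "10s")
         .set("spark.task.reaper.killTimeout", "120s") // escalate to a kill after 2 minutes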



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389489093
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val MAX_TASK_ATTEMPTS_PER_NODE =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerNode")
+      .version("2.1.0")
 
 Review comment:
   SPARK-17675, commit ID: 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389486405
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
 
 Review comment:
   SPARK-4585, commit ID: b2047b55c5fc85de6b63276d8ab9610d2496e08b#diff-b096353602813e47074ace09a3890d56
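
   Note the fallbackConf above: if spark.dynamicAllocation.initialExecutors is unset, it inherits the value of spark.dynamicAllocation.minExecutors. A sketch of that behavior (illustrative values):

       import org.apache.spark.SparkConf

       val conf = new SparkConf()
         .set("spark.dynamicAllocation.enabled", "true")
         .set("spark.dynamicAllocation.minExecutors", "4")
       // initialExecutors is not set, so an application using this conf
       // starts with 4 executors via the fallback.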



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389488243
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
 
 Review comment:
   SPARK-1126, commit ID: 1617816090e7b20124a512a43860a21232ebf511#diff-4d2ab44195558d5a9d5f15b8803ef39d
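
   Usage sketch (illustrative value): spark.executor.instances fixes the executor count up front, the static counterpart to the dynamic-allocation keys earlier in this hunk, so normally only one of the two approaches is configured:

       import org.apache.spark.SparkConf

       val conf = new SparkConf()
         .set("spark.executor.instances", "10")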



[GitHub] [spark] AmplabJenkins removed a comment on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
AmplabJenkins removed a comment on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#issuecomment-596362304
 
 
   Test FAILed.
   Refer to this link for build results (access rights to CI server needed): 
   https://amplab.cs.berkeley.edu/jenkins//job/SparkPullRequestBuilder/119552/
   Test FAILed.



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389489767
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val BLACKLIST_KILL_ENABLED =
     ConfigBuilder("spark.blacklist.killBlacklistedExecutors")
+      .version("2.2.0")
 
 Review comment:
   SPARK-16554, commit ID: 6287c94f08200d548df5cc0a401b73b84f9968c4#diff-6bdad48cfc34314e89599655442ff210



[GitHub] [spark] AmplabJenkins removed a comment on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
AmplabJenkins removed a comment on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#issuecomment-596358918
 
 
   Test PASSed.
   Refer to this link for build results (access rights to CI server needed): 
   https://amplab.cs.berkeley.edu/jenkins//job/SparkPullRequestBuilder-K8s/24284/
   Test PASSed.



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389485994
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
 Review comment:
   No JIRA ID, commit ID: e5c4cd8a5e188592f8786a265c0cd073c69ac886#diff-391214d132a0fb4478f4f9c2313d8966
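
   Usage sketch (illustrative values): spark.task.cpus reserves CPUs per task, so concurrent tasks per executor is roughly executor cores divided by task CPUs — with the settings below, 8 / 2 = 4 tasks at a time:

       import org.apache.spark.SparkConf

       val conf = new SparkConf()
         .set("spark.executor.cores", "8")
         .set("spark.task.cpus", "2")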



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389489403
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
   private[spark] val BLACKLIST_TIMEOUT_CONF =
     ConfigBuilder("spark.blacklist.timeout")
+      .version("2.1.0")
 
 Review comment:
   SPARK-17675, commit ID: 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210


[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389486848
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
 
 Review comment:
   SPARK-27963, commit ID: 2ddeff97d7329942a98ef363991eeabc3fa71a76#diff-6bdad48cfc34314e89599655442ff210
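
One detail in the hunk above is worth spelling out: spark.dynamicAllocation.initialExecutors is built with .fallbackConf rather than a typed default, so an unset entry inherits whatever spark.dynamicAllocation.minExecutors resolves to. A minimal sketch of that wiring, copied from the hunk with behaviour notes added:

    private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
      ConfigBuilder("spark.dynamicAllocation.initialExecutors")
        .version("1.3.0")                            // SPARK-4585
        .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)  // unset => inherit minExecutors

    // With minExecutors = 2 and initialExecutors left unset,
    // conf.get(DYN_ALLOCATION_INITIAL_EXECUTORS) yields 2.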



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389490569
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -709,6 +770,7 @@ package object config {
       .internal()
       .doc("The time threshold of whether a event is considered to be taking too much time to " +
         s"process. Log the event if ${LISTENER_BUS_LOG_SLOW_EVENT_ENABLED.key} is true.")
+      .version("3.0.0")
 
 Review comment:
   SPARK-29001, commit ID: 0346afa8fc348aa1b3f5110df747a64e3b2da388#diff-6bdad48cfc34314e89599655442ff210
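
The hunk stops at the new .version call, so for context here is how an entry of this shape is typically finished. The name, key, time type, and default below are placeholders, not the real ones; only the .doc interpolation of another entry's key and the .version placement mirror the diff:

    private[spark] val SLOW_EVENT_TIME_THRESHOLD =                  // placeholder name
      ConfigBuilder("spark.example.logSlowEvent.threshold")         // placeholder key
        .internal()
        .doc("The time threshold of whether an event is considered to be taking too much " +
          s"time to process. Log the event if ${LISTENER_BUS_LOG_SLOW_EVENT_ENABLED.key} " +
          "is true.")
        .version("3.0.0")                                           // SPARK-29001
        .timeConf(TimeUnit.MILLISECONDS)                            // assumed
        .createWithDefaultString("1s")                              // assumed default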



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389486191
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
 
 Review comment:
   SPARK-3795, commit ID: 8d59b37b02eb36f37bcefafb952519d7dca744ad#diff-364713d7776956cb8b0a771e9b62f82d
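
For a sense of how the entries quoted above surface to users, a short illustrative snippet (the keys are the ones in the hunk; the values are arbitrary):

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
      .set("spark.dynamicAllocation.enabled", "true")
      .set("spark.dynamicAllocation.minExecutors", "2")
      .set("spark.dynamicAllocation.maxExecutors", "20")
    // initialExecutors is left unset here, so it falls back to
    // minExecutors via the fallbackConf wiring shown in the hunk.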



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389489225
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
       .intConf
       .createWithDefault(4)
 
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val TASK_REAPER_KILL_TIMEOUT =
     ConfigBuilder("spark.task.reaper.killTimeout")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefault(-1)
 
   private[spark] val TASK_REAPER_POLLING_INTERVAL =
     ConfigBuilder("spark.task.reaper.pollingInterval")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefaultString("10s")
 
   private[spark] val TASK_REAPER_THREAD_DUMP =
     ConfigBuilder("spark.task.reaper.threadDump")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(true)
 
   // Blacklist confs
   private[spark] val BLACKLIST_ENABLED =
     ConfigBuilder("spark.blacklist.enabled")
+      .version("2.1.0")
       .booleanConf
       .createOptional
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_EXECUTOR =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerExecutor")
+      .version("2.1.0")
       .intConf
       .createWithDefault(1)
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_NODE =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerNode")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC =
     ConfigBuilder("spark.blacklist.application.maxFailedTasksPerExecutor")
+      .version("2.2.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC_STAGE =
     ConfigBuilder("spark.blacklist.stage.maxFailedTasksPerExecutor")
+      .version("2.1.0")
 
 Review comment:
   SPARK-17675, commit ID: 9ce7d3e542e786c62f047c13f3001e178f76e06a#diff-6bdad48cfc34314e89599655442ff210



[GitHub] [spark] AmplabJenkins commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
AmplabJenkins commented on issue #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#issuecomment-596358918
 
 
   Test PASSed.
   Refer to this link for build results (access rights to CI server needed): 
   https://amplab.cs.berkeley.edu/jenkins//job/SparkPullRequestBuilder-K8s/24284/



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389490154
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -676,6 +733,7 @@ package object config {
       .doc("Whether to un-register all the outputs on the host in condition that we receive " +
         " a FetchFailure. This is set default to false, which means, we only un-register the " +
         " outputs related to the exact executor(instead of the host) on a FetchFailure.")
+      .version("2.3.0")
 
 Review comment:
   SPARK-19753, commit ID: dccc0aa3cf957c8eceac598ac81ac82f03b52105#diff-6bdad48cfc34314e89599655442ff210
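
Since the hunk omits the entry's key and trailing builder calls, a sketch of the completed shape follows. The name and key below are placeholders; the boolean type and false default are inferred from the doc text itself:

    private[spark] val UNREGISTER_OUTPUT_ON_HOST =               // placeholder name
      ConfigBuilder("spark.example.unRegisterOutputOnHost")      // placeholder key
        .doc("Whether to un-register all the outputs on the host in the event that we " +
          "receive a FetchFailure. This is set to false by default, which means we only " +
          "un-register the outputs related to the exact executor (instead of the host) " +
          "on a FetchFailure.")
        .version("2.3.0")                                        // SPARK-19753
        .booleanConf
        .createWithDefault(false)                                // per the doc text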



[GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core

Posted by GitBox <gi...@apache.org>.
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389489886
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem " +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
       .intConf
       .createWithDefault(4)
 
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val TASK_REAPER_KILL_TIMEOUT =
     ConfigBuilder("spark.task.reaper.killTimeout")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefault(-1)
 
   private[spark] val TASK_REAPER_POLLING_INTERVAL =
     ConfigBuilder("spark.task.reaper.pollingInterval")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefaultString("10s")
 
   private[spark] val TASK_REAPER_THREAD_DUMP =
     ConfigBuilder("spark.task.reaper.threadDump")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(true)
 
   // Blacklist confs
   private[spark] val BLACKLIST_ENABLED =
     ConfigBuilder("spark.blacklist.enabled")
+      .version("2.1.0")
       .booleanConf
       .createOptional
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_EXECUTOR =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerExecutor")
+      .version("2.1.0")
       .intConf
       .createWithDefault(1)
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_NODE =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerNode")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC =
     ConfigBuilder("spark.blacklist.application.maxFailedTasksPerExecutor")
+      .version("2.2.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC_STAGE =
     ConfigBuilder("spark.blacklist.stage.maxFailedTasksPerExecutor")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILED_EXEC_PER_NODE =
     ConfigBuilder("spark.blacklist.application.maxFailedExecutorsPerNode")
+      .version("2.2.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILED_EXEC_PER_NODE_STAGE =
     ConfigBuilder("spark.blacklist.stage.maxFailedExecutorsPerNode")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val BLACKLIST_TIMEOUT_CONF =
     ConfigBuilder("spark.blacklist.timeout")
+      .version("2.1.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .createOptional
 
   private[spark] val BLACKLIST_KILL_ENABLED =
     ConfigBuilder("spark.blacklist.killBlacklistedExecutors")
+      .version("2.2.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val BLACKLIST_LEGACY_TIMEOUT_CONF =
     ConfigBuilder("spark.scheduler.executorTaskBlacklistTime")
       .internal()
+      .version("1.0.0")
 
 Review comment:
   No JIRA ID, commit ID: ab747d39ddc7c8a314ed2fb26548fc5652af0d74#diff-bad3987c83bd22d46416d3dd9d208e76
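
This legacy key dates to 1.0.0, which is presumably why the note above says "No JIRA ID". The hunk ends at .version; a plausible completion, with the trailing calls assumed by analogy with BLACKLIST_TIMEOUT_CONF above, plus an illustrative (not authoritative) resolution order:

    private[spark] val BLACKLIST_LEGACY_TIMEOUT_CONF =
      ConfigBuilder("spark.scheduler.executorTaskBlacklistTime")
        .internal()                          // hidden from user-facing docs
        .version("1.0.0")
        .timeConf(TimeUnit.MILLISECONDS)     // assumed
        .createOptional                      // assumed

    // Illustrative precedence only: prefer the modern key, then the legacy one.
    // val timeoutMs = conf.get(BLACKLIST_TIMEOUT_CONF)
    //   .orElse(conf.get(BLACKLIST_LEGACY_TIMEOUT_CONF))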
