You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2018/12/31 13:25:10 UTC

[GitHub] viirya commented on a change in pull request #23416: [SPARK-26463][CORE] Use ConfigEntry for hardcoded configs for scheduler categories.

viirya commented on a change in pull request #23416: [SPARK-26463][CORE] Use ConfigEntry for hardcoded configs for scheduler categories.
URL: https://github.com/apache/spark/pull/23416#discussion_r244584512
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -724,4 +773,120 @@ package object config {
       .stringConf
       .toSequence
       .createWithDefault(Nil)
+
+  private[spark] val CLEANER_PERIODIC_GC_INTERVAL =
+    ConfigBuilder("spark.cleaner.periodicGC.interval")
+      .timeConf(TimeUnit.SECONDS)
+      .createWithDefaultString("30min")
+
+  private[spark] val CLEANER_REFERENCE_TRACKING =
+    ConfigBuilder("spark.cleaner.referenceTracking")
+      .booleanConf
+      .createWithDefault(true)
+
+  private[spark] val CLEANER_REFERENCE_TRACKING_BLOCKING =
+    ConfigBuilder("spark.cleaner.referenceTracking.blocking")
+      .booleanConf
+      .createWithDefault(true)
+
+  private[spark] val CLEANER_REFERENCE_TRACKING_BLOCKING_SHUFFLE =
+    ConfigBuilder("spark.cleaner.referenceTracking.blocking.shuffle")
+      .booleanConf
+      .createWithDefault(false)
+
+  private[spark] val CLEANER_REFERENCE_TRACKING_CLEAN_CHECKPOINTS =
+    ConfigBuilder("spark.cleaner.referenceTracking.cleanCheckpoints")
+      .booleanConf
+      .createWithDefault(false)
+
+  private[spark] val RPC_ASK_TIMEOUT =
+    ConfigBuilder("spark.rpc.askTimeout")
+      .stringConf
+      .createOptional
+
+  private[spark] val RPC_CONNECT_THREADS =
+    ConfigBuilder("spark.rpc.connect.threads")
+      .intConf
+      .createWithDefault(64)
+
+  private[spark] val RPC_IO_NUM_CONNECTIONS_PER_PEER =
+    ConfigBuilder("spark.rpc.io.numConnectionsPerPeer")
+      .intConf
+      .createWithDefault(1)
+
+
+  private[spark] val RPC_IO_THREADS =
+    ConfigBuilder("spark.rpc.io.threads")
+      .intConf
+      .createOptional
+
+  private[spark] val RPC_LOOKUP_TIMEOUT =
+    ConfigBuilder("spark.rpc.lookupTimeout")
+      .stringConf
+      .createOptional
+
+  private[spark] val RPC_MESSAGE_MAX_SIZE =
+    ConfigBuilder("spark.rpc.message.maxSize")
+      .intConf
+      .createWithDefault(128)
+
+  private[spark] val RPC_NETTY_DISPATCHER_NUM_THREADS =
+    ConfigBuilder("spark.rpc.netty.dispatcher.numThreads")
+      .intConf
+      .createOptional
+
+  private[spark] val RPC_NUM_RETRIES =
+    ConfigBuilder("spark.rpc.numRetries")
+      .intConf
+      .createWithDefault(3)
+
+  private[spark] val RPC_RETRY_WAIT =
+    ConfigBuilder("spark.rpc.retry.wait")
+      .timeConf(TimeUnit.MILLISECONDS)
+      .createWithDefaultString("3s")
+
+  private[spark] val SCHEDULER_ALLOCATION_FILE =
+    ConfigBuilder("spark.scheduler.allocation.file")
+      .stringConf
+      .createOptional
+
+  private[spark] val SCHEDULER_MIN_REGISTERED_RESOURCES_RATIO =
+    ConfigBuilder("spark.scheduler.minRegisteredResourcesRatio")
+      .doubleConf
+      .createOptional
+
+  private[spark] val SCHEDULER_MAX_REGISTERED_RESOURCE_WAITING_TIME =
+    ConfigBuilder("spark.scheduler.maxRegisteredResourcesWaitingTime")
+      .timeConf(TimeUnit.MILLISECONDS)
+      .createWithDefaultString("30s")
+
+  private[spark] val SCHEDULER_MODE =
+    ConfigBuilder("spark.scheduler.mode")
+      .stringConf
+      .createWithDefault(SchedulingMode.FIFO.toString)
+
+  private[spark] val SCHEDULER_REVIVE_INTERVAL =
+    ConfigBuilder("spark.scheduler.revive.interval")
+      .timeConf(TimeUnit.MILLISECONDS)
+      .createOptional
+
+  private[spark] val SPECULATION =
 
 Review comment:
   I feel that `SPECULATION_ENABLED` is better than `SPECULATION`.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org