spark-reviews mailing list archives

From GitBox <...@apache.org>
Subject [GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
Date Mon, 09 Mar 2020 06:56:44 GMT
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389487618
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -461,97 +461,133 @@ package object config {
       .intConf
       .createWithDefault(5)
 
-  private[spark] val IS_PYTHON_APP = ConfigBuilder("spark.yarn.isPython").internal()
-    .booleanConf.createWithDefault(false)
+  private[spark] val IS_PYTHON_APP =
+    ConfigBuilder("spark.yarn.isPython")
+      .internal()
+      .version("1.5.0")
+      .booleanConf
+      .createWithDefault(false)
 
-  private[spark] val CPUS_PER_TASK = ConfigBuilder("spark.task.cpus").intConf.createWithDefault(1)
+  private[spark] val CPUS_PER_TASK =
+    ConfigBuilder("spark.task.cpus").version("0.5.0").intConf.createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_ENABLED =
-    ConfigBuilder("spark.dynamicAllocation.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_TESTING =
-    ConfigBuilder("spark.dynamicAllocation.testing").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.dynamicAllocation.testing")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_MIN_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.minExecutors").intConf.createWithDefault(0)
+    ConfigBuilder("spark.dynamicAllocation.minExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(0)
 
   private[spark] val DYN_ALLOCATION_INITIAL_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.initialExecutors")
+      .version("1.3.0")
       .fallbackConf(DYN_ALLOCATION_MIN_EXECUTORS)
 
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
-    ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
+    ConfigBuilder("spark.dynamicAllocation.maxExecutors")
+      .version("1.2.0")
+      .intConf
+      .createWithDefault(Int.MaxValue)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_ALLOCATION_RATIO =
     ConfigBuilder("spark.dynamicAllocation.executorAllocationRatio")
-      .doubleConf.createWithDefault(1.0)
+      .version("2.4.0")
+      .doubleConf
+      .createWithDefault(1.0)
 
   private[spark] val DYN_ALLOCATION_CACHED_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.cachedExecutorIdleTimeout")
+      .version("1.4.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Integer.MAX_VALUE)
 
   private[spark] val DYN_ALLOCATION_EXECUTOR_IDLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.executorIdleTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(60)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TRACKING =
     ConfigBuilder("spark.dynamicAllocation.shuffleTracking.enabled")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val DYN_ALLOCATION_SHUFFLE_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.shuffleTimeout")
+      .version("3.0.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .checkValue(_ >= 0L, "Timeout must be >= 0.")
       .createWithDefault(Long.MaxValue)
 
   private[spark] val DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.schedulerBacklogTimeout")
+      .version("1.2.0")
       .timeConf(TimeUnit.SECONDS).createWithDefault(1)
 
   private[spark] val DYN_ALLOCATION_SUSTAINED_SCHEDULER_BACKLOG_TIMEOUT =
     ConfigBuilder("spark.dynamicAllocation.sustainedSchedulerBacklogTimeout")
+      .version("1.2.0")
       .fallbackConf(DYN_ALLOCATION_SCHEDULER_BACKLOG_TIMEOUT)
 
   private[spark] val LOCALITY_WAIT = ConfigBuilder("spark.locality.wait")
+    .version("0.5.0")
     .timeConf(TimeUnit.MILLISECONDS)
     .createWithDefaultString("3s")
 
   private[spark] val SHUFFLE_SERVICE_ENABLED =
-    ConfigBuilder("spark.shuffle.service.enabled").booleanConf.createWithDefault(false)
+    ConfigBuilder("spark.shuffle.service.enabled")
+      .version("1.2.0")
+      .booleanConf
+      .createWithDefault(false)
 
   private[spark] val SHUFFLE_SERVICE_FETCH_RDD_ENABLED =
     ConfigBuilder(Constants.SHUFFLE_SERVICE_FETCH_RDD_ENABLED)
       .doc("Whether to use the ExternalShuffleService for fetching disk persisted RDD blocks.
" +
         "In case of dynamic allocation if this feature is enabled executors having only disk
" +
         "persisted blocks are considered idle after " +
         "'spark.dynamicAllocation.executorIdleTimeout' and will be released accordingly.")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val SHUFFLE_SERVICE_DB_ENABLED =
     ConfigBuilder("spark.shuffle.service.db.enabled")
       .doc("Whether to use db in ExternalShuffleService. Note that this only affects " +
         "standalone mode.")
+      .version("3.0.0")
 
 Review comment:
   The "3.0.0" recorded for spark.shuffle.service.db.enabled traces to SPARK-26288, commit ID: 8b0aa59218c209d39cbba5959302d8668b885cf6#diff-6bdad48cfc34314e89599655442ff210
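
   For context, a minimal sketch of the pattern this patch standardizes on. The key, doc text, and default below are hypothetical, for illustration only; the builder chain mirrors the diff above, and the closing comment assumes ConfigEntry exposes the recorded release as a version field, as this PR series introduces:

   package org.apache.spark.internal.config

   // Illustration only, not part of the patch. The entry records the Spark
   // release that first shipped its key via .version().
   private[spark] object VersionedConfigExample {
     private[spark] val EXAMPLE_FEATURE_ENABLED =
       ConfigBuilder("spark.example.feature.enabled")  // hypothetical key
         .doc("Illustrative entry; not a real Spark configuration.")
         .version("3.0.0")  // release in which the key first appeared
         .booleanConf
         .createWithDefault(false)

     // If ConfigEntry carries the recorded release, documentation tooling
     // can read it back, e.g.:
     //   EXAMPLE_FEATURE_ENABLED.version == "3.0.0"
   }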
