spark-reviews mailing list archives

From GitBox <...@apache.org>
Subject [GitHub] [spark] beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
Date Mon, 09 Mar 2020 07:05:10 GMT
beliefer commented on a change in pull request #27852: [SPARK-31002][CORE][DOC][FOLLOWUP] Add version information to the configuration of Core
URL: https://github.com/apache/spark/pull/27852#discussion_r389489654
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/internal/config/package.scala
 ##########
 @@ -569,104 +606,124 @@ package object config {
     ConfigBuilder("spark.kerberos.access.hadoopFileSystems")
     .doc("Extra Hadoop filesystem URLs for which to request delegation tokens. The filesystem
" +
       "that hosts fs.defaultFS does not need to be listed here.")
+    .version("3.0.0")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val EXECUTOR_INSTANCES = ConfigBuilder("spark.executor.instances")
+    .version("1.0.0")
     .intConf
     .createOptional
 
   private[spark] val PY_FILES = ConfigBuilder("spark.yarn.dist.pyFiles")
     .internal()
+    .version("2.2.1")
     .stringConf
     .toSequence
     .createWithDefault(Nil)
 
   private[spark] val TASK_MAX_DIRECT_RESULT_SIZE =
     ConfigBuilder("spark.task.maxDirectResultSize")
+      .version("2.0.0")
       .bytesConf(ByteUnit.BYTE)
       .createWithDefault(1L << 20)
 
   private[spark] val TASK_MAX_FAILURES =
     ConfigBuilder("spark.task.maxFailures")
+      .version("0.8.0")
       .intConf
       .createWithDefault(4)
 
   private[spark] val TASK_REAPER_ENABLED =
     ConfigBuilder("spark.task.reaper.enabled")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(false)
 
   private[spark] val TASK_REAPER_KILL_TIMEOUT =
     ConfigBuilder("spark.task.reaper.killTimeout")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefault(-1)
 
   private[spark] val TASK_REAPER_POLLING_INTERVAL =
     ConfigBuilder("spark.task.reaper.pollingInterval")
+      .version("2.0.3")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefaultString("10s")
 
   private[spark] val TASK_REAPER_THREAD_DUMP =
     ConfigBuilder("spark.task.reaper.threadDump")
+      .version("2.0.3")
       .booleanConf
       .createWithDefault(true)
 
   // Blacklist confs
   private[spark] val BLACKLIST_ENABLED =
     ConfigBuilder("spark.blacklist.enabled")
+      .version("2.1.0")
       .booleanConf
       .createOptional
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_EXECUTOR =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerExecutor")
+      .version("2.1.0")
       .intConf
       .createWithDefault(1)
 
   private[spark] val MAX_TASK_ATTEMPTS_PER_NODE =
     ConfigBuilder("spark.blacklist.task.maxTaskAttemptsPerNode")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC =
     ConfigBuilder("spark.blacklist.application.maxFailedTasksPerExecutor")
+      .version("2.2.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILURES_PER_EXEC_STAGE =
     ConfigBuilder("spark.blacklist.stage.maxFailedTasksPerExecutor")
+      .version("2.1.0")
       .intConf
       .createWithDefault(2)
 
   private[spark] val MAX_FAILED_EXEC_PER_NODE =
     ConfigBuilder("spark.blacklist.application.maxFailedExecutorsPerNode")
+      .version("2.2.0")
 
 Review comment:
   SPARK-8425, commit ID: 93cdb8a7d0f124b4db069fd8242207c82e263c52#diff-6bdad48cfc34314e89599655442ff210 (the JIRA and commit that introduced this configuration, which is the basis for tagging it with version "2.2.0").
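
   For reference, GitHub truncates the hunk above at the commented line; judging from the neighboring entries, the definition presumably continues with .intConf and .createWithDefault(2). The change itself just threads a .version(...) call into Spark's fluent ConfigBuilder chain: each setter returns the builder, so the call can be slotted in anywhere before the terminal create* step. Below is a minimal, self-contained Scala sketch of that pattern; MiniConfigBuilder, TypedMiniBuilder and MiniConfigEntry are hypothetical names used for illustration, not Spark's internal API.

     // Minimal sketch of the fluent-builder pattern used by Spark's internal
     // config DSL. Each setter mutates builder state and returns `this`, so
     // calls chain; the terminal create* call produces an immutable entry.
     final case class MiniConfigEntry[T](
         key: String,
         version: String,   // release in which the config first appeared
         doc: String,
         default: Option[T])

     final class MiniConfigBuilder(key: String) {
       private var _version: String = ""
       private var _doc: String = ""

       def version(v: String): MiniConfigBuilder = { _version = v; this }
       def doc(d: String): MiniConfigBuilder = { _doc = d; this }

       // Fixing the value type hands off to a typed builder, mirroring the
       // .intConf / .stringConf steps visible in the diff above.
       def intConf: TypedMiniBuilder[Int] =
         new TypedMiniBuilder[Int](key, _version, _doc)
     }

     final class TypedMiniBuilder[T](key: String, version: String, doc: String) {
       def createWithDefault(default: T): MiniConfigEntry[T] =
         MiniConfigEntry(key, version, doc, Some(default))
       def createOptional: MiniConfigEntry[T] =
         MiniConfigEntry(key, version, doc, None)
     }

     object MiniConfigDemo extends App {
       // Mirrors the shape of the entry under review.
       val maxFailedExecPerNode =
         new MiniConfigBuilder("spark.blacklist.application.maxFailedExecutorsPerNode")
           .version("2.2.0")
           .intConf
           .createWithDefault(2)
       println(maxFailedExecPerNode)
     }

   Because .version only records metadata, its position in the chain is a style choice; the convention followed in this PR is to place it after the doc string (or directly after the builder) and before the type-fixing step.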

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

