spark-reviews mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From zsxwing <...@git.apache.org>
Subject [GitHub] spark pull request #18357: [SPARK-21146] [CORE] Worker should handle and shu...
Date Fri, 30 Jun 2017 00:54:57 GMT
Github user zsxwing commented on a diff in the pull request:

    https://github.com/apache/spark/pull/18357#discussion_r124944873
  
    --- Diff: core/src/main/scala/org/apache/spark/util/SparkUncaughtExceptionHandler.scala
---
    @@ -26,27 +26,34 @@ import org.apache.spark.internal.Logging
      */
     private[spark] object SparkUncaughtExceptionHandler
       extends Thread.UncaughtExceptionHandler with Logging {
    +  private[this] var exitOnException = true
     
    -  override def uncaughtException(thread: Thread, exception: Throwable) {
    -    try {
    -      // Make it explicit that uncaught exceptions are thrown when container is shutting down.
    -      // It will help users when they analyze the executor logs
    -      val inShutdownMsg = if (ShutdownHookManager.inShutdown()) "[Container in shutdown] " else ""
    -      val errMsg = "Uncaught exception in thread "
    -      logError(inShutdownMsg + errMsg + thread, exception)
    +  def apply(exitOnException: Boolean): Thread.UncaughtExceptionHandler = {
    +    this.exitOnException = exitOnException
    +    this
    +  }
     
    -      // We may have been called from a shutdown hook. If so, we must not call System.exit().
    -      // (If we do, we will deadlock.)
    -      if (!ShutdownHookManager.inShutdown()) {
    +  override def uncaughtException(thread: Thread, exception: Throwable) {
    +    // Make it explicit that uncaught exceptions are thrown when process is shutting down.
    +    // It will help users when they analyze the executor logs
    +    val errMsg = "Uncaught exception in thread " + thread
    +    if (ShutdownHookManager.inShutdown()) {
    +      logError("[Process in shutdown] " + errMsg, exception)
    +    } else if (exception.isInstanceOf[Error] ||
    +      (!exception.isInstanceOf[Error] && exitOnException)) {
    +      try {
    +        logError(errMsg + ". Shutting down now..", exception)
             if (exception.isInstanceOf[OutOfMemoryError]) {
               System.exit(SparkExitCode.OOM)
             } else {
               System.exit(SparkExitCode.UNCAUGHT_EXCEPTION)
             }
    +      } catch {
    +        case oom: OutOfMemoryError => Runtime.getRuntime.halt(SparkExitCode.OOM)
    +        case t: Throwable => Runtime.getRuntime.halt(SparkExitCode.UNCAUGHT_EXCEPTION_TWICE)
           }
    -    } catch {
    -      case oom: OutOfMemoryError => Runtime.getRuntime.halt(SparkExitCode.OOM)
    --- End diff --
    
    > Do you mean moving the whole code in uncaughtException() to try block and having the catch block?
    
    Yes.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


Mime
View raw message