spark-reviews mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From JoshRosen <...@git.apache.org>
Subject [GitHub] spark pull request: [SPARK-5549] Define TaskContext interface in S...
Date Tue, 03 Feb 2015 06:32:36 GMT
Github user JoshRosen commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4324#discussion_r23985377
  
    --- Diff: core/src/main/scala/org/apache/spark/TaskContext.scala ---
    @@ -15,112 +15,116 @@
      * limitations under the License.
      */
     
    -package org.apache.spark;
    +package org.apache.spark
     
    -import java.io.Serializable;
    +import java.io.Serializable
     
    -import scala.Function0;
    -import scala.Function1;
    -import scala.Unit;
    +import org.apache.spark.annotation.DeveloperApi
    +import org.apache.spark.executor.TaskMetrics
    +import org.apache.spark.util.TaskCompletionListener
     
    -import org.apache.spark.annotation.DeveloperApi;
    -import org.apache.spark.executor.TaskMetrics;
    -import org.apache.spark.util.TaskCompletionListener;
     
    -/**
    - * Contextual information about a task which can be read or mutated during
    - * execution. To access the TaskContext for a running task use
    - * TaskContext.get().
    - */
    -public abstract class TaskContext implements Serializable {
    +object TaskContext {
       /**
        * Return the currently active TaskContext. This can be called inside of
        * user functions to access contextual information about running tasks.
        */
    -  public static TaskContext get() {
    -    return taskContext.get();
    -  }
    +  def get(): TaskContext = taskContext.get
    +
    +  private val taskContext: ThreadLocal[TaskContext] = new ThreadLocal[TaskContext]
    +
    +  private[spark] def setTaskContext(tc: TaskContext): Unit = taskContext.set(tc)
    +
    +  private[spark] def unset(): Unit = taskContext.remove()
    +}
     
    -  private static ThreadLocal<TaskContext> taskContext =
    -    new ThreadLocal<TaskContext>();
     
    -  static void setTaskContext(TaskContext tc) {
    -    taskContext.set(tc);
    -  }
    +/**
    + * Contextual information about a task which can be read or mutated during
    + * execution. To access the TaskContext for a running task, use:
    + * {{{
    + *   org.apache.spark.TaskContext.get()
    + * }}}
    + */
    +abstract class TaskContext extends Serializable {
    +  // Note: TaskContext must NOT define a get method. Otherwise it will prevent the Scala
compiler
    +  // from generating a static get method (based on the companion object's get method).
     
    -  static void unset() {
    -    taskContext.remove();
    -  }
    +  // Note: getters in this class are defined with parentheses to maintain backward compatibility.
     
       /**
    -   * Whether the task has completed.
    +   * Returns true if the task has completed.
        */
    -  public abstract boolean isCompleted();
    +  def isCompleted(): Boolean
     
       /**
    -   * Whether the task has been killed.
    +   * Returns true if the task has been killed.
        */
    -  public abstract boolean isInterrupted();
    +  def isInterrupted(): Boolean
     
    -  /** @deprecated use {@link #isRunningLocally()} */
    -  @Deprecated
    -  public abstract boolean runningLocally();
    +  /** @deprecated use {@link #isRunningLocally()} */
    +  @deprecated("use isRunningLocally", "1.2.0")
    +  def runningLocally(): Boolean
     
    -  public abstract boolean isRunningLocally();
    +  /**
    +   * Returns true if the task is running locally in the driver program.
    +   * @return
    --- End diff --
    
    Can probably drop this `@return` tag.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


Mime
View raw message