spark-reviews mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From JoshRosen <...@git.apache.org>
Subject [GitHub] spark pull request: [SPARK-12895] Implement TaskMetrics with accum...
Date Sun, 24 Jan 2016 21:42:57 GMT
Github user JoshRosen commented on a diff in the pull request:

    https://github.com/apache/spark/pull/10835#discussion_r50642938
  
    --- Diff: core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala ---
    @@ -230,86 +299,50 @@ class TaskMetrics extends Serializable {
        */
       def shuffleWriteMetrics: Option[ShuffleWriteMetrics] = _shuffleWriteMetrics
     
    -  @deprecated("setting ShuffleWriteMetrics is for internal use only", "2.0.0")
    -  def shuffleWriteMetrics_=(swm: Option[ShuffleWriteMetrics]): Unit = {
    -    _shuffleWriteMetrics = swm
    -  }
    -
       /**
        * Get or create a new [[ShuffleWriteMetrics]] associated with this task.
        */
       private[spark] def registerShuffleWriteMetrics(): ShuffleWriteMetrics = synchronized {
         _shuffleWriteMetrics.getOrElse {
    -      val metrics = new ShuffleWriteMetrics
    +      val metrics = new ShuffleWriteMetrics(initialAccumsMap)
           _shuffleWriteMetrics = Some(metrics)
           metrics
         }
       }
     
    -  private var _updatedBlockStatuses: Seq[(BlockId, BlockStatus)] =
    -    Seq.empty[(BlockId, BlockStatus)]
    -
    -  /**
    -   * Storage statuses of any blocks that have been updated as a result of this task.
    -   */
    -  def updatedBlockStatuses: Seq[(BlockId, BlockStatus)] = _updatedBlockStatuses
    -
    -  @deprecated("setting updated blocks is for internal use only", "2.0.0")
    -  def updatedBlocks_=(ub: Option[Seq[(BlockId, BlockStatus)]]): Unit = {
    -    _updatedBlockStatuses = ub.getOrElse(Seq.empty[(BlockId, BlockStatus)])
    -  }
    -
    -  private[spark] def incUpdatedBlockStatuses(v: Seq[(BlockId, BlockStatus)]): Unit = {
    -    _updatedBlockStatuses ++= v
    -  }
    -
    -  private[spark] def setUpdatedBlockStatuses(v: Seq[(BlockId, BlockStatus)]): Unit = {
    -    _updatedBlockStatuses = v
    -  }
    -
    -  @deprecated("use updatedBlockStatuses instead", "2.0.0")
    -  def updatedBlocks: Option[Seq[(BlockId, BlockStatus)]] = {
    -    if (_updatedBlockStatuses.nonEmpty) Some(_updatedBlockStatuses) else None
    -  }
    -
    -  private[spark] def updateInputMetrics(): Unit = synchronized {
    -    inputMetrics.foreach(_.updateBytesRead())
    -  }
     
    -  @throws(classOf[IOException])
    -  private def readObject(in: ObjectInputStream): Unit = Utils.tryOrIOException {
    -    in.defaultReadObject()
    -    // Get the hostname from cached data, since hostname is the order of number of nodes in
    -    // cluster, so using cached hostname will decrease the object number and alleviate the GC
    -    // overhead.
    -    _hostname = TaskMetrics.getCachedHostName(_hostname)
    -  }
    -
    -  private var _accumulatorUpdates: Map[Long, Any] = Map.empty
    -  @transient private var _accumulatorsUpdater: () => Map[Long, Any] = null
    +  /* ========================== *
    +   |        OTHER THINGS        |
    +   * ========================== */
     
    -  private[spark] def updateAccumulators(): Unit = synchronized {
    -    _accumulatorUpdates = _accumulatorsUpdater()
    +  private[spark] def registerAccumulator(a: Accumulable[_, _]): Unit = {
    +    accums += a
       }
    -
       /**
        * Return the latest updates of accumulators in this task.
        */
    -  def accumulatorUpdates(): Map[Long, Any] = _accumulatorUpdates
    +  private[spark] def accumulatorUpdates(): Map[Long, Any] =
    +    accums.map { a => (a.id, a.localValue) }.toMap[Long, Any]
     
    -  private[spark] def setAccumulatorsUpdater(accumulatorsUpdater: () => Map[Long, Any]): Unit = {
    --- End diff --
    
    Happy to see this method gone; this was really confusingly named.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


Mime
View raw message