spark-reviews mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From andrewor14 <...@git.apache.org>
Subject [GitHub] spark pull request: SPARK-1706: Allow multiple executors per worke...
Date Tue, 07 Apr 2015 21:58:47 GMT
Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/731#discussion_r27925182
  
    --- Diff: core/src/main/scala/org/apache/spark/deploy/master/Master.scala ---
    @@ -583,31 +560,68 @@ private[master] class Master(
             }
             // Now that we've decided how many cores to give on each node, let's actually give them
             for (pos <- 0 until numUsable) {
    -          if (assigned(pos) > 0) {
    -            val exec = app.addExecutor(usableWorkers(pos), assigned(pos))
    -            launchExecutor(usableWorkers(pos), exec)
    -            app.state = ApplicationState.RUNNING
    -          }
    +          allocateWorkerResourceToExecutors(app, assigned(pos), usableWorkers(pos))
             }
           }
         } else {
    -      // Pack each app into as few nodes as possible until we've assigned all its cores
    +      // Pack each app into as few workers as possible until we've assigned all its cores
           for (worker <- workers if worker.coresFree > 0 && worker.state == WorkerState.ALIVE) {
             for (app <- waitingApps if app.coresLeft > 0) {
    -          if (canUse(app, worker)) {
    -            val coresToUse = math.min(worker.coresFree, app.coresLeft)
    -            if (coresToUse > 0) {
    -              val exec = app.addExecutor(worker, coresToUse)
    -              launchExecutor(worker, exec)
    -              app.state = ApplicationState.RUNNING
    -            }
    -          }
    +          allocateWorkerResourceToExecutors(app, app.coresLeft, worker)
             }
           }
         }
       }
     
    -  private def launchExecutor(worker: WorkerInfo, exec: ExecutorDesc) {
    +  /**
    +   * allocate resources in a certain worker to one or more executors
    +   * @param app the info of the application which the executors belong to
    +   * @param coresDemand the total number of cores to be allocated to this application
    --- End diff --
    
    More specifically, this should be "cores on this worker to be allocated to this application". I would rename this variable to `coresToAllocate`.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


Mime
View raw message