spark-reviews mailing list archives

From pwendell <...@git.apache.org>
Subject [GitHub] spark pull request: [SPARK-1132] Persisting Web UI through refacto...
Date Sun, 16 Mar 2014 23:36:21 GMT
Github user pwendell commented on a diff in the pull request:

    https://github.com/apache/spark/pull/42#discussion_r10642674
  
    --- Diff: core/src/main/scala/org/apache/spark/ui/SparkUI.scala ---
    @@ -30,32 +30,65 @@ import org.apache.spark.ui.jobs.JobProgressUI
     import org.apache.spark.ui.storage.BlockManagerUI
     import org.apache.spark.util.Utils
     
    -/** Top level user interface for Spark */
    -private[spark] class SparkUI(sc: SparkContext) extends Logging {
    -  val host = Option(System.getenv("SPARK_PUBLIC_DNS")).getOrElse(Utils.localHostName())
    -  val port = sc.conf.get("spark.ui.port", SparkUI.DEFAULT_PORT).toInt
    -  var boundPort: Option[Int] = None
    -  var server: Option[Server] = None
    +/** Top level user interface for Spark. */
    +private[spark] class SparkUI(
    +    val sc: SparkContext,
    +    conf: SparkConf,
    +    val listenerBus: SparkListenerBus,
    +    val appName: String,
    +    val basePath: String = "")
    +  extends Logging {
    +
    +  def this(sc: SparkContext) = this(sc, sc.conf, sc.listenerBus, sc.appName)
    +  def this(conf: SparkConf, listenerBus: SparkListenerBus, appName: String, basePath: String) =
    +    this(null, conf, listenerBus, appName, basePath)
    +
    +  // If SparkContext is not provided, assume the associated application is not live
    +  val live = sc != null
    +
    +  val securityManager = if (live) sc.env.securityManager else new SecurityManager(conf)
    +
    +  private val host = Option(System.getenv("SPARK_PUBLIC_DNS")).getOrElse(Utils.localHostName())
    +  private val port = conf.get("spark.ui.port", SparkUI.DEFAULT_PORT).toInt
    +  private var boundPort: Option[Int] = None
    +  private var server: Option[Server] = None
    +
    +  private val storage = new BlockManagerUI(this)
    +  private val jobs = new JobProgressUI(this)
    +  private val env = new EnvironmentUI(this)
    +  private val exec = new ExecutorsUI(this)
     
    -  val handlers = Seq[ServletContextHandler] (
    -    createStaticHandler(SparkUI.STATIC_RESOURCE_DIR, "/static/*"),
    -    createRedirectHandler("/stages", "/")
    -  )
    -  val storage = new BlockManagerUI(sc)
    -  val jobs = new JobProgressUI(sc)
    -  val env = new EnvironmentUI(sc)
    -  val exec = new ExecutorsUI(sc)
    +  val handlers: Seq[ServletContextHandler] = {
    +    val metricsServletHandlers = if (live) {
    +      SparkEnv.get.metricsSystem.getServletHandlers
    +    } else {
    +      Array[ServletContextHandler]()
    +    }
    +    storage.getHandlers ++
    +    jobs.getHandlers ++
    +    env.getHandlers ++
    +    exec.getHandlers ++
    +    metricsServletHandlers ++
    +    Seq[ServletContextHandler] (
    +      createStaticHandler(SparkUI.STATIC_RESOURCE_DIR, "/static/*"),
    +      createRedirectHandler("/", "/stages", basePath)
    +    )
    +  }
    +
    +  // Maintain executor storage status through Spark events
    +  val storageStatusListener = new StorageStatusListener
     
    -  // Add MetricsServlet handlers by default
    -  val metricsServletHandlers = SparkEnv.get.metricsSystem.getServletHandlers
    +  // Only log events if this SparkUI is live
    +  private var eventLogger: Option[EventLoggingListener] = None
    --- End diff --
    
    It seems a bit more intuitive to me to have this event logger tracked inside `SparkContext` rather than in the UI.
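    
    For illustration only, a rough sketch of that alternative, assuming SparkContext creates the
    event logger and registers it on its own listener bus; the EventLoggingListener constructor
    arguments, the spark.eventLog.enabled key, and the addListener call below are assumptions,
    not the actual patch:
    
        package org.apache.spark
        
        // Assumed package locations for the scheduler-side classes referenced in the diff
        import org.apache.spark.scheduler.{EventLoggingListener, SparkListenerBus}
        
        // Hypothetical sketch, not the actual change: SparkContext owns the
        // EventLoggingListener and registers it on the shared listener bus,
        // so SparkUI only consumes events and never decides whether to log them.
        class SparkContext(val conf: SparkConf) extends Logging {
        
          private[spark] val listenerBus = new SparkListenerBus
          private[spark] val appName = conf.get("spark.app.name", "")
        
          // Create and register the event logger here rather than inside SparkUI
          private[spark] val eventLogger: Option[EventLoggingListener] =
            if (conf.getBoolean("spark.eventLog.enabled", false)) {
              val logger = new EventLoggingListener(appName, conf) // assumed signature
              listenerBus.addListener(logger)                      // assumed registration API
              Some(logger)
            } else {
              None
            }
        }
    
    With that ownership, the UI would just read whatever state its listeners build up
    (or consult sc.eventLogger when live), instead of managing event logging itself.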


