GitHub user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/42#discussion_r10284454
  
    --- Diff: core/src/main/scala/org/apache/spark/ui/SparkUI.scala ---
    @@ -17,38 +17,80 @@
     
     package org.apache.spark.ui
     
    +import java.io.{FileInputStream, File}
    +
    +import scala.io.Source
    +
    +import it.unimi.dsi.fastutil.io.FastBufferedInputStream
     import org.eclipse.jetty.server.{Handler, Server}
    +import org.json4s.jackson.JsonMethods._
     
     import org.apache.spark.{Logging, SparkContext, SparkEnv}
    +import org.apache.spark.scheduler._
     import org.apache.spark.ui.JettyUtils._
     import org.apache.spark.ui.env.EnvironmentUI
     import org.apache.spark.ui.exec.ExecutorsUI
     import org.apache.spark.ui.jobs.JobProgressUI
     import org.apache.spark.ui.storage.BlockManagerUI
     import org.apache.spark.util.Utils
    +import org.apache.spark.util.JsonProtocol
     
    -/** Top level user interface for Spark */
    -private[spark] class SparkUI(sc: SparkContext) extends Logging {
    +/** Top level user interface for Spark. */
    +private[spark] class SparkUI(val sc: SparkContext) extends Logging {
    +
    +  // If SparkContext is not provided, assume this UI is rendered from persisted storage
    +  val live = sc != null
       val host = Option(System.getenv("SPARK_PUBLIC_DNS")).getOrElse(Utils.localHostName())
    -  val port = sc.conf.get("spark.ui.port", SparkUI.DEFAULT_PORT).toInt
    +  var port = if (live) {
    +      sc.conf.get("spark.ui.port", SparkUI.DEFAULT_PORT).toInt
    +    } else {
    +      SparkUI.DEFAULT_PERSISTED_PORT.toInt
    +    }
       var boundPort: Option[Int] = None
       var server: Option[Server] = None
    +  var started = false
    +  var appName = ""
     
    -  val handlers = Seq[(String, Handler)](
    +  private val handlers = Seq[(String, Handler)](
         ("/static", createStaticHandler(SparkUI.STATIC_RESOURCE_DIR)),
         ("/", createRedirectHandler("/stages"))
       )
    -  val storage = new BlockManagerUI(sc)
    -  val jobs = new JobProgressUI(sc)
    -  val env = new EnvironmentUI(sc)
    -  val exec = new ExecutorsUI(sc)
    +  private val storage = new BlockManagerUI(this)
    +  private val jobs = new JobProgressUI(this)
    +  private val env = new EnvironmentUI(this)
    +  private val exec = new ExecutorsUI(this)
     
       // Add MetricsServlet handlers by default
    -  val metricsServletHandlers = SparkEnv.get.metricsSystem.getServletHandlers
    +  private val metricsServletHandlers = if (live) {
    +    SparkEnv.get.metricsSystem.getServletHandlers
    +  } else {
    +    Array[(String, Handler)]()
    +  }
     
    -  val allHandlers = storage.getHandlers ++ jobs.getHandlers ++ env.getHandlers ++
    +  private val allHandlers = storage.getHandlers ++ jobs.getHandlers ++ env.getHandlers ++
         exec.getHandlers ++ metricsServletHandlers ++ handlers
     
    +  // Maintain a gateway listener for all events to simplify event logging
    +  private var _gatewayListener: Option[GatewayUISparkListener] = None
    +
    +  def gatewayListener = _gatewayListener.getOrElse {
    +    val gateway = new GatewayUISparkListener(this, sc)
    +    _gatewayListener = Some(gateway)
    +    gateway
    +  }
    +
    +  // Only meaningful if port is set before binding
    +  def setPort(p: Int) = {
    --- End diff --
    
    We can even do ```def this(conf: SparkConf) = this(null, conf)``` and ```def this(sc: SparkContext) = this(sc, sc.conf)```
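
    For reference, a minimal sketch of how those auxiliary constructors could sit on top of a ```(sc, conf)``` primary constructor. The exact primary signature and the trimmed-down class body are assumptions here, not the actual diff:

    ```scala
    import org.apache.spark.{Logging, SparkConf, SparkContext}

    // Sketch only: the real class carries all the UI wiring shown in the diff above.
    private[spark] class SparkUI(val sc: SparkContext, conf: SparkConf) extends Logging {

      // If SparkContext is not provided, assume this UI is rendered from persisted storage
      val live = sc != null

      // Live UI: reuse the context's own configuration
      def this(sc: SparkContext) = this(sc, sc.conf)

      // Rendering from persisted storage: no SparkContext, configuration supplied directly
      def this(conf: SparkConf) = this(null, conf)
    }
    ```

    Callers with a live SparkContext would then use ```new SparkUI(sc)```, while the history-rendering path would use ```new SparkUI(conf)``` without a context.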

