Skip to content

Commit

Permalink
Make history server's web UI port a Spark configuration
Browse files Browse the repository at this point in the history
  • Loading branch information
andrewor14 committed Apr 10, 2014
1 parent 2dfb494 commit f7f5bf0
Show file tree
Hide file tree
Showing 6 changed files with 27 additions and 35 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -43,11 +43,9 @@ import org.apache.spark.util.Utils
* EventLoggingListener.
*
* @param baseLogDir The base directory in which event logs are found
* @param requestedPort The requested port to which this server is to be bound
*/
class HistoryServer(
val baseLogDir: String,
requestedPort: Int,
conf: SparkConf)
extends SparkUIContainer("History Server") with Logging {

Expand All @@ -56,7 +54,7 @@ class HistoryServer(
private val fileSystem = Utils.getHadoopFileSystem(baseLogDir)
private val localHost = Utils.localHostName()
private val publicHost = Option(System.getenv("SPARK_PUBLIC_DNS")).getOrElse(localHost)
private val port = requestedPort
private val port = WEB_UI_PORT
private val securityManager = new SecurityManager(conf)
private val indexPage = new IndexPage(this)

Expand Down Expand Up @@ -243,7 +241,7 @@ class HistoryServer(
* start-history-server.sh and stop-history-server.sh. The path to a base log directory
 * must be specified; the web UI port is set via the spark.history.ui.port configuration. For example:
*
* ./sbin/spark-history-server.sh /tmp/spark-events 18080
* ./sbin/spark-history-server.sh /tmp/spark-events
* ./sbin/spark-history-server.sh hdfs://1.2.3.4:9000/spark-events
*
* This launches the HistoryServer as a Spark daemon.
Expand All @@ -257,11 +255,14 @@ object HistoryServer {
// How many applications to retain
val RETAINED_APPLICATIONS = conf.getInt("spark.history.retainedApplications", 250)

// The port to which the web UI is bound
val WEB_UI_PORT = conf.getInt("spark.history.ui.port", 18080)

val STATIC_RESOURCE_DIR = SparkUI.STATIC_RESOURCE_DIR

def main(argStrings: Array[String]) {
val args = new HistoryServerArguments(argStrings)
val server = new HistoryServer(args.logDir, args.port, conf)
val server = new HistoryServer(args.logDir, conf)
server.bind()
server.start()

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,31 +21,26 @@ import java.net.URI

import org.apache.hadoop.fs.Path

import org.apache.spark.util.{IntParam, Utils}
import org.apache.spark.util.Utils

/**
 * Command-line parser for the history server.
*/
private[spark] class HistoryServerArguments(args: Array[String]) {
var port = 18080
var logDir = ""

parse(args.toList)

private def parse(args: List[String]): Unit = {
args match {
case ("--port" | "-p") :: IntParam(value) :: tail =>
port = value
parse(tail)

case ("--dir" | "-d") :: value :: tail =>
logDir = value
parse(tail)

case ("--help" | "-h") :: tail =>
printUsageAndExit(0)

case Nil => {}
case Nil =>

case _ =>
printUsageAndExit(1)
Expand All @@ -71,7 +66,6 @@ private[spark] class HistoryServerArguments(args: Array[String]) {
"Usage: HistoryServer [options]\n" +
"\n" +
"Options:\n" +
" -p PORT, --port PORT Port for web server (default: 18080)\n" +
" -d DIR, --dir DIR Location of event log files")
System.exit(exitCode)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class WorkerWebUI(val worker: Worker, val workDir: File, requestedPort: Option[I

private val host = Utils.localHostName()
private val port = requestedPort.getOrElse(
worker.conf.get("worker.ui.port", WorkerWebUI.DEFAULT_PORT).toInt)
worker.conf.getInt("worker.ui.port", WorkerWebUI.DEFAULT_PORT))
private val indexPage = new IndexPage(this)

private val handlers: Seq[ServletContextHandler] = {
Expand Down Expand Up @@ -188,6 +188,6 @@ class WorkerWebUI(val worker: Worker, val workDir: File, requestedPort: Option[I
}

private[spark] object WorkerWebUI {
val DEFAULT_PORT=8081
val STATIC_RESOURCE_BASE = SparkUI.STATIC_RESOURCE_DIR
val DEFAULT_PORT="8081"
}
4 changes: 2 additions & 2 deletions core/src/main/scala/org/apache/spark/ui/SparkUI.scala
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ private[spark] class SparkUI(

private val localHost = Utils.localHostName()
private val publicHost = Option(System.getenv("SPARK_PUBLIC_DNS")).getOrElse(localHost)
private val port = conf.get("spark.ui.port", SparkUI.DEFAULT_PORT).toInt
private val port = conf.getInt("spark.ui.port", SparkUI.DEFAULT_PORT)

private val storage = new BlockManagerUI(this)
private val jobs = new JobProgressUI(this)
Expand Down Expand Up @@ -118,6 +118,6 @@ private[spark] class SparkUI(
}

private[spark] object SparkUI {
val DEFAULT_PORT = "4040"
val DEFAULT_PORT = 4040
val STATIC_RESOURCE_DIR = "org/apache/spark/ui/static"
}
18 changes: 12 additions & 6 deletions docs/monitoring.md
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,7 @@ You can start the history server by executing:

The base logging directory must be supplied, and should contain sub-directories that each
represent an application's event logs. This creates a web interface at
`http://<server-url>:18080` by default, but the port can be changed by supplying an extra
parameter to the start script. The history server depends on the following variables:
`http://<server-url>:18080` by default. The history server depends on the following variables:

<table class="table">
<tr><th style="width:21%">Environment Variable</th><th>Meaning</th></tr>
Expand All @@ -62,16 +61,23 @@ Further, the history server can be configured as follows:
<td>spark.history.updateInterval</td>
<td>10</td>
<td>
The period at which information displayed by this history server is updated. Each update
checks for any changes made to the event logs in persisted storage.
The period, in seconds, at which information displayed by this history server is updated.
Each update checks for any changes made to the event logs in persisted storage.
</td>
</tr>
<tr>
<td>spark.history.retainedApplications</td>
<td>250</td>
<td>
The number of application UIs to retain. If this cap is exceeded, then the least recently
updated applications will be removed.
The number of application UIs to retain. If this cap is exceeded, then the oldest
applications will be removed.
</td>
</tr>
<tr>
<td>spark.history.ui.port</td>
<td>18080</td>
<td>
The port to which the web interface of the history server binds.
</td>
</tr>
</table>
Expand Down
15 changes: 3 additions & 12 deletions sbin/start-history-server.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,20 +27,11 @@ sbin=`dirname "$0"`
sbin=`cd "$sbin"; pwd`

if [ $# -lt 1 ]; then
echo "Usage: ./start-history-server.sh <base-log-dir> [<web-ui-port>]"
echo "Example: ./start-history-server.sh /tmp/spark-events 18080"
echo "Usage: ./start-history-server.sh <base-log-dir>"
echo "Example: ./start-history-server.sh /tmp/spark-events"
exit
fi

# Set up base event log directory
LOG_DIR=$1
shift

# Set up web UI port
if [ ! -z $1 ]; then
PORT=$1
else
PORT=18080
fi

"$sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 --dir "$LOG_DIR" --port "$PORT"
"$sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 --dir "$LOG_DIR"

0 comments on commit f7f5bf0

Please sign in to comment.