
Commit 0f5142c

Review feedback.
Marcelo Vanzin committed Aug 25, 2014
1 parent 41f8c8a commit 0f5142c
Showing 4 changed files with 6 additions and 5 deletions.

@@ -190,7 +190,8 @@ private[spark] class ApplicationMaster(args: ApplicationMasterArguments,
         conf = sparkConf, securityManager = securityMgr)._1
       actor = waitForSparkDriver()
       addAmIpFilter()
-      registerAM(sparkConf.get("spark.driver.appUIAddress", ""), "")
+      registerAM(sparkConf.get("spark.driver.appUIAddress", ""),
+        sparkConf.get("spark.driver.appUIHistoryAddress", ""))

       // In client mode the actor will stop the reporter thread.
       reporterThread.join()
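
The hunk above registers the AM with the value of spark.driver.appUIHistoryAddress instead of a hard-coded empty string. As a minimal standalone sketch of the fallback behaviour (illustrative only, not the actual ApplicationMaster wiring), SparkConf.get(key, default) returns the default when the key is unset, so an application that never sets the history address still registers with "":

    import org.apache.spark.SparkConf

    object AppUIAddressSketch {
      def main(args: Array[String]): Unit = {
        // Hypothetical driver-side configuration; only appUIAddress is set here.
        val sparkConf = new SparkConf(loadDefaults = false)
          .set("spark.driver.appUIAddress", "http://driver-host:4040")

        // Mirrors the two lookups in the hunk above; the second falls back to "".
        val uiAddress = sparkConf.get("spark.driver.appUIAddress", "")
        val historyAddress = sparkConf.get("spark.driver.appUIHistoryAddress", "")

        println(s"UI address: $uiAddress")
        println(s"History address: '$historyAddress'") // empty when unset
      }
    }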

@@ -50,8 +50,8 @@ trait YarnRMClient {
   /**
    * Shuts down the AM. Guaranteed to only be called once.
    *
-   * @param registered Whether the AM was successfully registered with the RM.
    * @param status The final status of the AM.
+   * @param diagnostics Diagnostics message to include in the final status.
    */
   def shutdown(status: FinalApplicationStatus, diagnostics: String = ""): Unit

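
For context on how a concrete client might honour this contract, here is a hypothetical sketch that maps the (status, diagnostics) pair onto Hadoop's AMRMClient.unregisterApplicationMaster; the real YarnRMClient implementation is not part of this diff, and the class name and wiring below are assumptions:

    import org.apache.hadoop.yarn.api.records.FinalApplicationStatus
    import org.apache.hadoop.yarn.client.api.AMRMClient
    import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest

    // Hypothetical sketch only; not Spark code.
    class SketchRMClient(amClient: AMRMClient[ContainerRequest], trackingUrl: String) {

      /** Shuts down the AM by unregistering it from the ResourceManager. */
      def shutdown(status: FinalApplicationStatus, diagnostics: String = ""): Unit = {
        // unregisterApplicationMaster(appStatus, appMessage, appTrackingUrl):
        // the diagnostics string becomes the final status message shown by YARN.
        amClient.unregisterApplicationMaster(status, diagnostics, trackingUrl)
      }
    }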

@@ -193,14 +193,14 @@ object YarnSparkHadoopUtil {
     }
   }

-  def lookupRack(conf: Configuration, host: String): String = {
+  private[spark] def lookupRack(conf: Configuration, host: String): String = {
     if (!hostToRack.contains(host)) {
       populateRackInfo(conf, host)
     }
     hostToRack.get(host)
   }

-  def populateRackInfo(conf: Configuration, hostname: String) {
+  private[spark] def populateRackInfo(conf: Configuration, hostname: String) {
     Utils.checkHost(hostname)

     if (!hostToRack.containsKey(hostname)) {
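
The two changes above only tighten visibility: private[spark] restricts the methods to code under the org.apache.spark package. A minimal standalone sketch of that qualifier, using made-up package and object names rather than the Spark sources:

    // Illustrative only; packages and objects are hypothetical.
    package org.example.spark.util {
      object RackUtil {
        // Visible anywhere under org.example.spark, hidden outside it.
        private[spark] def lookupRack(host: String): String = "/default-rack"
      }
    }

    package org.example.spark.scheduler {
      object Caller {
        // Compiles: this package is inside org.example.spark.
        def rackFor(host: String): String =
          _root_.org.example.spark.util.RackUtil.lookupRack(host)
      }
    }

    // A caller outside org.example.spark would fail to compile, because
    // lookupRack is not visible there.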

@@ -19,7 +19,7 @@ package org.apache.spark.scheduler.cluster

 import org.apache.hadoop.yarn.api.records.{ApplicationId, YarnApplicationState}
 import org.apache.spark.{SparkException, Logging, SparkContext}
-import org.apache.spark.deploy.yarn.{ApplicationMaster, Client, ClientArguments, YarnSparkHadoopUtil}
+import org.apache.spark.deploy.yarn.{Client, ClientArguments, YarnSparkHadoopUtil}
 import org.apache.spark.scheduler.TaskSchedulerImpl

 import scala.collection.mutable.ArrayBuffer
