Skip to content

Commit

Permalink
Refactor getDefaultPropertiesFile
Browse files Browse the repository at this point in the history
  • Loading branch information
witgo committed Oct 15, 2014
1 parent c45d20c commit 49ef70e
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 19 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
*/
private def mergeSparkProperties(): Unit = {
// Use common defaults file, if not specified by user
propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile)
propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile(env))

val properties = HashMap[String, String]()
properties.putAll(defaultSparkProperties)
Expand Down
26 changes: 8 additions & 18 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1417,7 +1417,7 @@ private[spark] object Utils extends Logging {
* already set. Return the path of the properties file used.
*/
def loadDefaultSparkProperties(conf: SparkConf, filePath: String = null): String = {
val path = Option(filePath).getOrElse(getDefaultPropertiesFile)
val path = Option(filePath).getOrElse(getDefaultPropertiesFile())
Option(path).foreach { confFile =>
getPropertiesFromFile(confFile).filter { case (k, v) =>
k.startsWith("spark.")
Expand Down Expand Up @@ -1449,23 +1449,13 @@ private[spark] object Utils extends Logging {
}

/** Return the path of the default Spark properties file. */
def getDefaultPropertiesFile(): String = {
val s = File.separator
def getAbsolutePath(filePath: String): String = {
Option(filePath)
.map(t => new File(t))
.filter(_.isFile)
.map(_.getAbsolutePath).orNull
}

val configFile = sys.env.get("SPARK_CONF_DIR")
.map(t => s"$t${s}spark-defaults.conf")
.map(getAbsolutePath).orNull

Option(configFile).getOrElse(sys.env.get("SPARK_HOME")
.map(t => s"${t}${s}conf${s}spark-defaults.conf")
.map(getAbsolutePath)
.orNull)
def getDefaultPropertiesFile(env: Map[String, String] = sys.env): String = {
  // SPARK_CONF_DIR takes precedence; only when it is unset do we fall back
  // to $SPARK_HOME/conf. `env` defaults to the process environment but is
  // injectable for testing.
  val confDir: Option[String] = env.get("SPARK_CONF_DIR") match {
    case set @ Some(_) => set
    case None          => env.get("SPARK_HOME").map(home => home + File.separator + "conf")
  }
  // Resolve the candidate file; yield its absolute path only if it actually
  // exists as a regular file, otherwise null (callers wrap with Option(...)).
  val candidate = confDir.map(dir => new File(dir, "spark-defaults.conf"))
  candidate.filter(_.isFile).map(_.getAbsolutePath).orNull
}

/** Return a nice string representation of the exception, including the stack trace. */
Expand Down

0 comments on commit 49ef70e

Please sign in to comment.