
Merge remote-tracking branch 'upstream/master' into SPARK-2177
yhuai committed Jun 19, 2014
2 parents 440c5af + 67fca18 commit 9787fff
Showing 2 changed files with 6 additions and 2 deletions.
1 change: 0 additions & 1 deletion core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -224,7 +224,6 @@ class SparkContext(config: SparkConf) extends Logging {

   /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse. */
   val hadoopConfiguration: Configuration = {
-    val env = SparkEnv.get
     val hadoopConf = SparkHadoopUtil.get.newConfiguration()
     // Explicitly check for S3 environment variables
     if (System.getenv("AWS_ACCESS_KEY_ID") != null &&
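The removed line, val env = SparkEnv.get, appears to have been dead code: nothing in the surviving body of hadoopConfiguration references it. The hunk also ends mid-condition, inside the S3 check. A minimal sketch of what such a check typically does, assuming the standard Hadoop fs.s3/fs.s3n credential property names (the actual body lies outside the visible hunk):

    // Sketch only: one plausible body for the S3 check cut off above.
    // The fs.s3/fs.s3n property names are the standard Hadoop keys,
    // assumed here rather than taken from this diff.
    if (System.getenv("AWS_ACCESS_KEY_ID") != null &&
        System.getenv("AWS_SECRET_ACCESS_KEY") != null) {
      val accessKey = System.getenv("AWS_ACCESS_KEY_ID")
      val secretKey = System.getenv("AWS_SECRET_ACCESS_KEY")
      hadoopConf.set("fs.s3.awsAccessKeyId", accessKey)
      hadoopConf.set("fs.s3n.awsAccessKeyId", accessKey)
      hadoopConf.set("fs.s3.awsSecretAccessKey", secretKey)
      hadoopConf.set("fs.s3n.awsSecretAccessKey", secretKey)
    }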
7 changes: 6 additions & 1 deletion core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
@@ -91,8 +91,13 @@ private[spark] object MetadataCleaner {
     conf.set(MetadataCleanerType.systemProperty(cleanerType), delay.toString)
   }
 
+  /**
+   * Set the default delay time (in seconds).
+   * @param conf SparkConf instance
+   * @param delay default delay time to set
+   * @param resetAll whether to reset all to default
+   */
   def setDelaySeconds(conf: SparkConf, delay: Int, resetAll: Boolean = true) {
-    // override for all ?
     conf.set("spark.cleaner.ttl", delay.toString)
     if (resetAll) {
       for (cleanerType <- MetadataCleanerType.values) {
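This hunk replaces the stray // override for all ? comment with proper scaladoc; the body of the resetAll loop falls outside the visible lines. From the surviving code: setDelaySeconds writes the global spark.cleaner.ttl, and when resetAll is true it walks every MetadataCleanerType, presumably clearing each per-type override so the new global TTL applies uniformly. A hedged usage sketch (MetadataCleaner is private[spark], so this only compiles inside Spark's own packages; the 3600/1800 values are arbitrary):

    import org.apache.spark.SparkConf
    import org.apache.spark.util.MetadataCleaner

    val conf = new SparkConf()
    // Set a one-hour global TTL and reset every per-type delay (the default),
    // so all cleaner types fall back to the new spark.cleaner.ttl value.
    MetadataCleaner.setDelaySeconds(conf, 3600)
    // Set the global TTL but leave any per-type overrides untouched.
    MetadataCleaner.setDelaySeconds(conf, 1800, resetAll = false)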
