From f390b131e1ae399851d70bdce18186eb89b0dc86 Mon Sep 17 00:00:00 2001
From: Patrick Wendell
Date: Sun, 6 Apr 2014 12:22:51 -0700
Subject: [PATCH] Better visibility for workaround constructors

---
 .../scala/org/apache/spark/SparkContext.scala | 20 +++++++++----------
 .../spark/api/java/JavaSparkContext.scala     |  2 +-
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 67b732c9582be..5b450756a23c5 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -85,10 +85,6 @@ class SparkContext(config: SparkConf)
   def this(master: String, appName: String, conf: SparkConf) =
     this(SparkContext.updatedConf(conf, master, appName))
 
-  // NOTE: The below constructors could be consolidated using default arguments. Due to
-  // Scala bug SI-8479, however, this causes the compile step to fail when generating docs.
-  // Until we have a good workaround for that bug the constructors remain broken out.
-
   /**
    * Alternative constructor that allows setting common Spark properties directly
    *
@@ -111,14 +107,18 @@ class SparkContext(config: SparkConf)
     this.preferredNodeLocationData = preferredNodeLocationData
   }
 
+  // NOTE: The below constructors could be consolidated using default arguments. Due to
+  // Scala bug SI-8479, however, this causes the compile step to fail when generating docs.
+  // Until we have a good workaround for that bug the constructors remain broken out.
+
   /**
    * Alternative constructor that allows setting common Spark properties directly
    *
    * @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
    * @param appName A name for your application, to display on the cluster web UI.
    */
-  def this(master: String, appName: String) =
-    this(master, appName, null, Nil, Map())
+  private[spark] def this(master: String, appName: String) =
+    this(master, appName, null, Nil, Map(), Map())
 
   /**
    * Alternative constructor that allows setting common Spark properties directly
    *
@@ -127,8 +127,8 @@ class SparkContext(config: SparkConf)
    * @param appName A name for your application, to display on the cluster web UI.
    * @param sparkHome Location where Spark is installed on cluster nodes.
    */
-  def this(master: String, appName: String, sparkHome: String) =
-    this(master, appName, sparkHome, Nil, Map())
+  private[spark] def this(master: String, appName: String, sparkHome: String) =
+    this(master, appName, sparkHome, Nil, Map(), Map())
 
   /**
    * Alternative constructor that allows setting common Spark properties directly
@@ -139,8 +139,8 @@ class SparkContext(config: SparkConf)
    * @param appName A name for your application, to display on the cluster web UI.
    * @param sparkHome Location where Spark is installed on cluster nodes.
    * @param jars Collection of JARs to send to the cluster. These can be paths on the local file
    *             system or HDFS, HTTP, HTTPS, or FTP URLs.
    */
-  def this(master: String, appName: String, sparkHome: String, jars: Seq[String]) =
-    this(master, appName, sparkHome, jars, Map())
+  private[spark] def this(master: String, appName: String, sparkHome: String, jars: Seq[String]) =
+    this(master, appName, sparkHome, jars, Map(), Map())
 
   private[spark] val conf = config.clone()
 
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index e531a57aced31..2d313d73cc8f5 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -89,7 +89,7 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
    */
   def this(master: String, appName: String, sparkHome: String, jars: Array[String],
       environment: JMap[String, String]) =
-    this(new SparkContext(master, appName, sparkHome, jars.toSeq, environment))
+    this(new SparkContext(master, appName, sparkHome, jars.toSeq, environment, Map()))
 
   private[spark] val env = sc.env
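For reference, the caller-facing effect of the patch, shown outside the diff: the positional
convenience constructors become private[spark], so Scala application code reaches the same
settings through the still-public SparkConf-based constructor, while JavaSparkContext keeps its
public overloads and now forwards an extra empty Map() to the internal Scala constructor. A
minimal sketch of the intended call pattern follows; the app name and jar path are placeholders,
not taken from the patch.

// Minimal sketch, not part of the patch: configure via SparkConf instead of the
// now-private positional constructors. App name and jar path are placeholders.
import org.apache.spark.{SparkConf, SparkContext}

object ConfBasedContextExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[4]")              // was the `master` constructor argument
      .setAppName("example-app")          // was the `appName` constructor argument
      .setJars(Seq("/path/to/app.jar"))   // was the `jars` constructor argument
    val sc = new SparkContext(conf)       // public constructor, unchanged by this patch
    try {
      println(sc.parallelize(1 to 10).sum())  // trivial job to confirm the context works
    } finally {
      sc.stop()
    }
  }
}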