[SPARK-3359][DOCS] Make javadoc8 working for unidoc/genjavadoc compatibility in Java API documentation #16013
SparkConf.scala
@@ -262,7 +262,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
  /**
   * Get a time parameter as seconds; throws a NoSuchElementException if it's not set. If no
   * suffix is provided then seconds are assumed.
-  * @throws NoSuchElementException
+  * @note Throws `NoSuchElementException`

Review comment: Oh, I think this may be resolved if ...

   */
  def getTimeAsSeconds(key: String): Long = {
    Utils.timeStringAsSeconds(get(key))
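This change is doc-only: only the Scaladoc tag moves from `@throws` to `@note`, while the runtime contract stays the same. A minimal REPL-style sketch of what that documented behaviour means for callers (the key `spark.my.interval` is made up for illustration):

```scala
import org.apache.spark.SparkConf

// "spark.my.interval" is a hypothetical key used only for this example.
val conf = new SparkConf()
  .set("spark.network.timeout", "120s")  // explicit suffix
  .set("spark.my.interval", "30")        // no suffix: seconds are assumed

conf.getTimeAsSeconds("spark.network.timeout")  // 120
conf.getTimeAsSeconds("spark.my.interval")      // 30

// A key that was never set still throws at runtime, exactly as the doc comment says;
// only the tag describing this has moved from @throws to @note.
conf.getTimeAsSeconds("spark.not.set")          // throws NoSuchElementException
```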
@@ -279,7 +279,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
  /**
   * Get a time parameter as milliseconds; throws a NoSuchElementException if it's not set. If no
   * suffix is provided then milliseconds are assumed.
-  * @throws NoSuchElementException
+  * @note Throws `NoSuchElementException`
   */
  def getTimeAsMs(key: String): Long = {
    Utils.timeStringAsMs(get(key))
@@ -296,7 +296,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
  /**
   * Get a size parameter as bytes; throws a NoSuchElementException if it's not set. If no
   * suffix is provided then bytes are assumed.
-  * @throws NoSuchElementException
+  * @note Throws `NoSuchElementException`
   */
  def getSizeAsBytes(key: String): Long = {
    Utils.byteStringAsBytes(get(key))
@@ -320,7 +320,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
  /**
   * Get a size parameter as Kibibytes; throws a NoSuchElementException if it's not set. If no
   * suffix is provided then Kibibytes are assumed.
-  * @throws NoSuchElementException
+  * @note Throws `NoSuchElementException`
   */
  def getSizeAsKb(key: String): Long = {
    Utils.byteStringAsKb(get(key))
@@ -337,7 +337,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
  /**
   * Get a size parameter as Mebibytes; throws a NoSuchElementException if it's not set. If no
   * suffix is provided then Mebibytes are assumed.
-  * @throws NoSuchElementException
+  * @note Throws `NoSuchElementException`
   */
  def getSizeAsMb(key: String): Long = {
    Utils.byteStringAsMb(get(key))
@@ -354,7 +354,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
  /**
   * Get a size parameter as Gibibytes; throws a NoSuchElementException if it's not set. If no
   * suffix is provided then Gibibytes are assumed.
-  * @throws NoSuchElementException
+  * @note Throws `NoSuchElementException`
   */
  def getSizeAsGb(key: String): Long = {
    Utils.byteStringAsGb(get(key))
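All of the size getters above follow the same suffix-parsing pattern, so one sketch covers them (the key `spark.my.buffer` is hypothetical):

```scala
import org.apache.spark.SparkConf

// "spark.my.buffer" is a made-up key used only for this example.
val conf = new SparkConf().set("spark.my.buffer", "5m")

conf.getSizeAsBytes("spark.my.buffer")  // 5242880
conf.getSizeAsKb("spark.my.buffer")     // 5120
conf.getSizeAsMb("spark.my.buffer")     // 5
conf.getSizeAsGb("spark.my.buffer")     // 0 (integer result, rounds down)

conf.getSizeAsBytes("spark.not.set")    // throws NoSuchElementException, as noted above
```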
SparkContext.scala
@@ -645,7 +645,7 @@ class SparkContext(config: SparkConf) extends Logging {

  /**
   * Get a local property set in this thread, or null if it is missing. See
-  * [[org.apache.spark.SparkContext.setLocalProperty]].
+  * `org.apache.spark.SparkContext.setLocalProperty`.
   */
  def getLocalProperty(key: String): String =
    Option(localProperties.get).map(_.getProperty(key)).orNull
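For reference, a spark-shell style sketch of the property pair this doc comment cross-references (the property key is made up):

```scala
// In spark-shell, `sc` is the running SparkContext.
sc.setLocalProperty("my.tag", "example")  // visible to jobs submitted from this thread
sc.getLocalProperty("my.tag")             // "example"
sc.getLocalProperty("never.set")          // null, as the doc comment says
```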
@@ -663,7 +663,7 @@ class SparkContext(config: SparkConf) extends Logging {
   * Application programmers can use this method to group all those jobs together and give a
   * group description. Once set, the Spark web UI will associate such jobs with this group.
   *
-  * The application can also use [[org.apache.spark.SparkContext.cancelJobGroup]] to cancel all
+  * The application can also use `org.apache.spark.SparkContext.cancelJobGroup` to cancel all
   * running jobs in this group. For example,
   * {{{
   * // In the main thread:
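As a side note, a spark-shell style sketch of the job-group API the comment links to, mirroring the `{{{...}}}` example in the doc itself (the group id and the job are invented):

```scala
// In the main thread (sc is the running SparkContext in spark-shell):
sc.setJobGroup("my-group", "jobs kicked off by this example", interruptOnCancel = true)
sc.parallelize(1 to 1000000).map(_ * 2).count()

// From a separate thread (for example a UI handler), cancel everything in the group:
sc.cancelJobGroup("my-group")
```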
@@ -1384,7 +1384,7 @@ class SparkContext(config: SparkConf) extends Logging {
  }

  /**
-  * Create and register a [[CollectionAccumulator]], which starts with empty list and accumulates
+  * Create and register a `CollectionAccumulator`, which starts with empty list and accumulates
   * inputs by adding them into the list.
   */
  def collectionAccumulator[T]: CollectionAccumulator[T] = {
@@ -1394,7 +1394,7 @@ class SparkContext(config: SparkConf) extends Logging {
  }

  /**
-  * Create and register a [[CollectionAccumulator]], which starts with empty list and accumulates
+  * Create and register a `CollectionAccumulator`, which starts with empty list and accumulates
   * inputs by adding them into the list.
   */
  def collectionAccumulator[T](name: String): CollectionAccumulator[T] = {
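For context, a spark-shell style sketch of the `CollectionAccumulator` that these two doc comments now reference as plain code rather than a `[[...]]` link (the accumulator name and data are made up):

```scala
import scala.collection.JavaConverters._
// In spark-shell, `sc` is the running SparkContext.

val badRecords = sc.collectionAccumulator[String]("badRecords")

sc.parallelize(Seq("1", "2", "oops", "4")).foreach { s =>
  if (!s.forall(_.isDigit)) badRecords.add(s)
}

// value returns a java.util.List, so convert it for Scala-side use.
badRecords.value.asScala.toList   // List("oops")
```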
@@ -2043,7 +2043,7 @@ class SparkContext(config: SparkConf) extends Logging {
  }

  /**
-  * Cancel active jobs for the specified group. See [[org.apache.spark.SparkContext.setJobGroup]]
+  * Cancel active jobs for the specified group. See `org.apache.spark.SparkContext.setJobGroup`
   * for more information.
   */
  def cancelJobGroup(groupId: String) {
@@ -2061,7 +2061,7 @@ class SparkContext(config: SparkConf) extends Logging {
   * Cancel a given job if it's scheduled or running.
   *
   * @param jobId the job ID to cancel
-  * @throws InterruptedException if the cancel message cannot be sent
+  * @note Throws `InterruptedException` if the cancel message cannot be sent

Review comment: Hm, InterruptedException is in ...
Review comment: Sure, I will try!
Review comment: Hm.. interesting. I haven't looked into this deeper, but it seems it fails anyway.
Review comment: Hm, so it's just complaining that it's documented as a checked exception but can't be thrown according to the byte code. It has a point there, but I am also kind of surprised it's an error. OK, leave it the way you have it, as it seems to be the only way that works.

   */
  def cancelJob(jobId: Int) {
    dagScheduler.cancelJob(jobId)
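To make the reviewers' point concrete: genjavadoc copies the Scaladoc tags into the generated Java sources, and javadoc 8 then rejects a `@throws` tag for a checked exception that the compiled method does not declare, which is why the wording moves into a `@note`. A minimal illustration of the pattern (not Spark code; the class and method are invented):

```scala
class Worker {
  /**
   * Cancels the given task if it is scheduled or running.
   *
   * @param taskId the task ID to cancel
   * @note Throws `InterruptedException` if the cancel message cannot be sent.
   *       A Scaladoc `@throws InterruptedException` tag here would be carried over
   *       into the generated Java sources, where javadoc 8 errors out because the
   *       compiled method signature declares no such checked exception.
   */
  def cancel(taskId: Int): Unit = {
    // send the cancel message; blocking I/O here may be interrupted at runtime
  }
}
```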
@@ -2071,7 +2071,7 @@ class SparkContext(config: SparkConf) extends Logging {
   * Cancel a given stage and all jobs associated with it.
   *
   * @param stageId the stage ID to cancel
-  * @throws InterruptedException if the cancel message cannot be sent
+  * @note Throws `InterruptedException` if the cancel message cannot be sent
   */
  def cancelStage(stageId: Int) {
    dagScheduler.cancelStage(stageId)
DoubleRDDFunctions.scala
@@ -155,7 +155,7 @@ class DoubleRDDFunctions(self: RDD[Double]) extends Logging with Serializable {
   * to the right except for the last which is closed
   *  e.g. for the array
   *  [1, 10, 20, 50] the buckets are [1, 10) [10, 20) [20, 50]
-  *  e.g 1<=x<10 , 10<=x<20, 20<=x<=50
+  *  e.g 1&lt;=x&lt;10 , 10&lt;=x&lt;20, 20&lt;=x&lt;=50

Review comment: This originally gives an error as below: [javadoc 8 error output elided]. However, after fixing it as above, these are printed as they are in javadoc (not in scaladoc). It seems we should find another approach to deal with this. It seems ...
Review comment: In generated javadoc, ...
Review comment: Note to myself: it seems inlined tags such as ... and ... also work okay for both, but they are valid ones for javadoc. For scaladoc, they are dealt with as monospace text (like ...

   *  And on the input of 1 and 50 we would have a histogram of 1, 0, 1
   *
   * @note If your histogram is evenly spaced (e.g. [0, 10, 20, 30]) this can be switched
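The trade-offs described in these comments can be summarised in a small sketch (not Spark code). The inline tag named below is my reading of the truncated "inlined tags" remark; `{@code}` and `{@literal}` are the standard javadoc inline tags that fit that description:

```scala
// Illustration only: three ways of writing "1<=x<10" in a doc comment shared by
// scaladoc and (via genjavadoc) javadoc 8.
class HistogramDocs {
  /** Raw angle brackets, e.g. 1<=x<10: javadoc 8's doclint rejects the bare '<'. */
  def optionA(): Unit = ()

  /** HTML entities, e.g. 1&lt;=x&lt;10: passes javadoc 8's lint, but per the discussion
   *  above the entities end up printed literally in one of the two generated outputs. */
  def optionB(): Unit = ()

  /** Javadoc inline tag, e.g. {@literal 1<=x<10}: accepted by javadoc 8; scaladoc does
   *  not interpret the tag and renders it as monospace text instead. */
  def optionC(): Unit = ()
}
```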
Review comment: I just decided to keep the original format rather than trying to make this pretty. The original was as below: [screenshots of the rendered Scala and Java API docs elided].
Review comment: After this PR it still prints the same: [screenshots of the rendered Scala and Java API docs elided].