[SPARK-10236] [MLLIB] update since versions in mllib.feature
Same as #8421 but for `mllib.feature`.

cc dbtsai

Author: Xiangrui Meng <[email protected]>

Closes #8449 from mengxr/SPARK-10236.feature and squashes the following commits:

0e8d658 [Xiangrui Meng] remove unnecessary comment
ad70b03 [Xiangrui Meng] update since versions in mllib.feature
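
The change applies one pattern throughout `mllib.feature`: a `@Since` annotation on each primary constructor, placed between the class name and the parameter list, plus a `@Since` on each public constructor parameter. A parameter's version may be later than the class's own when the field was added in a subsequent release (e.g. `IDF` is since 1.1.0, but its `minDocFreq` constructor is since 1.2.0). A minimal sketch of that shape, using a local stand-in annotation and a hypothetical `Example` class rather than Spark's real `org.apache.spark.annotation.Since`:

```scala
import scala.annotation.StaticAnnotation

// Local stand-in for org.apache.spark.annotation.Since, so this sketch
// compiles without a Spark dependency; illustrative only.
class Since(version: String) extends StaticAnnotation

// Hypothetical class showing the placement this commit uses: one @Since
// on the primary constructor itself, and one on each public constructor
// parameter. The parameter versions can differ from the class version
// when a field was exposed in a later release.
class Example @Since("1.3.0") (
    @Since("1.3.0") val alpha: Int,
    @Since("1.4.0") val beta: Double)
```

The constructor gets its own annotation in addition to the class-level one presumably because Scaladoc documents the class, its constructors, and its vals as separate entries, each carrying its own since version.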
mengxr authored and DB Tsai committed Aug 26, 2015
1 parent 4657fa1 commit 321d775
Showing 8 changed files with 21 additions and 16 deletions.
mllib/src/main/scala/org/apache/spark/mllib/clustering/PowerIterationClustering.scala
@@ -71,8 +71,6 @@ object PowerIterationClusteringModel extends Loader[PowerIterationClusteringModel] {
     private[clustering]
     val thisClassName = "org.apache.spark.mllib.clustering.PowerIterationClusteringModel"
 
-    /**
-     */
     @Since("1.4.0")
     def save(sc: SparkContext, model: PowerIterationClusteringModel, path: String): Unit = {
       val sqlContext = new SQLContext(sc)
mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala
@@ -33,7 +33,7 @@ import org.apache.spark.rdd.RDD
  */
 @Since("1.3.0")
 @Experimental
-class ChiSqSelectorModel (
+class ChiSqSelectorModel @Since("1.3.0") (
     @Since("1.3.0") val selectedFeatures: Array[Int]) extends VectorTransformer {
 
   require(isSorted(selectedFeatures), "Array has to be sorted asc")
@@ -112,7 +112,7 @@ class ChiSqSelectorModel (
  */
 @Since("1.3.0")
 @Experimental
-class ChiSqSelector (
+class ChiSqSelector @Since("1.3.0") (
     @Since("1.3.0") val numTopFeatures: Int) extends Serializable {
 
   /**
mllib/src/main/scala/org/apache/spark/mllib/feature/ElementwiseProduct.scala
@@ -29,7 +29,8 @@ import org.apache.spark.mllib.linalg._
  */
 @Since("1.4.0")
 @Experimental
-class ElementwiseProduct(val scalingVec: Vector) extends VectorTransformer {
+class ElementwiseProduct @Since("1.4.0") (
+    @Since("1.4.0") val scalingVec: Vector) extends VectorTransformer {
 
   /**
    * Does the hadamard product transformation.
6 changes: 4 additions & 2 deletions mllib/src/main/scala/org/apache/spark/mllib/feature/IDF.scala
@@ -39,8 +39,9 @@ import org.apache.spark.rdd.RDD
  */
 @Since("1.1.0")
 @Experimental
-class IDF(val minDocFreq: Int) {
+class IDF @Since("1.2.0") (@Since("1.2.0") val minDocFreq: Int) {
 
+  @Since("1.1.0")
   def this() = this(0)
 
   // TODO: Allow different IDF formulations.
@@ -162,7 +163,8 @@ private object IDF {
  * Represents an IDF model that can transform term frequency vectors.
  */
 @Experimental
-class IDFModel private[spark] (val idf: Vector) extends Serializable {
+@Since("1.1.0")
+class IDFModel private[spark] (@Since("1.1.0") val idf: Vector) extends Serializable {
 
   /**
    * Transforms term frequency (TF) vectors to TF-IDF vectors.
mllib/src/main/scala/org/apache/spark/mllib/feature/Normalizer.scala
@@ -33,7 +33,7 @@ import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors}
  */
 @Since("1.1.0")
 @Experimental
-class Normalizer(p: Double) extends VectorTransformer {
+class Normalizer @Since("1.1.0") (p: Double) extends VectorTransformer {
 
   @Since("1.1.0")
   def this() = this(2)
7 changes: 5 additions & 2 deletions mllib/src/main/scala/org/apache/spark/mllib/feature/PCA.scala
@@ -29,7 +29,7 @@ import org.apache.spark.rdd.RDD
  * @param k number of principal components
  */
 @Since("1.4.0")
-class PCA(val k: Int) {
+class PCA @Since("1.4.0") (@Since("1.4.0") val k: Int) {
   require(k >= 1, s"PCA requires a number of principal components k >= 1 but was given $k")
 
   /**
@@ -74,7 +74,10 @@ class PCA(val k: Int) {
  * @param k number of principal components.
  * @param pc a principal components Matrix. Each column is one principal component.
  */
-class PCAModel private[spark] (val k: Int, val pc: DenseMatrix) extends VectorTransformer {
+@Since("1.4.0")
+class PCAModel private[spark] (
+    @Since("1.4.0") val k: Int,
+    @Since("1.4.0") val pc: DenseMatrix) extends VectorTransformer {
   /**
    * Transform a vector by computed Principal Components.
    *
mllib/src/main/scala/org/apache/spark/mllib/feature/StandardScaler.scala
@@ -34,7 +34,7 @@ import org.apache.spark.rdd.RDD
  */
 @Since("1.1.0")
 @Experimental
-class StandardScaler(withMean: Boolean, withStd: Boolean) extends Logging {
+class StandardScaler @Since("1.1.0") (withMean: Boolean, withStd: Boolean) extends Logging {
 
   @Since("1.1.0")
   def this() = this(false, true)
@@ -74,11 +74,11 @@ class StandardScaler(withMean: Boolean, withStd: Boolean) extends Logging {
  */
 @Since("1.1.0")
 @Experimental
-class StandardScalerModel (
-    val std: Vector,
-    val mean: Vector,
-    var withStd: Boolean,
-    var withMean: Boolean) extends VectorTransformer {
+class StandardScalerModel @Since("1.3.0") (
+    @Since("1.3.0") val std: Vector,
+    @Since("1.1.0") val mean: Vector,
+    @Since("1.3.0") var withStd: Boolean,
+    @Since("1.3.0") var withMean: Boolean) extends VectorTransformer {
 
   /**
    */
mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala
@@ -436,6 +436,7 @@ class Word2Vec extends Serializable with Logging {
  * (i * vectorSize, i * vectorSize + vectorSize)
  */
 @Experimental
+@Since("1.1.0")
 class Word2VecModel private[mllib] (
     private val wordIndex: Map[String, Int],
     private val wordVectors: Array[Float]) extends Serializable with Saveable {
