diff --git a/core/src/main/scala/org/apache/spark/FutureAction.scala b/core/src/main/scala/org/apache/spark/FutureAction.scala
index 0d5bad9e91e91..5468886bf1c6c 100644
--- a/core/src/main/scala/org/apache/spark/FutureAction.scala
+++ b/core/src/main/scala/org/apache/spark/FutureAction.scala
@@ -25,7 +25,7 @@ import org.apache.spark.rdd.RDD
import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter}
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
 * A future for the result of an action. This is an extension of the Scala Future interface
 * to support cancellation.
*/
@@ -85,7 +85,7 @@ trait FutureAction[T] extends Future[T] {
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
* A [[FutureAction]] holding the result of an action that triggers a single job. Examples include
* count, collect, reduce.
*/
@@ -150,7 +150,7 @@ class SimpleFutureAction[T] private[spark](jobWaiter: JobWaiter[_], resultFunc:
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
* A [[FutureAction]] for actions that could trigger multiple Spark jobs. Examples include take,
* takeSample. Cancellation works by setting the cancelled flag to true and interrupting the
* action thread if it is being blocked by a job.
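For readers new to these types, here is a minimal sketch of cancelling an in-flight action. The master, app name, and data are illustrative; `takeAsync` comes from the async actions touched further down in this diff.

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.SparkContext._  // implicit conversion to the async RDD actions

// Minimal sketch: kick off an asynchronous take and cancel it mid-flight.
// takeAsync may launch several jobs, so it yields a ComplexFutureAction;
// countAsync triggers exactly one job and yields a SimpleFutureAction.
object FutureActionSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setMaster("local[2]").setAppName("future-action-sketch"))
    val rdd = sc.parallelize(1 to 1000000, 100)

    val future = rdd.takeAsync(100)

    // Sets the cancelled flag and interrupts the action thread if it is
    // currently blocked waiting on a job.
    future.cancel()

    sc.stop()
  }
}
```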
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 2818a08b2081b..eb843d1f62a08 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1009,7 +1009,7 @@ class SparkContext(config: SparkConf)
}
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
 * Submit a job for execution and return a FutureAction holding the result.
*/
def submitJob[T, U, R](
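As a hedged illustration of this entry point (the RDD, the per-partition function, and the result bookkeeping below are made up for the example), one can sum each partition and fold the pieces into a grand total:

```scala
// Sketch, given an existing SparkContext `sc`: sum each partition, record
// the per-partition sums on the driver, and report the grand total as the
// job's overall result.
val data = sc.parallelize(1 to 1000, 10)
val partSums = new Array[Long](data.partitions.size)

val future = sc.submitJob[Int, Long, Long](
  data,
  (iter: Iterator[Int]) => iter.map(_.toLong).sum,  // runs on each partition
  0 until data.partitions.size,                     // partitions to compute
  (index, partSum) => partSums(index) = partSum,    // driver-side result handler
  partSums.sum                                      // evaluated once all partitions finish
)
```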
diff --git a/core/src/main/scala/org/apache/spark/package.scala b/core/src/main/scala/org/apache/spark/package.scala
index 0c98f341f7543..e1d10cab5988d 100644
--- a/core/src/main/scala/org/apache/spark/package.scala
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -34,7 +34,7 @@ package org.apache
* for Spark programming APIs in Java.
*
* Classes and methods marked with
- * EXPERIMENTAL API are user-facing features which have not been officially adopted by the
+ * EXPERIMENTAL are user-facing features which have not been officially adopted by the
* Spark project. These are subject to change or removal in minor releases.
*
* Classes and methods marked with
diff --git a/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala b/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
index aee6c3c85cbea..12a7fff35e868 100644
--- a/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
+++ b/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
@@ -18,7 +18,7 @@
package org.apache.spark.partial
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
* A Double value with error bars and associated confidence.
*/
class BoundedDouble(val mean: Double, val confidence: Double, val low: Double, val high: Double) {
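A short sketch of how a BoundedDouble is usually consumed, assuming an existing RDD `rdd`; countApprox (see the RDD changes below) returns a `PartialResult[BoundedDouble]`:

```scala
// Read the estimate and its error bars from an approximate count.
val approx = rdd.countApprox(timeout = 1000L, confidence = 0.95)
val bound: BoundedDouble = approx.initialValue

println(s"count is about ${bound.mean}, between ${bound.low} and " +
  s"${bound.high}, with ${bound.confidence * 100}% confidence")
```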
diff --git a/core/src/main/scala/org/apache/spark/partial/PartialResult.scala b/core/src/main/scala/org/apache/spark/partial/PartialResult.scala
index f055543847dfc..3103b5783806f 100644
--- a/core/src/main/scala/org/apache/spark/partial/PartialResult.scala
+++ b/core/src/main/scala/org/apache/spark/partial/PartialResult.scala
@@ -18,7 +18,7 @@
package org.apache.spark.partial
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
*/
class PartialResult[R](initialVal: R, isFinal: Boolean) {
private var finalValue: Option[R] = if (isFinal) Some(initialVal) else None
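A PartialResult carries a provisional value that is later superseded by a final one. A hedged sketch of that lifecycle, using countApprox on an assumed RDD `rdd` as the producer:

```scala
// The provisional estimate becomes available once the timeout expires; the
// final value arrives when the whole job completes.
val result = rdd.countApprox(timeout = 2000L)

result.onComplete { bound =>
  println(s"final estimate: ${bound.mean}")
}

// getFinalValue() blocks until the computation has finished.
val finalBound = result.getFinalValue()
```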
diff --git a/core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala b/core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala
index 599324879d0f9..199945287da95 100644
--- a/core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala
@@ -26,7 +26,7 @@ import scala.reflect.ClassTag
import org.apache.spark.{ComplexFutureAction, FutureAction, Logging}
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
* A set of asynchronous RDD actions available through an implicit conversion.
* Import `org.apache.spark.SparkContext._` at the top of your program to use these functions.
*/
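A small sketch of the conversion in action, assuming an existing RDD `rdd`. Each async variant returns a FutureAction, which extends `scala.concurrent.Future`, so the usual combinators and `Await` apply:

```scala
import scala.concurrent.Await
import scala.concurrent.duration._

import org.apache.spark.SparkContext._  // enables rdd.countAsync, rdd.takeAsync, ...

// The async variants mirror their blocking counterparts but return
// immediately with a FutureAction.
val countFuture = rdd.countAsync()
val headFuture = rdd.takeAsync(10)

val total: Long = Await.result(countFuture, 30.seconds)
```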
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 66ecf1b4a1296..ddff5311b423f 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -783,7 +783,7 @@ abstract class RDD[T: ClassTag](
def count(): Long = sc.runJob(this, Utils.getIteratorSize _).sum
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
*
* Approximate version of count() that returns a potentially incomplete result
* within a timeout, even if not all tasks have finished.
@@ -831,7 +831,7 @@ abstract class RDD[T: ClassTag](
}
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
*
* Approximate version of countByValue().
*/
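A hedged example of what the approximate countByValue yields (the input data are made up): a PartialResult over a map from each distinct value to a BoundedDouble estimate of its count.

```scala
val words = sc.parallelize(Seq("a", "b", "a", "c", "a"))
val approxCounts = words.countByValueApprox(timeout = 1000L, confidence = 0.90)

// Each distinct value maps to a BoundedDouble estimate of its count.
approxCounts.getFinalValue().foreach { case (word, bound) =>
  println(s"$word: about ${bound.mean} (${bound.low} to ${bound.high})")
}
```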
@@ -855,7 +855,7 @@ abstract class RDD[T: ClassTag](
}
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
 * Return the approximate number of distinct elements in the RDD.
*
* The accuracy of approximation can be controlled through the relative standard deviation
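To make the relativeSD trade-off concrete, a sketch with illustrative data: a smaller relative standard deviation buys a tighter estimate at a higher memory cost per counter.

```scala
val ids = sc.parallelize(1 to 100000).map(_ % 1000)

val rough = ids.countApproxDistinct(relativeSD = 0.10)  // faster, looser bound
val tight = ids.countApproxDistinct(relativeSD = 0.01)  // slower, tighter bound
```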
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDD.scala
index d077bfc97f49d..e1937f7f0d9f8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SchemaRDD.scala
@@ -240,7 +240,7 @@ class SchemaRDD(
Filter(ScalaUdf(udf, BooleanType, Seq(UnresolvedAttribute(arg1.name))), logicalPlan))
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
* Filters tuples using a function over a `Dynamic` version of a given Row. DynamicRows use
 * Scala's Dynamic trait to emulate an ORM in a dynamically typed language. Since the type of
* the column is not known at compile time, all attributes are converted to strings before
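A tentative sketch of the dynamic filter; the `people` relation and its `name` column are assumptions, and since attribute lookups resolve at runtime to strings, there is no compile-time checking of the predicate:

```scala
// Hypothetical: filter a relation on a string-valued attribute looked up
// through Scala's Dynamic trait.
val bobs = people.where(row => row.name == "Bob")
```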
@@ -258,7 +258,7 @@ class SchemaRDD(
Filter(ScalaUdf(dynamicUdf, BooleanType, Seq(WrapDynamic(logicalPlan.output))), logicalPlan))
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
* Returns a sampled version of the underlying dataset.
*
* @group Query
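A hedged usage sketch, assuming a `people` SchemaRDD; the parameter order follows the Sample plan node constructed just below this hunk:

```scala
// Draw a 10% sample without replacement, with a fixed seed so the sample
// is reproducible.
val tenPercent = people.sample(fraction = 0.1, withReplacement = false, seed = 42)
```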
@@ -270,7 +270,7 @@ class SchemaRDD(
new SchemaRDD(sqlContext, Sample(fraction, withReplacement, seed, logicalPlan))
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
* Applies the given Generator, or table generating function, to this relation.
*
* @param generator A table generating function. The API for such functions is likely to change
@@ -294,7 +294,7 @@ class SchemaRDD(
new SchemaRDD(sqlContext, Generate(generator, join, outer, None, logicalPlan))
/**
- * EXPERIMENTAL API
+ * EXPERIMENTAL
 * Adds the rows from this RDD to the specified table. Note that in a standard [[SQLContext]] there is
* no notion of persistent tables, and thus queries that contain this operator will fail to
* optimize. When working with an extension of a SQLContext that has a persistent catalog, such
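Because the operator needs a persistent catalog, a sketch against a HiveContext; the table names are made up and would have to exist already:

```scala
// Requires a catalog with persistent tables, e.g. Hive.
val hiveContext = new org.apache.spark.sql.hive.HiveContext(sc)

val staged = hiveContext.hql("SELECT * FROM staging_people")  // hypothetical table
staged.insertInto("people", overwrite = false)  // append; overwrite = true replaces
```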