
Commit 6b9f8e2

add Experimental annotation
1 parent 8773d0d commit 6b9f8e2

15 files changed: 34 additions, 4 deletions
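Each hunk below imports org.apache.spark.annotation.Experimental (where it is not already imported) and tags a public API with @Experimental, matching the ":: Experimental ::" marker that appears in the corresponding Scaladoc. A minimal sketch of the resulting pattern on a hypothetical class, not code from this commit:

import org.apache.spark.annotation.Experimental

/**
 * :: Experimental ::
 *
 * A public API that may change or be removed in a future minor release.
 */
@Experimental
class SomePublicModel(val weights: Array[Double])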


mllib/src/main/scala/org/apache/spark/mllib/classification/NaiveBayes.scala

Lines changed: 2 additions & 0 deletions

@@ -19,6 +19,7 @@ package org.apache.spark.mllib.classification
 
 import breeze.linalg.{DenseMatrix => BDM, DenseVector => BDV, argmax => brzArgmax, sum => brzSum}
 
+import org.apache.spark.annotation.Experimental
 import org.apache.spark.{Logging, SparkContext}
 import org.apache.spark.SparkContext._
 import org.apache.spark.mllib.linalg.Vector
@@ -36,6 +37,7 @@ import org.apache.spark.rdd.RDD
  * @param theta log of class conditional probabilities, whose dimension is C-by-D,
  *              where D is number of features
  */
+@Experimental
 class NaiveBayesModel(
     val labels: Array[Double],
     val pi: Array[Double],
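Callers reach the newly annotated NaiveBayesModel through NaiveBayes.train; a minimal, hedged sketch of that flow (the local SparkContext and the two training points are made up for illustration):

import org.apache.spark.SparkContext
import org.apache.spark.mllib.classification.NaiveBayes
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint

// Tiny, made-up training set: label 0.0 for one feature pattern, 1.0 for the other.
val sc = new SparkContext("local", "naive-bayes-sketch")
val training = sc.parallelize(Seq(
  LabeledPoint(0.0, Vectors.dense(1.0, 0.0)),
  LabeledPoint(1.0, Vectors.dense(0.0, 1.0))))

// train returns the @Experimental NaiveBayesModel annotated above.
val model = NaiveBayes.train(training, lambda = 1.0)
val predicted = model.predict(Vectors.dense(0.0, 1.0))  // expected: 1.0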

mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala

Lines changed: 2 additions & 0 deletions

@@ -21,6 +21,7 @@ import scala.collection.mutable.ArrayBuffer
 
 import breeze.linalg.{DenseVector => BDV, Vector => BV, norm => breezeNorm}
 
+import org.apache.spark.annotation.Experimental
 import org.apache.spark.{Logging, SparkContext}
 import org.apache.spark.SparkContext._
 import org.apache.spark.mllib.linalg.{Vector, Vectors}
@@ -400,6 +401,7 @@ object KMeans {
   /**
    * :: Experimental ::
    */
+  @Experimental
   def main(args: Array[String]) {
     if (args.length < 4) {
       println("Usage: KMeans <master> <input_file> <k> <max_iterations> [<runs>]")
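The hunk above marks only the command-line main as experimental; programmatic use goes through KMeans.train, as in this hedged sketch (sc and the four points are assumptions made for illustration):

import org.apache.spark.mllib.clustering.KMeans
import org.apache.spark.mllib.linalg.Vectors

// Assumes sc is an existing SparkContext; the four 2-D points are invented
// so that two well-separated clusters exist.
val points = sc.parallelize(Seq(
  Vectors.dense(0.0, 0.0), Vectors.dense(0.1, 0.1),
  Vectors.dense(9.0, 9.0), Vectors.dense(9.1, 9.1)))

val model = KMeans.train(points, k = 2, maxIterations = 20)
model.clusterCenters.foreach(println)  // roughly (0.05, 0.05) and (9.05, 9.05)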

mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala

Lines changed: 2 additions & 0 deletions

@@ -25,6 +25,7 @@ import scala.util.Sorting
 import com.esotericsoftware.kryo.Kryo
 import org.jblas.{DoubleMatrix, SimpleBlas, Solve}
 
+import org.apache.spark.annotation.Experimental
 import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.{Logging, HashPartitioner, Partitioner, SparkContext, SparkConf}
 import org.apache.spark.storage.StorageLevel
@@ -142,6 +143,7 @@ class ALS private (
   *
   * Sets the constant used in computing confidence in implicit ALS. Default: 1.0.
   */
+  @Experimental
  def setAlpha(alpha: Double): ALS = {
    this.alpha = alpha
    this
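setAlpha only matters when ALS runs in implicit-preference mode; a hedged sketch of how the builder-style setters combine (sc and the Rating values are assumptions for illustration):

import org.apache.spark.mllib.recommendation.{ALS, Rating}

// Assumes sc is an existing SparkContext; the ratings are made-up implicit
// feedback counts (user, product, observed interaction strength).
val interactions = sc.parallelize(Seq(
  Rating(1, 10, 3.0), Rating(1, 20, 1.0), Rating(2, 10, 5.0)))

val model = new ALS()
  .setRank(10)
  .setIterations(10)
  .setImplicitPrefs(true)
  .setAlpha(40.0)   // the @Experimental setter annotated above
  .run(interactions)

val score = model.predict(2, 20)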

mllib/src/main/scala/org/apache/spark/mllib/regression/GeneralizedLinearAlgorithm.scala

Lines changed: 2 additions & 0 deletions

@@ -19,6 +19,7 @@ package org.apache.spark.mllib.regression
 
 import breeze.linalg.{DenseVector => BDV, SparseVector => BSV}
 
+import org.apache.spark.annotation.Experimental
 import org.apache.spark.{Logging, SparkException}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.mllib.optimization._
@@ -105,6 +106,7 @@ abstract class GeneralizedLinearAlgorithm[M <: GeneralizedLinearModel]
   *
   * Set if the algorithm should validate data before training. Default true.
   */
+  @Experimental
  def setValidateData(validateData: Boolean): this.type = {
    this.validateData = validateData
    this
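Because setValidateData lives on the abstract GeneralizedLinearAlgorithm, it is reached through any concrete subclass; a hedged sketch using LogisticRegressionWithSGD (the no-arg constructor and the training RDD are assumptions):

import org.apache.spark.mllib.classification.LogisticRegressionWithSGD

// Assumes training is an existing RDD[LabeledPoint].
val algo = new LogisticRegressionWithSGD()
algo.setValidateData(false)           // the @Experimental setter: skip pre-training checks
algo.optimizer.setNumIterations(50)
val model = algo.run(training)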

mllib/src/main/scala/org/apache/spark/mllib/tree/DecisionTree.scala

Lines changed: 2 additions & 0 deletions

@@ -19,6 +19,7 @@ package org.apache.spark.mllib.tree
 
 import scala.util.control.Breaks._
 
+import org.apache.spark.annotation.Experimental
 import org.apache.spark.{Logging, SparkContext}
 import org.apache.spark.SparkContext._
 import org.apache.spark.mllib.regression.LabeledPoint
@@ -41,6 +42,7 @@ import org.apache.spark.mllib.linalg.{Vector, Vectors}
  * of algorithm (classification, regression, etc.), feature type (continuous,
  * categorical), depth of the tree, quantile calculation strategy, etc.
  */
+@Experimental
 class DecisionTree (private val strategy: Strategy) extends Serializable with Logging {
 
   /**

mllib/src/main/scala/org/apache/spark/mllib/tree/configuration/Algo.scala

Lines changed: 3 additions & 0 deletions

@@ -17,11 +17,14 @@
 
 package org.apache.spark.mllib.tree.configuration
 
+import org.apache.spark.annotation.Experimental
+
 /**
  * :: Experimental ::
  *
  * Enum to select the algorithm for the decision tree
  */
+@Experimental
 object Algo extends Enumeration {
   type Algo = Value
   val Classification, Regression = Value

mllib/src/main/scala/org/apache/spark/mllib/tree/configuration/FeatureType.scala

Lines changed: 3 additions & 0 deletions

@@ -17,11 +17,14 @@
 
 package org.apache.spark.mllib.tree.configuration
 
+import org.apache.spark.annotation.Experimental
+
 /**
  * :: Experimental ::
  *
  * Enum to describe whether a feature is "continuous" or "categorical"
  */
+@Experimental
 object FeatureType extends Enumeration {
   type FeatureType = Value
   val Continuous, Categorical = Value

mllib/src/main/scala/org/apache/spark/mllib/tree/configuration/QuantileStrategy.scala

Lines changed: 3 additions & 0 deletions

@@ -17,11 +17,14 @@
 
 package org.apache.spark.mllib.tree.configuration
 
+import org.apache.spark.annotation.Experimental
+
 /**
  * :: Experimental ::
  *
  * Enum for selecting the quantile calculation strategy
  */
+@Experimental
 object QuantileStrategy extends Enumeration {
   type QuantileStrategy = Value
   val Sort, MinMax, ApproxHist = Value

mllib/src/main/scala/org/apache/spark/mllib/tree/configuration/Strategy.scala

Lines changed: 2 additions & 0 deletions

@@ -17,6 +17,7 @@
 
 package org.apache.spark.mllib.tree.configuration
 
+import org.apache.spark.annotation.Experimental
 import org.apache.spark.mllib.tree.impurity.Impurity
 import org.apache.spark.mllib.tree.configuration.Algo._
 import org.apache.spark.mllib.tree.configuration.QuantileStrategy._
@@ -36,6 +37,7 @@ import org.apache.spark.mllib.tree.configuration.QuantileStrategy._
  * 1, 2, ... , k-1. It's important to note that features are
  * zero-indexed.
  */
+@Experimental
 class Strategy (
     val algo: Algo,
     val impurity: Impurity,
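Strategy is the configuration object the annotated DecisionTree consumes; a hedged sketch of wiring the two together (the maxDepth parameter name, the remaining constructor defaults, and the labeledPoints RDD are assumptions):

import org.apache.spark.mllib.tree.DecisionTree
import org.apache.spark.mllib.tree.configuration.Algo.Classification
import org.apache.spark.mllib.tree.configuration.Strategy
import org.apache.spark.mllib.tree.impurity.Gini

// Assumes labeledPoints is an existing RDD[LabeledPoint] and that Strategy's
// remaining parameters (bins, quantile strategy, categorical feature info)
// keep their defaults.
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 4)
val model = new DecisionTree(strategy).train(labeledPoints)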

mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala

Lines changed: 2 additions & 1 deletion

@@ -17,14 +17,15 @@
 
 package org.apache.spark.mllib.tree.impurity
 
-import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.annotation.{DeveloperApi, Experimental}
 
 /**
  * :: Experimental ::
  *
  * Class for calculating [[http://en.wikipedia.org/wiki/Binary_entropy_function entropy]] during
  * binary classification.
  */
+@Experimental
 object Entropy extends Impurity {
 
   private[tree] def log2(x: Double) = scala.math.log(x) / scala.math.log(2)
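As a quick reference for what this impurity measures, a self-contained sketch of the binary entropy function in bits; it mirrors the linked definition, not the MLlib implementation:

def log2(x: Double): Double = math.log(x) / math.log(2)

def binaryEntropy(p: Double): Double =
  if (p == 0.0 || p == 1.0) 0.0
  else -p * log2(p) - (1 - p) * log2(1 - p)

binaryEntropy(0.5)   // 1.0 bit: a 50/50 split is maximally impure
binaryEntropy(0.9)   // ≈ 0.47 bits: a mostly pure node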
