spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From m...@apache.org
Subject spark git commit: [SPARK-10266][DOCUMENTATION, ML] Fixed @Since annotation for ml.tuning
Date Wed, 02 Dec 2015 22:16:10 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-1.6 5d915fed3 -> 911259e9a


[SPARK-10266][DOCUMENTATION, ML] Fixed @Since annotation for ml.tuning

cc mengxr noel-smith

I worked on this issue based on https://github.com/apache/spark/pull/8729.
ehsanmok, thank you for your contribution!

Author: Yu ISHIKAWA <yuu.ishikawa@gmail.com>
Author: Ehsan M.Kermani <ehsanmo1367@gmail.com>

Closes #9338 from yu-iskw/JIRA-10266.

(cherry picked from commit de07d06abecf3516c95d099b6c01a86e0c8cfd8c)
Signed-off-by: Xiangrui Meng <meng@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/911259e9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/911259e9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/911259e9

Branch: refs/heads/branch-1.6
Commit: 911259e9af6f9a81e775b1aa6d82fa44956bf993
Parents: 5d915fe
Author: Yu ISHIKAWA <yuu.ishikawa@gmail.com>
Authored: Wed Dec 2 14:15:54 2015 -0800
Committer: Xiangrui Meng <meng@databricks.com>
Committed: Wed Dec 2 14:16:05 2015 -0800

----------------------------------------------------------------------
 .../apache/spark/ml/tuning/CrossValidator.scala | 34 ++++++++++++++------
 .../spark/ml/tuning/ParamGridBuilder.scala      | 14 ++++++--
 .../spark/ml/tuning/TrainValidationSplit.scala  | 26 ++++++++++++---
 3 files changed, 58 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/911259e9/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala b/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala
index 83a9048..5c09f1a 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala
@@ -19,18 +19,18 @@ package org.apache.spark.ml.tuning
 
 import com.github.fommil.netlib.F2jBLAS
 import org.apache.hadoop.fs.Path
-import org.json4s.{JObject, DefaultFormats}
 import org.json4s.jackson.JsonMethods._
+import org.json4s.{DefaultFormats, JObject}
 
-import org.apache.spark.ml.classification.OneVsRestParams
-import org.apache.spark.ml.feature.RFormulaModel
-import org.apache.spark.{SparkContext, Logging}
+import org.apache.spark.{Logging, SparkContext}
 import org.apache.spark.annotation.{Experimental, Since}
 import org.apache.spark.ml._
+import org.apache.spark.ml.classification.OneVsRestParams
 import org.apache.spark.ml.evaluation.Evaluator
+import org.apache.spark.ml.feature.RFormulaModel
 import org.apache.spark.ml.param._
-import org.apache.spark.ml.util._
 import org.apache.spark.ml.util.DefaultParamsReader.Metadata
+import org.apache.spark.ml.util._
 import org.apache.spark.mllib.util.MLUtils
 import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.types.StructType
@@ -58,26 +58,34 @@ private[ml] trait CrossValidatorParams extends ValidatorParams {
  * :: Experimental ::
  * K-fold cross validation.
  */
+@Since("1.2.0")
 @Experimental
-class CrossValidator(override val uid: String) extends Estimator[CrossValidatorModel]
+class CrossValidator @Since("1.2.0") (@Since("1.4.0") override val uid: String)
+  extends Estimator[CrossValidatorModel]
   with CrossValidatorParams with MLWritable with Logging {
 
+  @Since("1.2.0")
   def this() = this(Identifiable.randomUID("cv"))
 
   private val f2jBLAS = new F2jBLAS
 
   /** @group setParam */
+  @Since("1.2.0")
   def setEstimator(value: Estimator[_]): this.type = set(estimator, value)
 
   /** @group setParam */
+  @Since("1.2.0")
   def setEstimatorParamMaps(value: Array[ParamMap]): this.type = set(estimatorParamMaps,
value)
 
   /** @group setParam */
+  @Since("1.2.0")
   def setEvaluator(value: Evaluator): this.type = set(evaluator, value)
 
   /** @group setParam */
+  @Since("1.2.0")
   def setNumFolds(value: Int): this.type = set(numFolds, value)
 
+  @Since("1.4.0")
   override def fit(dataset: DataFrame): CrossValidatorModel = {
     val schema = dataset.schema
     transformSchema(schema, logging = true)
@@ -116,10 +124,12 @@ class CrossValidator(override val uid: String) extends Estimator[CrossValidatorM
     copyValues(new CrossValidatorModel(uid, bestModel, metrics).setParent(this))
   }
 
+  @Since("1.4.0")
   override def transformSchema(schema: StructType): StructType = {
     $(estimator).transformSchema(schema)
   }
 
+  @Since("1.4.0")
   override def validateParams(): Unit = {
     super.validateParams()
     val est = $(estimator)
@@ -128,6 +138,7 @@ class CrossValidator(override val uid: String) extends Estimator[CrossValidatorM
     }
   }
 
+  @Since("1.4.0")
   override def copy(extra: ParamMap): CrossValidator = {
     val copied = defaultCopy(extra).asInstanceOf[CrossValidator]
     if (copied.isDefined(estimator)) {
@@ -308,26 +319,31 @@ object CrossValidator extends MLReadable[CrossValidator] {
  * @param avgMetrics Average cross-validation metrics for each paramMap in
  *                   [[CrossValidator.estimatorParamMaps]], in the corresponding order.
  */
+@Since("1.2.0")
 @Experimental
 class CrossValidatorModel private[ml] (
-    override val uid: String,
-    val bestModel: Model[_],
-    val avgMetrics: Array[Double])
+    @Since("1.4.0") override val uid: String,
+    @Since("1.2.0") val bestModel: Model[_],
+    @Since("1.5.0") val avgMetrics: Array[Double])
   extends Model[CrossValidatorModel] with CrossValidatorParams with MLWritable {
 
+  @Since("1.4.0")
   override def validateParams(): Unit = {
     bestModel.validateParams()
   }
 
+  @Since("1.4.0")
   override def transform(dataset: DataFrame): DataFrame = {
     transformSchema(dataset.schema, logging = true)
     bestModel.transform(dataset)
   }
 
+  @Since("1.4.0")
   override def transformSchema(schema: StructType): StructType = {
     bestModel.transformSchema(schema)
   }
 
+  @Since("1.4.0")
   override def copy(extra: ParamMap): CrossValidatorModel = {
     val copied = new CrossValidatorModel(
       uid,

http://git-wip-us.apache.org/repos/asf/spark/blob/911259e9/mllib/src/main/scala/org/apache/spark/ml/tuning/ParamGridBuilder.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/tuning/ParamGridBuilder.scala b/mllib/src/main/scala/org/apache/spark/ml/tuning/ParamGridBuilder.scala
index 98a8f03..b836d2a 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/tuning/ParamGridBuilder.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/tuning/ParamGridBuilder.scala
@@ -20,21 +20,23 @@ package org.apache.spark.ml.tuning
 import scala.annotation.varargs
 import scala.collection.mutable
 
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, Since}
 import org.apache.spark.ml.param._
 
 /**
  * :: Experimental ::
  * Builder for a param grid used in grid search-based model selection.
  */
+@Since("1.2.0")
 @Experimental
-class ParamGridBuilder {
+class ParamGridBuilder @Since("1.2.0") {
 
   private val paramGrid = mutable.Map.empty[Param[_], Iterable[_]]
 
   /**
    * Sets the given parameters in this grid to fixed values.
    */
+  @Since("1.2.0")
   def baseOn(paramMap: ParamMap): this.type = {
     baseOn(paramMap.toSeq: _*)
     this
@@ -43,6 +45,7 @@ class ParamGridBuilder {
   /**
    * Sets the given parameters in this grid to fixed values.
    */
+  @Since("1.2.0")
   @varargs
   def baseOn(paramPairs: ParamPair[_]*): this.type = {
     paramPairs.foreach { p =>
@@ -54,6 +57,7 @@ class ParamGridBuilder {
   /**
    * Adds a param with multiple values (overwrites if the input param exists).
    */
+  @Since("1.2.0")
   def addGrid[T](param: Param[T], values: Iterable[T]): this.type = {
     paramGrid.put(param, values)
     this
@@ -64,6 +68,7 @@ class ParamGridBuilder {
   /**
    * Adds a double param with multiple values.
    */
+  @Since("1.2.0")
   def addGrid(param: DoubleParam, values: Array[Double]): this.type = {
     addGrid[Double](param, values)
   }
@@ -71,6 +76,7 @@ class ParamGridBuilder {
   /**
    * Adds a int param with multiple values.
    */
+  @Since("1.2.0")
   def addGrid(param: IntParam, values: Array[Int]): this.type = {
     addGrid[Int](param, values)
   }
@@ -78,6 +84,7 @@ class ParamGridBuilder {
   /**
    * Adds a float param with multiple values.
    */
+  @Since("1.2.0")
   def addGrid(param: FloatParam, values: Array[Float]): this.type = {
     addGrid[Float](param, values)
   }
@@ -85,6 +92,7 @@ class ParamGridBuilder {
   /**
    * Adds a long param with multiple values.
    */
+  @Since("1.2.0")
   def addGrid(param: LongParam, values: Array[Long]): this.type = {
     addGrid[Long](param, values)
   }
@@ -92,6 +100,7 @@ class ParamGridBuilder {
   /**
    * Adds a boolean param with true and false.
    */
+  @Since("1.2.0")
   def addGrid(param: BooleanParam): this.type = {
     addGrid[Boolean](param, Array(true, false))
   }
@@ -99,6 +108,7 @@ class ParamGridBuilder {
   /**
    * Builds and returns all combinations of parameters specified by the param grid.
    */
+  @Since("1.2.0")
   def build(): Array[ParamMap] = {
     var paramMaps = Array(new ParamMap)
     paramGrid.foreach { case (param, values) =>

http://git-wip-us.apache.org/repos/asf/spark/blob/911259e9/mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala b/mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala
index 73a14b8..adf0630 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.ml.tuning
 
 import org.apache.spark.Logging
-import org.apache.spark.annotation.Experimental
+import org.apache.spark.annotation.{Experimental, Since}
 import org.apache.spark.ml.evaluation.Evaluator
 import org.apache.spark.ml.{Estimator, Model}
 import org.apache.spark.ml.param.{DoubleParam, ParamMap, ParamValidators}
@@ -51,24 +51,32 @@ private[ml] trait TrainValidationSplitParams extends ValidatorParams {
  * and uses evaluation metric on the validation set to select the best model.
  * Similar to [[CrossValidator]], but only splits the set once.
  */
+@Since("1.5.0")
 @Experimental
-class TrainValidationSplit(override val uid: String) extends Estimator[TrainValidationSplitModel]
+class TrainValidationSplit @Since("1.5.0") (@Since("1.5.0") override val uid: String)
+  extends Estimator[TrainValidationSplitModel]
   with TrainValidationSplitParams with Logging {
 
+  @Since("1.5.0")
   def this() = this(Identifiable.randomUID("tvs"))
 
   /** @group setParam */
+  @Since("1.5.0")
   def setEstimator(value: Estimator[_]): this.type = set(estimator, value)
 
   /** @group setParam */
+  @Since("1.5.0")
   def setEstimatorParamMaps(value: Array[ParamMap]): this.type = set(estimatorParamMaps,
value)
 
   /** @group setParam */
+  @Since("1.5.0")
   def setEvaluator(value: Evaluator): this.type = set(evaluator, value)
 
   /** @group setParam */
+  @Since("1.5.0")
   def setTrainRatio(value: Double): this.type = set(trainRatio, value)
 
+  @Since("1.5.0")
   override def fit(dataset: DataFrame): TrainValidationSplitModel = {
     val schema = dataset.schema
     transformSchema(schema, logging = true)
@@ -108,10 +116,12 @@ class TrainValidationSplit(override val uid: String) extends Estimator[TrainVali
     copyValues(new TrainValidationSplitModel(uid, bestModel, metrics).setParent(this))
   }
 
+  @Since("1.5.0")
   override def transformSchema(schema: StructType): StructType = {
     $(estimator).transformSchema(schema)
   }
 
+  @Since("1.5.0")
   override def validateParams(): Unit = {
     super.validateParams()
     val est = $(estimator)
@@ -120,6 +130,7 @@ class TrainValidationSplit(override val uid: String) extends Estimator[TrainVali
     }
   }
 
+  @Since("1.5.0")
   override def copy(extra: ParamMap): TrainValidationSplit = {
     val copied = defaultCopy(extra).asInstanceOf[TrainValidationSplit]
     if (copied.isDefined(estimator)) {
@@ -140,26 +151,31 @@ class TrainValidationSplit(override val uid: String) extends Estimator[TrainVali
  * @param bestModel Estimator determined best model.
  * @param validationMetrics Evaluated validation metrics.
  */
+@Since("1.5.0")
 @Experimental
 class TrainValidationSplitModel private[ml] (
-    override val uid: String,
-    val bestModel: Model[_],
-    val validationMetrics: Array[Double])
+    @Since("1.5.0") override val uid: String,
+    @Since("1.5.0") val bestModel: Model[_],
+    @Since("1.5.0") val validationMetrics: Array[Double])
   extends Model[TrainValidationSplitModel] with TrainValidationSplitParams {
 
+  @Since("1.5.0")
   override def validateParams(): Unit = {
     bestModel.validateParams()
   }
 
+  @Since("1.5.0")
   override def transform(dataset: DataFrame): DataFrame = {
     transformSchema(dataset.schema, logging = true)
     bestModel.transform(dataset)
   }
 
+  @Since("1.5.0")
   override def transformSchema(schema: StructType): StructType = {
     bestModel.transformSchema(schema)
   }
 
+  @Since("1.5.0")
   override def copy(extra: ParamMap): TrainValidationSplitModel = {
     val copied = new TrainValidationSplitModel (
       uid,


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


Mime
View raw message