Posted to commits@spark.apache.org by sr...@apache.org on 2016/01/18 14:46:38 UTC

spark git commit: [SPARK-10264][DOCUMENTATION] Added @Since to ml.recommendation

Repository: spark
Updated Branches:
  refs/heads/master bc36b0f1a -> 233d6cee9


[SPARK-10264][DOCUMENTATION] Added @Since to ml.recommendation

I have created a new PR since the original PR has not been updated for a long time.
Please help to review.

srowen

Author: Tommy YU <tu...@163.com>

Closes #10756 from Wenpei/add_since_to_recomm.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/233d6cee
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/233d6cee
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/233d6cee

Branch: refs/heads/master
Commit: 233d6cee96bb4c1723a5ab36efd19fd6180d651c
Parents: bc36b0f
Author: Tommy YU <tu...@163.com>
Authored: Mon Jan 18 13:46:14 2016 +0000
Committer: Sean Owen <so...@cloudera.com>
Committed: Mon Jan 18 13:46:14 2016 +0000

----------------------------------------------------------------------
 .../apache/spark/ml/recommendation/ALS.scala    | 33 ++++++++++++++++++--
 1 file changed, 30 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/233d6cee/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala b/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
index 472c185..1481c82 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
@@ -180,22 +180,27 @@ private[recommendation] trait ALSParams extends ALSModelParams with HasMaxIter w
  * @param itemFactors a DataFrame that stores item factors in two columns: `id` and `features`
  */
 @Experimental
+@Since("1.3.0")
 class ALSModel private[ml] (
-    override val uid: String,
-    val rank: Int,
+    @Since("1.4.0") override val uid: String,
+    @Since("1.4.0") val rank: Int,
     @transient val userFactors: DataFrame,
     @transient val itemFactors: DataFrame)
   extends Model[ALSModel] with ALSModelParams with MLWritable {
 
   /** @group setParam */
+  @Since("1.4.0")
   def setUserCol(value: String): this.type = set(userCol, value)
 
   /** @group setParam */
+  @Since("1.4.0")
   def setItemCol(value: String): this.type = set(itemCol, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setPredictionCol(value: String): this.type = set(predictionCol, value)
 
+  @Since("1.3.0")
   override def transform(dataset: DataFrame): DataFrame = {
     // Register a UDF for DataFrame, and then
     // create a new column named map(predictionCol) by running the predict UDF.
@@ -213,6 +218,7 @@ class ALSModel private[ml] (
         predict(userFactors("features"), itemFactors("features")).as($(predictionCol)))
   }
 
+  @Since("1.3.0")
   override def transformSchema(schema: StructType): StructType = {
     validateParams()
     SchemaUtils.checkColumnType(schema, $(userCol), IntegerType)
@@ -220,6 +226,7 @@ class ALSModel private[ml] (
     SchemaUtils.appendColumn(schema, $(predictionCol), FloatType)
   }
 
+  @Since("1.5.0")
   override def copy(extra: ParamMap): ALSModel = {
     val copied = new ALSModel(uid, rank, userFactors, itemFactors)
     copyValues(copied, extra).setParent(parent)
@@ -303,65 +310,83 @@ object ALSModel extends MLReadable[ALSModel] {
  * preferences rather than explicit ratings given to items.
  */
 @Experimental
-class ALS(override val uid: String) extends Estimator[ALSModel] with ALSParams
+@Since("1.3.0")
+class ALS(@Since("1.4.0") override val uid: String) extends Estimator[ALSModel] with ALSParams
   with DefaultParamsWritable {
 
   import org.apache.spark.ml.recommendation.ALS.Rating
 
+  @Since("1.4.0")
   def this() = this(Identifiable.randomUID("als"))
 
   /** @group setParam */
+  @Since("1.3.0")
   def setRank(value: Int): this.type = set(rank, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setNumUserBlocks(value: Int): this.type = set(numUserBlocks, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setNumItemBlocks(value: Int): this.type = set(numItemBlocks, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setImplicitPrefs(value: Boolean): this.type = set(implicitPrefs, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setAlpha(value: Double): this.type = set(alpha, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setUserCol(value: String): this.type = set(userCol, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setItemCol(value: String): this.type = set(itemCol, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setRatingCol(value: String): this.type = set(ratingCol, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setPredictionCol(value: String): this.type = set(predictionCol, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setMaxIter(value: Int): this.type = set(maxIter, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setRegParam(value: Double): this.type = set(regParam, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setNonnegative(value: Boolean): this.type = set(nonnegative, value)
 
   /** @group setParam */
+  @Since("1.4.0")
   def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
 
   /** @group setParam */
+  @Since("1.3.0")
   def setSeed(value: Long): this.type = set(seed, value)
 
   /**
    * Sets both numUserBlocks and numItemBlocks to the specific value.
    * @group setParam
    */
+  @Since("1.3.0")
   def setNumBlocks(value: Int): this.type = {
     setNumUserBlocks(value)
     setNumItemBlocks(value)
     this
   }
 
+  @Since("1.3.0")
   override def fit(dataset: DataFrame): ALSModel = {
     import dataset.sqlContext.implicits._
     val r = if ($(ratingCol) != "") col($(ratingCol)).cast(FloatType) else lit(1.0f)
@@ -381,10 +406,12 @@ class ALS(override val uid: String) extends Estimator[ALSModel] with ALSParams
     copyValues(model)
   }
 
+  @Since("1.3.0")
   override def transformSchema(schema: StructType): StructType = {
     validateAndTransformSchema(schema)
   }
 
+  @Since("1.5.0")
   override def copy(extra: ParamMap): ALS = defaultCopy(extra)
 }
 
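For reference, a minimal usage sketch (not part of the commit) of the DataFrame-based ALS API whose constructors, setters, fit and transform methods the @Since annotations above now document. The app name, column names and rating data below are made up for illustration; this assumes a local Spark 1.6-style setup with SQLContext.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.ml.recommendation.ALS

object ALSSinceSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("als-since-sketch").setMaster("local[*]"))
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    // Hypothetical (user, item, rating) triples used only for illustration.
    val ratings = Seq((0, 10, 4.0f), (0, 11, 1.0f), (1, 10, 5.0f), (1, 12, 2.0f))
      .toDF("userId", "itemId", "rating")

    // Each setter below is one of the methods annotated in the diff
    // (setRank/setMaxIter/... carry @Since("1.3.0"), the no-arg constructor @Since("1.4.0")).
    val als = new ALS()
      .setRank(10)
      .setMaxIter(5)
      .setRegParam(0.1)
      .setUserCol("userId")
      .setItemCol("itemId")
      .setRatingCol("rating")

    // fit and transform are annotated @Since("1.3.0") in the diff.
    val model = als.fit(ratings)
    model.transform(ratings).show()

    sc.stop()
  }
}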


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org