Posted to commits@predictionio.apache.org by sh...@apache.org on 2019/06/04 02:15:27 UTC

[predictionio] branch develop updated: [PIO-208] Fix examples according to the latest templates (#515)

This is an automated email from the ASF dual-hosted git repository.

shimamoto pushed a commit to branch develop
in repository https://gitbox.apache.org/repos/asf/predictionio.git


The following commit(s) were added to refs/heads/develop by this push:
     new 93f5aa6  [PIO-208] Fix examples according to the latest templates (#515)
93f5aa6 is described below

commit 93f5aa616defaa13bd6fb6d9580486ff4bfb9e3e
Author: Naoki Takezoe <ta...@apache.org>
AuthorDate: Tue Jun 4 11:15:20 2019 +0900

    [PIO-208] Fix examples according to the latest templates (#515)
    
    * Fix links to the document
    
    * Mark override methods
---
 examples/scala-parallel-classification/README.md                        | 2 +-
 .../add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala              | 2 ++
 .../add-algorithm/src/main/scala/PrecisionEvaluation.scala              | 1 +
 .../add-algorithm/src/main/scala/Preparator.scala                       | 1 +
 .../add-algorithm/src/main/scala/RandomForestAlgorithm.scala            | 2 ++
 .../reading-custom-properties/src/main/scala/NaiveBayesAlgorithm.scala  | 2 ++
 .../reading-custom-properties/src/main/scala/PrecisionEvaluation.scala  | 1 +
 .../reading-custom-properties/src/main/scala/Preparator.scala           | 1 +
 examples/scala-parallel-ecommercerecommendation/README.md               | 2 +-
 .../adjust-score/src/main/scala/ECommAlgorithm.scala                    | 2 ++
 .../adjust-score/src/main/scala/Preparator.scala                        | 1 +
 examples/scala-parallel-recommendation/README.md                        | 2 +-
 .../blacklist-items/src/main/scala/ALSAlgorithm.scala                   | 2 ++
 .../blacklist-items/src/main/scala/ALSModel.scala                       | 1 +
 .../blacklist-items/src/main/scala/Evaluation.scala                     | 2 ++
 .../blacklist-items/src/main/scala/Preparator.scala                     | 1 +
 .../customize-data-prep/src/main/scala/ALSAlgorithm.scala               | 2 ++
 .../customize-data-prep/src/main/scala/ALSModel.scala                   | 1 +
 .../customize-data-prep/src/main/scala/Evaluation.scala                 | 2 ++
 .../customize-data-prep/src/main/scala/Preparator.scala                 | 1 +
 .../customize-serving/src/main/scala/ALSAlgorithm.scala                 | 2 ++
 .../customize-serving/src/main/scala/ALSModel.scala                     | 1 +
 .../customize-serving/src/main/scala/Evaluation.scala                   | 2 ++
 .../customize-serving/src/main/scala/Preparator.scala                   | 1 +
 .../reading-custom-events/src/main/scala/ALSAlgorithm.scala             | 2 ++
 .../reading-custom-events/src/main/scala/ALSModel.scala                 | 1 +
 .../reading-custom-events/src/main/scala/Evaluation.scala               | 2 ++
 .../reading-custom-events/src/main/scala/Preparator.scala               | 1 +
 .../train-with-view-event/src/main/scala/ALSAlgorithm.scala             | 2 ++
 .../train-with-view-event/src/main/scala/ALSModel.scala                 | 1 +
 .../train-with-view-event/src/main/scala/Evaluation.scala               | 2 ++
 .../train-with-view-event/src/main/scala/Preparator.scala               | 1 +
 examples/scala-parallel-similarproduct/README.md                        | 2 +-
 .../multi-events-multi-algos/src/main/scala/ALSAlgorithm.scala          | 2 ++
 .../multi-events-multi-algos/src/main/scala/CooccurrenceAlgorithm.scala | 2 ++
 .../multi-events-multi-algos/src/main/scala/Preparator.scala            | 1 +
 .../recommended-user/src/main/scala/ALSAlgorithm.scala                  | 2 ++
 .../recommended-user/src/main/scala/Preparator.scala                    | 1 +
 .../return-item-properties/src/main/scala/ALSAlgorithm.scala            | 2 ++
 .../return-item-properties/src/main/scala/CooccurrenceAlgorithm.scala   | 2 ++
 .../return-item-properties/src/main/scala/Preparator.scala              | 1 +
 .../rid-user-set-event/src/main/scala/ALSAlgorithm.scala                | 2 ++
 .../rid-user-set-event/src/main/scala/CooccurrenceAlgorithm.scala       | 2 ++
 .../rid-user-set-event/src/main/scala/Preparator.scala                  | 1 +
 .../train-with-rate-event/src/main/scala/ALSAlgorithm.scala             | 2 ++
 .../train-with-rate-event/src/main/scala/CooccurrenceAlgorithm.scala    | 2 ++
 .../train-with-rate-event/src/main/scala/Preparator.scala               | 1 +
 47 files changed, 70 insertions(+), 4 deletions(-)
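
For orientation, here is a minimal, self-contained sketch of the pattern the Scala changes below all follow. It uses only the Scala standard library and a hypothetical simplified trait rather than the real PredictionIO base classes (PPreparator, P2LAlgorithm, etc.): methods that implement abstract members of the template base classes are now marked with `override`, so a signature mismatch with the template surfaces as a compile error instead of silently defining an unrelated method.

trait SimpleAlgorithm[PD, M, Q, P] {
  // Stand-ins for the abstract train/predict members declared by the
  // template base classes (simplified; not the actual PredictionIO API).
  def train(data: PD): M
  def predict(model: M, query: Q): P
}

class MeanAlgorithm extends SimpleAlgorithm[Seq[Double], Double, Unit, Double] {

  // The commit puts `override` on its own line before each such method,
  // matching the style of the diffs below.
  override
  def train(data: Seq[Double]): Double =
    if (data.isEmpty) 0.0 else data.sum / data.size

  override
  def predict(model: Double, query: Unit): Double = model
}

object OverrideDemo extends App {
  val algo  = new MeanAlgorithm
  val model = algo.train(Seq(1.0, 2.0, 3.0))
  println(algo.predict(model, ())) // prints 2.0
}

If a method signature ever drifts from the base class (for example, after a template upgrade changes a parameter type), the `override` modifier turns the mismatch into a compile-time error.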

diff --git a/examples/scala-parallel-classification/README.md b/examples/scala-parallel-classification/README.md
index 96735ba..a19ee42 100644
--- a/examples/scala-parallel-classification/README.md
+++ b/examples/scala-parallel-classification/README.md
@@ -17,4 +17,4 @@ limitations under the License.
 
 This is based on Classification Engine Template v0.14.0.
 
-Please refer to http://predictionio.apache.org/templates/classification/how-to/
+Please refer to https://predictionio.apache.org/templates/classification/how-to/
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala
index 0ac5e5b..4c86bdc 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala
@@ -37,6 +37,7 @@ class NaiveBayesAlgorithm(val ap: AlgorithmParams)
 
   @transient lazy val logger = Logger[this.type]
 
+  override
   def train(sc: SparkContext, data: PreparedData): NaiveBayesModel = {
     // MLLib NaiveBayes cannot handle empty training data.
     require(data.labeledPoints.take(1).nonEmpty,
@@ -47,6 +48,7 @@ class NaiveBayesAlgorithm(val ap: AlgorithmParams)
     NaiveBayes.train(data.labeledPoints, ap.lambda)
   }
 
+  override
   def predict(model: NaiveBayesModel, query: Query): PredictedResult = {
     val label = model.predict(Vectors.dense(
       Array(query.attr0, query.attr1, query.attr2)
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/PrecisionEvaluation.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/PrecisionEvaluation.scala
index 019e2d8..addb21c 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/PrecisionEvaluation.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/PrecisionEvaluation.scala
@@ -25,6 +25,7 @@ case class Precision(label: Double)
   extends OptionAverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
   override def header: String = s"Precision(label = $label)"
 
+  override
   def calculate(query: Query, predicted: PredictedResult, actual: ActualResult)
   : Option[Double] = {
     if (predicted.label == label) {
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/Preparator.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Preparator.scala
index 20d8f8c..7314906 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/Preparator.scala
@@ -29,6 +29,7 @@ class PreparedData(
 
 class Preparator extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(trainingData.labeledPoints)
   }
diff --git a/examples/scala-parallel-classification/add-algorithm/src/main/scala/RandomForestAlgorithm.scala b/examples/scala-parallel-classification/add-algorithm/src/main/scala/RandomForestAlgorithm.scala
index 76dd7ca..f28d954 100644
--- a/examples/scala-parallel-classification/add-algorithm/src/main/scala/RandomForestAlgorithm.scala
+++ b/examples/scala-parallel-classification/add-algorithm/src/main/scala/RandomForestAlgorithm.scala
@@ -42,6 +42,7 @@ class RandomForestAlgorithm(val ap: RandomForestAlgorithmParams) // CHANGED
   Query, PredictedResult] {
 
   // CHANGED
+  override
   def train(sc: SparkContext, data: PreparedData): RandomForestModel = {
     // CHANGED
     // Empty categoricalFeaturesInfo indicates all features are continuous.
@@ -57,6 +58,7 @@ class RandomForestAlgorithm(val ap: RandomForestAlgorithmParams) // CHANGED
       ap.maxBins)
   }
 
+  override
   def predict(
     model: RandomForestModel, // CHANGED
     query: Query): PredictedResult = {
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/NaiveBayesAlgorithm.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/NaiveBayesAlgorithm.scala
index 6625551..8ee2f53 100644
--- a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/NaiveBayesAlgorithm.scala
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/NaiveBayesAlgorithm.scala
@@ -37,6 +37,7 @@ class NaiveBayesAlgorithm(val ap: AlgorithmParams)
 
   @transient lazy val logger = Logger[this.type]
 
+  override
   def train(sc: SparkContext, data: PreparedData): NaiveBayesModel = {
     // MLLib NaiveBayes cannot handle empty training data.
     require(data.labeledPoints.take(1).nonEmpty,
@@ -47,6 +48,7 @@ class NaiveBayesAlgorithm(val ap: AlgorithmParams)
     NaiveBayes.train(data.labeledPoints, ap.lambda)
   }
 
+  override
   def predict(model: NaiveBayesModel, query: Query): PredictedResult = {
     val label = model.predict(Vectors.dense(
       // MODIFIED
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/PrecisionEvaluation.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/PrecisionEvaluation.scala
index 019e2d8..addb21c 100644
--- a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/PrecisionEvaluation.scala
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/PrecisionEvaluation.scala
@@ -25,6 +25,7 @@ case class Precision(label: Double)
   extends OptionAverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
   override def header: String = s"Precision(label = $label)"
 
+  override
   def calculate(query: Query, predicted: PredictedResult, actual: ActualResult)
   : Option[Double] = {
     if (predicted.label == label) {
diff --git a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Preparator.scala b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Preparator.scala
index 20d8f8c..7314906 100644
--- a/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-classification/reading-custom-properties/src/main/scala/Preparator.scala
@@ -29,6 +29,7 @@ class PreparedData(
 
 class Preparator extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(trainingData.labeledPoints)
   }
diff --git a/examples/scala-parallel-ecommercerecommendation/README.md b/examples/scala-parallel-ecommercerecommendation/README.md
index b80c928..60ff560 100644
--- a/examples/scala-parallel-ecommercerecommendation/README.md
+++ b/examples/scala-parallel-ecommercerecommendation/README.md
@@ -17,4 +17,4 @@ limitations under the License.
 
 This is based on E-Commerce Recommendation Template v0.14.0.
 
-Please refer to http://predictionio.apache.org/templates/ecommercerecommendation/how-to/
+Please refer to https://predictionio.apache.org/templates/ecommercerecommendation/how-to/
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/ECommAlgorithm.scala b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/ECommAlgorithm.scala
index d63b090..b2643ea 100644
--- a/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/ECommAlgorithm.scala
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/ECommAlgorithm.scala
@@ -87,6 +87,7 @@ class ECommAlgorithm(val ap: ECommAlgorithmParams)
 
   @transient lazy val logger = Logger[this.type]
 
+  override
   def train(sc: SparkContext, data: PreparedData): ECommModel = {
     require(!data.viewEvents.take(1).isEmpty,
       s"viewEvents in PreparedData cannot be empty." +
@@ -239,6 +240,7 @@ class ECommAlgorithm(val ap: ECommAlgorithmParams)
     buyCountsRDD.collectAsMap.toMap
   }
 
+  override
   def predict(model: ECommModel, query: Query): PredictedResult = {
 
     val userFeatures = model.userFeatures
diff --git a/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Preparator.scala b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Preparator.scala
index 585aaea..7862add 100644
--- a/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-ecommercerecommendation/adjust-score/src/main/scala/Preparator.scala
@@ -26,6 +26,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(
       users = trainingData.users,
diff --git a/examples/scala-parallel-recommendation/README.md b/examples/scala-parallel-recommendation/README.md
index bd99531..ab1c2ec 100644
--- a/examples/scala-parallel-recommendation/README.md
+++ b/examples/scala-parallel-recommendation/README.md
@@ -17,4 +17,4 @@ limitations under the License.
 
 This is based on Recommendation Template v0.14.0.
 
-Please refer to http://predictionio.apache.org/templates/recommendation/how-to/
+Please refer to https://predictionio.apache.org/templates/recommendation/how-to/
diff --git a/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/ALSAlgorithm.scala
index d500d67..c155b53 100644
--- a/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/ALSAlgorithm.scala
@@ -48,6 +48,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       s"To remedy it, set lower numIterations or checkpoint parameters.")
   }
 
+  override
   def train(sc: SparkContext, data: PreparedData): ALSModel = {
     // MLLib ALS cannot handle empty training data.
     require(!data.ratings.take(1).isEmpty,
@@ -92,6 +93,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       itemStringIntMap = itemStringIntMap)
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
     // Convert String ID to Int index for Mllib
     model.userStringIntMap.get(query.user).map { userInt =>
diff --git a/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/ALSModel.scala
index f3c881e..ec459f3 100644
--- a/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/ALSModel.scala
+++ b/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/ALSModel.scala
@@ -59,6 +59,7 @@ class ALSModel(
     scored.top(num)(Ordering.by(_._2))
   }
 
+  override
   def save(id: String, params: ALSAlgorithmParams,
     sc: SparkContext): Boolean = {
 
diff --git a/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/Evaluation.scala b/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/Evaluation.scala
index a665496..3f1dc78 100644
--- a/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/Evaluation.scala
+++ b/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/Evaluation.scala
@@ -35,6 +35,7 @@ case class PrecisionAtK(k: Int, ratingThreshold: Double = 2.0)
 
   override def header = s"Precision@K (k=$k, threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Option[Double] = {
     val positives: Set[String] = a.ratings.filter(_.rating >= ratingThreshold).map(_.item).toSet
 
@@ -53,6 +54,7 @@ case class PositiveCount(ratingThreshold: Double = 2.0)
     extends AverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
   override def header = s"PositiveCount (threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Double = {
     a.ratings.filter(_.rating >= ratingThreshold).size
   }
diff --git a/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/Preparator.scala
index 6a41c47..af7e744 100644
--- a/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/blacklist-items/src/main/scala/Preparator.scala
@@ -26,6 +26,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(ratings = trainingData.ratings)
   }
diff --git a/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/ALSAlgorithm.scala
index 65f2f15..13230fd 100644
--- a/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/ALSAlgorithm.scala
@@ -48,6 +48,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       s"To remedy it, set lower numIterations or checkpoint parameters.")
   }
 
+  override
   def train(sc: SparkContext, data: PreparedData): ALSModel = {
     // MLLib ALS cannot handle empty training data.
     require(!data.ratings.take(1).isEmpty,
@@ -92,6 +93,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       itemStringIntMap = itemStringIntMap)
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
     // Convert String ID to Int index for Mllib
     model.userStringIntMap.get(query.user).map { userInt =>
diff --git a/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/ALSModel.scala
index 898858d..164781c 100644
--- a/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/ALSModel.scala
+++ b/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/ALSModel.scala
@@ -39,6 +39,7 @@ class ALSModel(
   extends MatrixFactorizationModel(rank, userFeatures, productFeatures)
   with PersistentModel[ALSAlgorithmParams] {
 
+  override
   def save(id: String, params: ALSAlgorithmParams,
     sc: SparkContext): Boolean = {
 
diff --git a/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/Evaluation.scala b/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/Evaluation.scala
index a665496..3f1dc78 100644
--- a/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/Evaluation.scala
+++ b/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/Evaluation.scala
@@ -35,6 +35,7 @@ case class PrecisionAtK(k: Int, ratingThreshold: Double = 2.0)
 
   override def header = s"Precision@K (k=$k, threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Option[Double] = {
     val positives: Set[String] = a.ratings.filter(_.rating >= ratingThreshold).map(_.item).toSet
 
@@ -53,6 +54,7 @@ case class PositiveCount(ratingThreshold: Double = 2.0)
     extends AverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
   override def header = s"PositiveCount (threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Double = {
     a.ratings.filter(_.rating >= ratingThreshold).size
   }
diff --git a/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/Preparator.scala
index cf792af..5c09b7a 100644
--- a/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/customize-data-prep/src/main/scala/Preparator.scala
@@ -34,6 +34,7 @@ case class CustomPreparatorParams(
 class Preparator(pp: CustomPreparatorParams) // ADDED CustomPreparatorParams
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     val noTrainItems = Source.fromFile(pp.filepath).getLines.toSet // CHANGED
     val ratings = trainingData.ratings.filter( r =>
diff --git a/examples/scala-parallel-recommendation/customize-serving/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/customize-serving/src/main/scala/ALSAlgorithm.scala
index 65f2f15..13230fd 100644
--- a/examples/scala-parallel-recommendation/customize-serving/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-recommendation/customize-serving/src/main/scala/ALSAlgorithm.scala
@@ -48,6 +48,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       s"To remedy it, set lower numIterations or checkpoint parameters.")
   }
 
+  override
   def train(sc: SparkContext, data: PreparedData): ALSModel = {
     // MLLib ALS cannot handle empty training data.
     require(!data.ratings.take(1).isEmpty,
@@ -92,6 +93,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       itemStringIntMap = itemStringIntMap)
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
     // Convert String ID to Int index for Mllib
     model.userStringIntMap.get(query.user).map { userInt =>
diff --git a/examples/scala-parallel-recommendation/customize-serving/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/customize-serving/src/main/scala/ALSModel.scala
index 898858d..164781c 100644
--- a/examples/scala-parallel-recommendation/customize-serving/src/main/scala/ALSModel.scala
+++ b/examples/scala-parallel-recommendation/customize-serving/src/main/scala/ALSModel.scala
@@ -39,6 +39,7 @@ class ALSModel(
   extends MatrixFactorizationModel(rank, userFeatures, productFeatures)
   with PersistentModel[ALSAlgorithmParams] {
 
+  override
   def save(id: String, params: ALSAlgorithmParams,
     sc: SparkContext): Boolean = {
 
diff --git a/examples/scala-parallel-recommendation/customize-serving/src/main/scala/Evaluation.scala b/examples/scala-parallel-recommendation/customize-serving/src/main/scala/Evaluation.scala
index a665496..3f1dc78 100644
--- a/examples/scala-parallel-recommendation/customize-serving/src/main/scala/Evaluation.scala
+++ b/examples/scala-parallel-recommendation/customize-serving/src/main/scala/Evaluation.scala
@@ -35,6 +35,7 @@ case class PrecisionAtK(k: Int, ratingThreshold: Double = 2.0)
 
   override def header = s"Precision@K (k=$k, threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Option[Double] = {
     val positives: Set[String] = a.ratings.filter(_.rating >= ratingThreshold).map(_.item).toSet
 
@@ -53,6 +54,7 @@ case class PositiveCount(ratingThreshold: Double = 2.0)
     extends AverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
   override def header = s"PositiveCount (threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Double = {
     a.ratings.filter(_.rating >= ratingThreshold).size
   }
diff --git a/examples/scala-parallel-recommendation/customize-serving/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/customize-serving/src/main/scala/Preparator.scala
index 6a41c47..af7e744 100644
--- a/examples/scala-parallel-recommendation/customize-serving/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/customize-serving/src/main/scala/Preparator.scala
@@ -26,6 +26,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(ratings = trainingData.ratings)
   }
diff --git a/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/ALSAlgorithm.scala
index 65f2f15..13230fd 100644
--- a/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/ALSAlgorithm.scala
@@ -48,6 +48,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       s"To remedy it, set lower numIterations or checkpoint parameters.")
   }
 
+  override
   def train(sc: SparkContext, data: PreparedData): ALSModel = {
     // MLLib ALS cannot handle empty training data.
     require(!data.ratings.take(1).isEmpty,
@@ -92,6 +93,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       itemStringIntMap = itemStringIntMap)
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
     // Convert String ID to Int index for Mllib
     model.userStringIntMap.get(query.user).map { userInt =>
diff --git a/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/ALSModel.scala
index 898858d..164781c 100644
--- a/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/ALSModel.scala
+++ b/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/ALSModel.scala
@@ -39,6 +39,7 @@ class ALSModel(
   extends MatrixFactorizationModel(rank, userFeatures, productFeatures)
   with PersistentModel[ALSAlgorithmParams] {
 
+  override
   def save(id: String, params: ALSAlgorithmParams,
     sc: SparkContext): Boolean = {
 
diff --git a/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/Evaluation.scala b/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/Evaluation.scala
index a665496..3f1dc78 100644
--- a/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/Evaluation.scala
+++ b/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/Evaluation.scala
@@ -35,6 +35,7 @@ case class PrecisionAtK(k: Int, ratingThreshold: Double = 2.0)
 
   override def header = s"Precision@K (k=$k, threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Option[Double] = {
     val positives: Set[String] = a.ratings.filter(_.rating >= ratingThreshold).map(_.item).toSet
 
@@ -53,6 +54,7 @@ case class PositiveCount(ratingThreshold: Double = 2.0)
     extends AverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
   override def header = s"PositiveCount (threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Double = {
     a.ratings.filter(_.rating >= ratingThreshold).size
   }
diff --git a/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/Preparator.scala
index 6a41c47..af7e744 100644
--- a/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/reading-custom-events/src/main/scala/Preparator.scala
@@ -26,6 +26,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(ratings = trainingData.ratings)
   }
diff --git a/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/ALSAlgorithm.scala
index 234aa0d..a555b69 100644
--- a/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/ALSAlgorithm.scala
@@ -48,6 +48,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       s"To remedy it, set lower numIterations or checkpoint parameters.")
   }
 
+  override
   def train(sc: SparkContext, data: PreparedData): ALSModel = {
     // MLLib ALS cannot handle empty training data.
     require(!data.ratings.take(1).isEmpty,
@@ -93,6 +94,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
       itemStringIntMap = itemStringIntMap)
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
     // Convert String ID to Int index for Mllib
     model.userStringIntMap.get(query.user).map { userInt =>
diff --git a/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/ALSModel.scala b/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/ALSModel.scala
index 898858d..164781c 100644
--- a/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/ALSModel.scala
+++ b/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/ALSModel.scala
@@ -39,6 +39,7 @@ class ALSModel(
   extends MatrixFactorizationModel(rank, userFeatures, productFeatures)
   with PersistentModel[ALSAlgorithmParams] {
 
+  override
   def save(id: String, params: ALSAlgorithmParams,
     sc: SparkContext): Boolean = {
 
diff --git a/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/Evaluation.scala b/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/Evaluation.scala
index a665496..3f1dc78 100644
--- a/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/Evaluation.scala
+++ b/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/Evaluation.scala
@@ -35,6 +35,7 @@ case class PrecisionAtK(k: Int, ratingThreshold: Double = 2.0)
 
   override def header = s"Precision@K (k=$k, threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Option[Double] = {
     val positives: Set[String] = a.ratings.filter(_.rating >= ratingThreshold).map(_.item).toSet
 
@@ -53,6 +54,7 @@ case class PositiveCount(ratingThreshold: Double = 2.0)
     extends AverageMetric[EmptyEvaluationInfo, Query, PredictedResult, ActualResult] {
   override def header = s"PositiveCount (threshold=$ratingThreshold)"
 
+  override
   def calculate(q: Query, p: PredictedResult, a: ActualResult): Double = {
     a.ratings.filter(_.rating >= ratingThreshold).size
   }
diff --git a/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/Preparator.scala b/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/Preparator.scala
index 6a41c47..af7e744 100644
--- a/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-recommendation/train-with-view-event/src/main/scala/Preparator.scala
@@ -26,6 +26,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(ratings = trainingData.ratings)
   }
diff --git a/examples/scala-parallel-similarproduct/README.md b/examples/scala-parallel-similarproduct/README.md
index a1da18b..404bdac 100644
--- a/examples/scala-parallel-similarproduct/README.md
+++ b/examples/scala-parallel-similarproduct/README.md
@@ -17,4 +17,4 @@ limitations under the License.
 
 This is based on Similar Product Template v0.14.0.
 
-Please refer to http://predictionio.apache.org/templates/similarproduct/how-to/
+Please refer to https://predictionio.apache.org/templates/similarproduct/how-to/
diff --git a/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/ALSAlgorithm.scala
index 64d570c..618c99c 100644
--- a/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/ALSAlgorithm.scala
@@ -62,6 +62,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
 
   @transient lazy val logger = Logger[this.type]
 
+  override
   def train(sc: SparkContext, data: PreparedData): ALSModel = {
     require(!data.viewEvents.take(1).isEmpty,
       s"viewEvents in PreparedData cannot be empty." +
@@ -133,6 +134,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
     )
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
 
     val productFeatures = model.productFeatures
diff --git a/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/CooccurrenceAlgorithm.scala b/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/CooccurrenceAlgorithm.scala
index 76307e7..57844b6 100644
--- a/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/CooccurrenceAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/CooccurrenceAlgorithm.scala
@@ -44,6 +44,7 @@ class CooccurrenceModel(
 class CooccurrenceAlgorithm(val ap: CooccurrenceAlgorithmParams)
   extends P2LAlgorithm[PreparedData, CooccurrenceModel, Query, PredictedResult] {
 
+  override
   def train(sc: SparkContext, data: PreparedData): CooccurrenceModel = {
 
     val itemStringIntMap = BiMap.stringInt(data.items.keys)
@@ -103,6 +104,7 @@ class CooccurrenceAlgorithm(val ap: CooccurrenceAlgorithmParams)
     topCooccurrences
   }
 
+  override
   def predict(model: CooccurrenceModel, query: Query): PredictedResult = {
 
     // convert items to Int index
diff --git a/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/Preparator.scala
index f2e3fa5..c8fc56d 100644
--- a/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/multi-events-multi-algos/src/main/scala/Preparator.scala
@@ -26,6 +26,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(
       users = trainingData.users,
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala
index fd84284..67bbff8 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/ALSAlgorithm.scala
@@ -58,6 +58,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
 
   @transient lazy val logger = Logger[this.type]
 
+  override
   def train(sc: SparkContext, data: PreparedData): ALSModel = {
     require(data.followEvents.take(1).nonEmpty,
       s"followEvents in PreparedData cannot be empty." +
@@ -125,6 +126,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
     )
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
 
     val similarUserFeatures = model.similarUserFeatures
diff --git a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala
index a687fc1..efce8b6 100644
--- a/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/recommended-user/src/main/scala/Preparator.scala
@@ -24,6 +24,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(
       users = trainingData.users,
diff --git a/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/ALSAlgorithm.scala
index 3bf3402..b2ef125 100644
--- a/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/ALSAlgorithm.scala
@@ -62,6 +62,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
 
   @transient lazy val logger = Logger[this.type]
 
+  override
   def train(sc: SparkContext, data: PreparedData): ALSModel = {
     require(!data.viewEvents.take(1).isEmpty,
       s"viewEvents in PreparedData cannot be empty." +
@@ -133,6 +134,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
     )
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
 
     val productFeatures = model.productFeatures
diff --git a/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/CooccurrenceAlgorithm.scala b/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/CooccurrenceAlgorithm.scala
index 470d87d..e58eae8 100644
--- a/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/CooccurrenceAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/CooccurrenceAlgorithm.scala
@@ -44,6 +44,7 @@ class CooccurrenceModel(
 class CooccurrenceAlgorithm(val ap: CooccurrenceAlgorithmParams)
   extends P2LAlgorithm[PreparedData, CooccurrenceModel, Query, PredictedResult] {
 
+  override
   def train(sc: SparkContext, data: PreparedData): CooccurrenceModel = {
 
     val itemStringIntMap = BiMap.stringInt(data.items.keys)
@@ -103,6 +104,7 @@ class CooccurrenceAlgorithm(val ap: CooccurrenceAlgorithmParams)
     topCooccurrences
   }
 
+  override
   def predict(model: CooccurrenceModel, query: Query): PredictedResult = {
 
     // convert items to Int index
diff --git a/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/Preparator.scala
index ece997b..56b774e 100644
--- a/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/return-item-properties/src/main/scala/Preparator.scala
@@ -26,6 +26,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(
       users = trainingData.users,
diff --git a/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/ALSAlgorithm.scala
index 50c26b5..7e156ff 100644
--- a/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/ALSAlgorithm.scala
@@ -62,6 +62,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
 
   @transient lazy val logger = Logger[this.type]
 
+  override
   def train(sc: SparkContext, data: PreparedData): ALSModel = {
     require(!data.viewEvents.take(1).isEmpty,
       s"viewEvents in PreparedData cannot be empty." +
@@ -129,6 +130,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
     )
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
 
     val productFeatures = model.productFeatures
diff --git a/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/CooccurrenceAlgorithm.scala b/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/CooccurrenceAlgorithm.scala
index 76307e7..57844b6 100644
--- a/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/CooccurrenceAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/CooccurrenceAlgorithm.scala
@@ -44,6 +44,7 @@ class CooccurrenceModel(
 class CooccurrenceAlgorithm(val ap: CooccurrenceAlgorithmParams)
   extends P2LAlgorithm[PreparedData, CooccurrenceModel, Query, PredictedResult] {
 
+  override
   def train(sc: SparkContext, data: PreparedData): CooccurrenceModel = {
 
     val itemStringIntMap = BiMap.stringInt(data.items.keys)
@@ -103,6 +104,7 @@ class CooccurrenceAlgorithm(val ap: CooccurrenceAlgorithmParams)
     topCooccurrences
   }
 
+  override
   def predict(model: CooccurrenceModel, query: Query): PredictedResult = {
 
     // convert items to Int index
diff --git a/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/Preparator.scala
index 908b9b8..cc94dd9 100644
--- a/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/rid-user-set-event/src/main/scala/Preparator.scala
@@ -26,6 +26,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(
       items = trainingData.items,
diff --git a/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/ALSAlgorithm.scala b/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/ALSAlgorithm.scala
index 507343e..6c2f28c 100644
--- a/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/ALSAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/ALSAlgorithm.scala
@@ -62,6 +62,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
 
   @transient lazy val logger = Logger[this.type]
 
+  override
   def train(sc:SparkContext ,data: PreparedData): ALSModel = {
     require(!data.rateEvents.take(1).isEmpty, // MODIFIED
       s"rateEvents in PreparedData cannot be empty." + // MODIFIED
@@ -141,6 +142,7 @@ class ALSAlgorithm(val ap: ALSAlgorithmParams)
     )
   }
 
+  override
   def predict(model: ALSModel, query: Query): PredictedResult = {
 
     val productFeatures = model.productFeatures
diff --git a/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/CooccurrenceAlgorithm.scala b/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/CooccurrenceAlgorithm.scala
index 0edc76e..63ac2b7 100644
--- a/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/CooccurrenceAlgorithm.scala
+++ b/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/CooccurrenceAlgorithm.scala
@@ -44,6 +44,7 @@ class CooccurrenceModel(
 class CooccurrenceAlgorithm(val ap: CooccurrenceAlgorithmParams)
   extends P2LAlgorithm[PreparedData, CooccurrenceModel, Query, PredictedResult] {
 
+  override
   def train(sc: SparkContext, data: PreparedData): CooccurrenceModel = {
 
     val itemStringIntMap = BiMap.stringInt(data.items.keys)
@@ -104,6 +105,7 @@ class CooccurrenceAlgorithm(val ap: CooccurrenceAlgorithmParams)
     topCooccurrences
   }
 
+  override
   def predict(model: CooccurrenceModel, query: Query): PredictedResult = {
 
     // convert items to Int index
diff --git a/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/Preparator.scala b/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/Preparator.scala
index 187e423..4139bce 100644
--- a/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/Preparator.scala
+++ b/examples/scala-parallel-similarproduct/train-with-rate-event/src/main/scala/Preparator.scala
@@ -26,6 +26,7 @@ import org.apache.spark.rdd.RDD
 class Preparator
   extends PPreparator[TrainingData, PreparedData] {
 
+  override
   def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = {
     new PreparedData(
       users = trainingData.users,