Posted to issues@flink.apache.org by chiwanpark <gi...@git.apache.org> on 2016/06/01 01:44:11 UTC

[GitHub] flink pull request: [FLINK-1979] Add logistic loss, hinge loss and regulariz...

Github user chiwanpark commented on a diff in the pull request:

    https://github.com/apache/flink/pull/1985#discussion_r65291353
  
    --- Diff: flink-libraries/flink-ml/src/main/scala/org/apache/flink/ml/optimization/PartialLossFunction.scala ---
    @@ -47,21 +47,106 @@ object SquaredLoss extends PartialLossFunction {
     
       /** Calculates the loss depending on the label and the prediction
         *
    -    * @param prediction
    -    * @param label
    -    * @return
    +    * @param prediction The predicted value
    +    * @param label The true value
    +    * @return The loss
         */
       override def loss(prediction: Double, label: Double): Double = {
         0.5 * (prediction - label) * (prediction - label)
       }
     
       /** Calculates the derivative of the [[PartialLossFunction]]
         *
    -    * @param prediction
    -    * @param label
    -    * @return
    +    * @param prediction The predicted value
    +    * @param label The true value
    +    * @return The derivative of the loss function
         */
       override def derivative(prediction: Double, label: Double): Double = {
         (prediction - label)
       }
     }
    +
    +/** Logistic loss function which can be used with the [[GenericLossFunction]]
    +  *
    +  * The [[LogisticLoss]] function implements `log(1 + exp(-prediction*label))`
    +  * for binary classification with label in {-1, 1}.
    +  */
    +object LogisticLoss extends PartialLossFunction {
    +
    +  /** Calculates the loss depending on the label and the prediction
    +    *
    +    * @param prediction The predicted value
    +    * @param label The true value
    +    * @return The loss
    +    */
    +  override def loss(prediction: Double, label: Double): Double = {
    +    val z = prediction * label
    +
    +    // based on implementation in scikit-learn
    +    // approximately equal and saves the computation of the log
    +    if (z > 18) {
    +      return math.exp(-z)
    +    }
    +    else if (z < -18) {
    +      return -z
    +    }
    +
    +    math.log(1 + math.exp(-z))
    --- End diff --
    
    Using `return` is not recommended in Scala, since an `if`/`else` is an expression and the last expression of each branch is already the result. Could you change this to the following?
    
    ```scala
    if (z > 18) {
      math.exp(-z)
    } else if (z < -18) {
      -z
    } else {
      math.log(1 + math.exp(-z))
    }
    ```
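    
    For reference, here is a minimal, self-contained sketch of that expression-oriented style (the object and method names are illustrative, not the actual Flink ML code), together with a quick check that the `|z| > 18` branches stay close to the exact `math.log(1 + math.exp(-z))`:
    
    ```scala
    // Minimal, self-contained sketch; names are illustrative only.
    object LogisticLossSketch {
    
      /** Expression-oriented version: each branch of the `if`/`else` is the
        * result, so no explicit `return` is needed.
        */
      def logisticLoss(prediction: Double, label: Double): Double = {
        val z = prediction * label
        if (z > 18) {
          // for large z, log(1 + exp(-z)) is approximately exp(-z), skipping the log call
          math.exp(-z)
        } else if (z < -18) {
          // for very negative z, exp(-z) dominates, so log(1 + exp(-z)) is approximately -z
          -z
        } else {
          math.log(1 + math.exp(-z))
        }
      }
    
      def main(args: Array[String]): Unit = {
        // quick sanity check of the approximations around the cutoffs
        for (z <- Seq(-20.0, -18.5, 0.0, 18.5, 20.0)) {
          val exact = math.log(1 + math.exp(-z))
          println(f"z = $z%6.1f  approx = ${logisticLoss(z, 1.0)}%.12f  exact = $exact%.12f")
        }
      }
    }
    ```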

