You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2019/05/24 02:17:27 UTC

[GitHub] [spark] zhengruifeng commented on a change in pull request #24648: [SPARK-27777][ML] Eliminate unnecessary sliding job in AreaUnderCurve

zhengruifeng commented on a change in pull request #24648: [SPARK-27777][ML] Eliminate unnecessary sliding job in AreaUnderCurve
URL: https://github.com/apache/spark/pull/24648#discussion_r287194704
 
 

 ##########
 File path: mllib/src/main/scala/org/apache/spark/mllib/evaluation/AreaUnderCurve.scala
 ##########
 @@ -42,10 +41,40 @@ private[evaluation] object AreaUnderCurve {
    * @param curve an RDD of ordered 2D points stored in pairs representing a curve
    */
   def of(curve: RDD[(Double, Double)]): Double = {
-    curve.sliding(2).aggregate(0.0)(
-      seqOp = (auc: Double, points: Array[(Double, Double)]) => auc + trapezoid(points),
-      combOp = _ + _
-    )
+    val localAreas = curve.mapPartitions { iter =>
+      var localArea = 0.0
+      var firstPoint = Option.empty[(Double, Double)]
+      var lastPoint = Option.empty[(Double, Double)]
+
+      iter.sliding(2).foreach { points =>
+        if (firstPoint.isEmpty) {
+          firstPoint = Some(points.head)
+        }
+        lastPoint = Some(points.last)
+
+        if (points.length == 2) {
+          localArea += trapezoid(points)
+        }
+      }
+
+      if (firstPoint.nonEmpty) {
+        require(lastPoint.nonEmpty)
 
 Review comment:
   This does not fail on empty partitions or partitions containing only one point.
   However, I will short-circuit these checks, since doing so is simple.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org