Posted to commits@spark.apache.org by va...@apache.org on 2016/08/08 22:54:11 UTC

spark git commit: [SPARK-16779][TRIVIAL] Avoid using postfix operators where they do not add much and remove whitelisting

Repository: spark
Updated Branches:
  refs/heads/master 865023905 -> 9216901d5


[SPARK-16779][TRIVIAL] Avoid using postfix operators where they do not add much and remove whitelisting

## What changes were proposed in this pull request?

Avoid using postfix operators for command execution in SQLQuerySuite, where they weren't whitelisted, and audit the existing whitelistings, removing postfix operators from most places. The notable places where postfix notation remains are the XML parsing and time-unit (seconds, millis, etc.) code, where it arguably improves readability.
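
For context, the sketch below is not part of the patch; it is a standalone illustration, assuming only the Scala standard library, of the two patterns touched here: the postfix style that required whitelisting via `scala.language.postfixOps`, and the explicit method-call style adopted instead.

```scala
// Hedged, standalone sketch; names here (PostfixStyleExample, commandAvailable)
// are illustrative, not identifiers from the patch.
import scala.concurrent.duration._
import scala.sys.process._
import scala.util.Try

object PostfixStyleExample {
  // Postfix style needs `import scala.language.postfixOps` and relies on `!!`
  // throwing on a non-zero exit code:
  //   Try(Process(command) !!).isSuccess
  // The explicit style avoids the language feature and checks the exit code directly:
  def commandAvailable(command: String): Boolean = {
    val attempt = Try(Process(command).run().exitValue())
    attempt.isSuccess && attempt.get == 0
  }

  // Time-unit DSL: `5 days` (postfix) vs `5.days` (plain method call); both yield
  // a FiniteDuration, so the dotted form keeps readability without the whitelist.
  val thresholdTime: Long = System.currentTimeMillis() - 5.days.toMillis

  def main(args: Array[String]): Unit = {
    println(commandAvailable("ls"))
    println(thresholdTime)
  }
}
```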

## How was this patch tested?

Existing tests.

Author: Holden Karau <ho...@us.ibm.com>

Closes #14407 from holdenk/SPARK-16779.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9216901d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9216901d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9216901d

Branch: refs/heads/master
Commit: 9216901d52c9c763bfb908013587dcf5e781f15b
Parents: 8650239
Author: Holden Karau <ho...@us.ibm.com>
Authored: Mon Aug 8 15:54:03 2016 -0700
Committer: Marcelo Vanzin <va...@cloudera.com>
Committed: Mon Aug 8 15:54:03 2016 -0700

----------------------------------------------------------------------
 .../src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala | 1 -
 .../scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala     | 1 -
 core/src/main/scala/org/apache/spark/util/RpcUtils.scala         | 2 --
 .../src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala | 1 -
 .../org/apache/spark/deploy/history/ApplicationCacheSuite.scala  | 1 -
 core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala     | 4 ++--
 .../test/scala/org/apache/spark/storage/MemoryStoreSuite.scala   | 1 -
 .../org/apache/spark/streaming/kafka010/KafkaTestUtils.scala     | 2 --
 .../scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala  | 2 --
 graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala | 3 +--
 .../main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala | 1 -
 .../scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala   | 1 -
 .../src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala | 2 --
 .../src/test/scala/org/apache/spark/sql/DataFrameSuite.scala     | 1 -
 .../test/scala/org/apache/spark/sql/DatasetAggregatorSuite.scala | 2 --
 .../src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala  | 2 --
 .../test/scala/org/apache/spark/sql/DatasetPrimitiveSuite.scala  | 2 --
 sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala  | 2 --
 .../org/apache/spark/sql/hive/execution/SQLQuerySuite.scala      | 3 ++-
 .../scala/org/apache/spark/streaming/InputStreamsSuite.scala     | 1 -
 .../org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala  | 4 +---
 21 files changed, 6 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 90c71cc..671e8e4 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -25,7 +25,6 @@ import java.util.{Arrays, Comparator, Date}
 
 import scala.collection.JavaConverters._
 import scala.concurrent.duration._
-import scala.language.postfixOps
 import scala.util.control.NonFatal
 
 import com.google.common.primitives.Longs

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index 2ce49ca..dc05e76 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -25,7 +25,6 @@ import java.util.concurrent.atomic.AtomicLong
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
-import scala.language.postfixOps
 import scala.util.Random
 
 import org.apache.spark._

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/core/src/main/scala/org/apache/spark/util/RpcUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/RpcUtils.scala b/core/src/main/scala/org/apache/spark/util/RpcUtils.scala
index 2bb8de5..e3b5883 100644
--- a/core/src/main/scala/org/apache/spark/util/RpcUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/RpcUtils.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.util
 
-import scala.language.postfixOps
-
 import org.apache.spark.SparkConf
 import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcEnv, RpcTimeout}
 

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
index 5e2ba31..5f59c17 100644
--- a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
@@ -22,7 +22,6 @@ import java.util.concurrent.{ExecutorService, TimeUnit}
 import scala.collection.Map
 import scala.collection.mutable
 import scala.concurrent.duration._
-import scala.language.postfixOps
 
 import org.mockito.Matchers
 import org.mockito.Matchers._

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
index 4ab000b..e3304be 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
@@ -23,7 +23,6 @@ import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
 
 import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
-import scala.language.postfixOps
 
 import com.codahale.metrics.Counter
 import com.google.common.cache.LoadingCache

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
index 59b9097..387f3e2 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
@@ -21,7 +21,6 @@ import java.io.File
 
 import scala.collection.Map
 import scala.io.Codec
-import scala.language.postfixOps
 import scala.sys.process._
 import scala.util.Try
 
@@ -215,7 +214,8 @@ class PipedRDDSuite extends SparkFunSuite with SharedSparkContext {
   }
 
   def testCommandAvailable(command: String): Boolean = {
-    Try(Process(command) !!).isSuccess
+    val attempt = Try(Process(command).run().exitValue())
+    attempt.isSuccess && attempt.get == 0
   }
 
   def testExportInputFile(varName: String) {

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala b/core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala
index 145d432..c11de82 100644
--- a/core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.storage
 import java.nio.ByteBuffer
 
 import scala.language.implicitConversions
-import scala.language.postfixOps
 import scala.language.reflectiveCalls
 import scala.reflect.ClassTag
 

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala
----------------------------------------------------------------------
diff --git a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala
index ecabe1c..e73823e 100644
--- a/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala
+++ b/external/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala
@@ -25,7 +25,6 @@ import java.util.concurrent.TimeoutException
 
 import scala.annotation.tailrec
 import scala.collection.JavaConverters._
-import scala.language.postfixOps
 import scala.util.control.NonFatal
 
 import kafka.admin.AdminUtils
@@ -279,4 +278,3 @@ private[kafka010] class KafkaTestUtils extends Logging {
     }
   }
 }
-

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/external/kafka-0-8/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala
----------------------------------------------------------------------
diff --git a/external/kafka-0-8/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala b/external/kafka-0-8/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala
index abfd7aa..03c9ca7 100644
--- a/external/kafka-0-8/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala
+++ b/external/kafka-0-8/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala
@@ -25,7 +25,6 @@ import java.util.concurrent.TimeoutException
 
 import scala.annotation.tailrec
 import scala.collection.JavaConverters._
-import scala.language.postfixOps
 import scala.util.control.NonFatal
 
 import kafka.admin.AdminUtils
@@ -274,4 +273,3 @@ private[kafka] class KafkaTestUtils extends Logging {
     }
   }
 }
-

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
index 0a1622b..2f5bd4e 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.graphx.lib
 
-import scala.language.postfixOps
 import scala.reflect.ClassTag
 
 import org.apache.spark.graphx._
@@ -109,7 +108,7 @@ object PageRank extends Logging {
     require(resetProb >= 0 && resetProb <= 1, s"Random reset probability must belong" +
       s" to [0, 1], but got ${resetProb}")
 
-    val personalized = srcId isDefined
+    val personalized = srcId.isDefined
     val src: VertexId = srcId.getOrElse(-1L)
 
     // Initialize the PageRank graph with each edge attribute having

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala
index 898a09e..42c5bcd 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala
@@ -19,7 +19,6 @@ package org.apache.spark.mllib.util
 
 import java.{util => ju}
 
-import scala.language.postfixOps
 import scala.util.Random
 
 import org.apache.spark.SparkContext

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
----------------------------------------------------------------------
diff --git a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
index 12e9856..3d622d4 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
@@ -27,7 +27,6 @@ import java.util
 import scala.concurrent.duration._
 import scala.io.Source
 import scala.language.implicitConversions
-import scala.language.postfixOps
 
 import com.google.common.io.Files
 import org.mockito.Matchers.anyString

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
index e1675c9..a10c0e3 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.types
 
-import scala.language.postfixOps
-
 import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.SparkFunSuite

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 62cfd24..499f318 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -21,7 +21,6 @@ import java.io.File
 import java.nio.charset.StandardCharsets
 import java.util.UUID
 
-import scala.language.postfixOps
 import scala.util.Random
 
 import org.scalatest.Matchers._

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/sql/core/src/test/scala/org/apache/spark/sql/DatasetAggregatorSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetAggregatorSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetAggregatorSuite.scala
index ddc4dcd..b117fbd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetAggregatorSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetAggregatorSuite.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import scala.language.postfixOps
-
 import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
 import org.apache.spark.sql.expressions.Aggregator
 import org.apache.spark.sql.expressions.scalalang.typed

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
index ac9f6c2..8d5e964 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import scala.language.postfixOps
-
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.test.SharedSQLContext
 

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/sql/core/src/test/scala/org/apache/spark/sql/DatasetPrimitiveSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetPrimitiveSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetPrimitiveSuite.scala
index 6aa3d3f..f8d4c61 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetPrimitiveSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetPrimitiveSuite.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import scala.language.postfixOps
-
 import org.apache.spark.sql.test.SharedSQLContext
 
 case class IntClass(value: Int)

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
index 8a756fd..88fb147 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
@@ -20,8 +20,6 @@ package org.apache.spark.sql
 import java.io.{Externalizable, ObjectInput, ObjectOutput}
 import java.sql.{Date, Timestamp}
 
-import scala.language.postfixOps
-
 import org.apache.spark.sql.catalyst.encoders.{OuterScopes, RowEncoder}
 import org.apache.spark.sql.catalyst.util.sideBySide
 import org.apache.spark.sql.execution.streaming.MemoryStream

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index b659325..e6fe47a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -1790,6 +1790,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   def testCommandAvailable(command: String): Boolean = {
-    Try(Process(command) !!).isSuccess
+    val attempt = Try(Process(command).run().exitValue())
+    attempt.isSuccess && attempt.get == 0
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
----------------------------------------------------------------------
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
index 00d506c..9ecfa48 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
@@ -25,7 +25,6 @@ import java.util.concurrent.atomic.AtomicInteger
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable
-import scala.language.postfixOps
 
 import com.google.common.io.Files
 import org.apache.hadoop.fs.Path

http://git-wip-us.apache.org/repos/asf/spark/blob/9216901d/yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala
index a6a4fec..310a7a6 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/AMDelegationTokenRenewer.scala
@@ -19,8 +19,6 @@ package org.apache.spark.deploy.yarn
 import java.security.PrivilegedExceptionAction
 import java.util.concurrent.{Executors, TimeUnit}
 
-import scala.language.postfixOps
-
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.security.UserGroupInformation
@@ -128,7 +126,7 @@ private[yarn] class AMDelegationTokenRenewer(
     try {
       val remoteFs = FileSystem.get(freshHadoopConf)
       val credentialsPath = new Path(credentialsFile)
-      val thresholdTime = System.currentTimeMillis() - (daysToKeepFiles days).toMillis
+      val thresholdTime = System.currentTimeMillis() - (daysToKeepFiles.days).toMillis
       hadoopUtil.listFilesSorted(
         remoteFs, credentialsPath.getParent,
         credentialsPath.getName, SparkHadoopUtil.SPARK_YARN_CREDS_TEMP_EXTENSION)

