Posted to commits@spark.apache.org by an...@apache.org on 2015/06/03 19:39:08 UTC

[5/5] spark git commit: [SPARK-7558] Demarcate tests in unit-tests.log (1.3)

[SPARK-7558] Demarcate tests in unit-tests.log (1.3)

This includes the following commits:

original: 9eb222c
hotfix1: 8c99793
hotfix2: a4f2412
scalastyle check: 609c492

---
Original patch #6441
Branch-1.4 patch #6598

Author: Andrew Or <an...@databricks.com>

Closes #6602 from andrewor14/demarcate-tests-1.3 and squashes the following commits:

a75ff8f [Andrew Or] Fix hive-thrift server log4j problem
f782edd [Andrew Or] [SPARK-7558] Guard against direct uses of FunSuite / FunSuiteLike
2b7a4f4 [Andrew Or] Fix tests?
fec05c2 [Andrew Or] Fix tests
5342d50 [Andrew Or] Various whitespace changes (minor)
9af2756 [Andrew Or] Make all test suites extend SparkFunSuite instead of FunSuite
192a47c [Andrew Or] Fix log message
95ff5eb [Andrew Or] Add core tests as dependencies in all modules
8dffa0e [Andrew Or] Introduce base abstract class for all test suites
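
For readers skimming the diff: the demarcation comes from a new shared base
class that overrides ScalaTest's withFixture to log a banner before and after
every test, plus a sweep making every suite extend it. A minimal sketch of the
pattern (illustrative only; the actual SparkFunSuite added by this commit
appears in full in the diff below):

    import org.scalatest.{FunSuite, Outcome}

    // Sketch of the demarcation idiom; the real SparkFunSuite also mixes in
    // Logging and writes these banners via logInfo rather than println.
    abstract class DemarcatedFunSuite extends FunSuite {
      override def withFixture(test: NoArgTest): Outcome = {
        val banner = s"${getClass.getName}: '${test.text}'"
        println(s"\n\n===== TEST OUTPUT FOR $banner =====\n")
        try {
          test()
        } finally {
          println(s"\n\n===== FINISHED $banner =====\n")
        }
      }
    }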


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e5747ee3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e5747ee3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e5747ee3

Branch: refs/heads/branch-1.3
Commit: e5747ee3abe3ccf7988042e2408492153ce19eea
Parents: bbd3772
Author: Andrew Or <an...@databricks.com>
Authored: Wed Jun 3 10:38:56 2015 -0700
Committer: Andrew Or <an...@databricks.com>
Committed: Wed Jun 3 10:38:56 2015 -0700

----------------------------------------------------------------------
 bagel/pom.xml                                   |  7 +++
 .../org/apache/spark/bagel/BagelSuite.scala     |  4 +-
 core/pom.xml                                    |  6 +++
 .../org/apache/spark/AccumulatorSuite.scala     |  3 +-
 .../org/apache/spark/CacheManagerSuite.scala    |  4 +-
 .../org/apache/spark/CheckpointSuite.scala      |  4 +-
 .../org/apache/spark/ContextCleanerSuite.scala  |  4 +-
 .../org/apache/spark/DistributedSuite.scala     |  3 +-
 .../scala/org/apache/spark/DriverSuite.scala    |  3 +-
 .../spark/ExecutorAllocationManagerSuite.scala  |  8 +++-
 .../scala/org/apache/spark/FailureSuite.scala   |  4 +-
 .../org/apache/spark/FileServerSuite.scala      |  3 +-
 .../test/scala/org/apache/spark/FileSuite.scala |  3 +-
 .../org/apache/spark/FutureActionSuite.scala    |  8 +++-
 .../apache/spark/ImplicitOrderingSuite.scala    | 30 ++++++------
 .../org/apache/spark/JobCancellationSuite.scala |  4 +-
 .../apache/spark/MapOutputTrackerSuite.scala    |  3 +-
 .../org/apache/spark/PartitioningSuite.scala    |  4 +-
 .../org/apache/spark/SSLOptionsSuite.scala      |  4 +-
 .../org/apache/spark/SecurityManagerSuite.scala |  4 +-
 .../scala/org/apache/spark/ShuffleSuite.scala   |  3 +-
 .../scala/org/apache/spark/SparkConfSuite.scala |  3 +-
 .../apache/spark/SparkContextInfoSuite.scala    |  4 +-
 .../SparkContextSchedulerCreationSuite.scala    |  4 +-
 .../org/apache/spark/SparkContextSuite.scala    |  4 +-
 .../scala/org/apache/spark/SparkFunSuite.scala  | 48 ++++++++++++++++++++
 .../org/apache/spark/StatusTrackerSuite.scala   |  6 +--
 .../scala/org/apache/spark/ThreadingSuite.scala |  4 +-
 .../scala/org/apache/spark/UnpersistSuite.scala |  3 +-
 .../spark/api/python/PythonBroadcastSuite.scala |  6 +--
 .../spark/api/python/PythonRDDSuite.scala       |  4 +-
 .../spark/api/python/SerDeUtilSuite.scala       |  6 +--
 .../apache/spark/broadcast/BroadcastSuite.scala |  6 +--
 .../org/apache/spark/deploy/ClientSuite.scala   |  5 +-
 .../apache/spark/deploy/CommandUtilsSuite.scala |  5 +-
 .../apache/spark/deploy/JsonProtocolSuite.scala |  5 +-
 .../spark/deploy/LogUrlsStandaloneSuite.scala   |  6 +--
 .../apache/spark/deploy/PythonRunnerSuite.scala |  4 +-
 .../apache/spark/deploy/SparkSubmitSuite.scala  |  8 +++-
 .../spark/deploy/SparkSubmitUtilsSuite.scala    |  6 ++-
 .../deploy/history/FsHistoryProviderSuite.scala |  6 +--
 .../deploy/history/HistoryServerSuite.scala     |  4 +-
 .../spark/deploy/master/MasterSuite.scala       |  6 +--
 .../deploy/rest/StandaloneRestSubmitSuite.scala |  4 +-
 .../deploy/rest/SubmitRestProtocolSuite.scala   |  5 +-
 .../spark/deploy/worker/DriverRunnerTest.scala  |  5 +-
 .../deploy/worker/ExecutorRunnerTest.scala      |  6 +--
 .../deploy/worker/WorkerArgumentsTest.scala     |  5 +-
 .../spark/deploy/worker/WorkerSuite.scala       |  6 +--
 .../deploy/worker/WorkerWatcherSuite.scala      |  5 +-
 .../spark/deploy/worker/ui/LogPageSuite.scala   |  6 ++-
 .../spark/executor/TaskMetricsSuite.scala       |  4 +-
 .../input/WholeTextFileRecordReaderSuite.scala  |  5 +-
 .../apache/spark/io/CompressionCodecSuite.scala |  6 +--
 .../spark/metrics/InputOutputMetricsSuite.scala |  6 +--
 .../spark/metrics/MetricsConfigSuite.scala      |  6 ++-
 .../spark/metrics/MetricsSystemSuite.scala      |  6 +--
 .../netty/NettyBlockTransferSecuritySuite.scala |  6 +--
 .../network/nio/ConnectionManagerSuite.scala    |  6 +--
 .../apache/spark/rdd/AsyncRDDActionsSuite.scala |  6 +--
 .../org/apache/spark/rdd/DoubleRDDSuite.scala   |  4 +-
 .../org/apache/spark/rdd/JdbcRDDSuite.scala     |  6 +--
 .../spark/rdd/PairRDDFunctionsSuite.scala       |  6 +--
 .../rdd/ParallelCollectionSplitSuite.scala      |  5 +-
 .../spark/rdd/PartitionPruningRDDSuite.scala    |  6 +--
 .../rdd/PartitionwiseSampledRDDSuite.scala      |  6 +--
 .../org/apache/spark/rdd/PipedRDDSuite.scala    |  3 +-
 .../scala/org/apache/spark/rdd/RDDSuite.scala   |  4 +-
 .../org/apache/spark/rdd/SortingSuite.scala     |  5 +-
 .../spark/rdd/ZippedPartitionsSuite.scala       |  5 +-
 .../CoarseGrainedSchedulerBackendSuite.scala    |  6 +--
 .../spark/scheduler/DAGSchedulerSuite.scala     |  8 +++-
 .../scheduler/EventLoggingListenerSuite.scala   |  5 +-
 .../apache/spark/scheduler/MapStatusSuite.scala |  5 +-
 .../OutputCommitCoordinatorSuite.scala          |  4 +-
 .../org/apache/spark/scheduler/PoolSuite.scala  |  6 +--
 .../spark/scheduler/ReplayListenerSuite.scala   |  6 +--
 .../spark/scheduler/SparkListenerSuite.scala    |  6 +--
 .../SparkListenerWithClusterSuite.scala         |  6 +--
 .../spark/scheduler/TaskContextSuite.scala      |  3 +-
 .../spark/scheduler/TaskResultGetterSuite.scala |  6 +--
 .../scheduler/TaskSchedulerImplSuite.scala      |  4 +-
 .../spark/scheduler/TaskSetManagerSuite.scala   |  4 +-
 .../mesos/MesosSchedulerBackendSuite.scala      |  5 +-
 .../mesos/MesosTaskLaunchDataSuite.scala        |  5 +-
 .../KryoSerializerDistributedSuite.scala        |  5 +-
 .../KryoSerializerResizableOutputSuite.scala    |  6 +--
 .../spark/serializer/KryoSerializerSuite.scala  |  5 +-
 .../ProactiveClosureSerializationSuite.scala    |  6 +--
 .../serializer/SerializationDebuggerSuite.scala |  6 ++-
 .../shuffle/ShuffleMemoryManagerSuite.scala     |  5 +-
 .../shuffle/hash/HashShuffleManagerSuite.scala  |  6 +--
 .../org/apache/spark/storage/BlockIdSuite.scala |  4 +-
 .../storage/BlockManagerReplicationSuite.scala  |  6 +--
 .../spark/storage/BlockManagerSuite.scala       |  4 +-
 .../spark/storage/BlockObjectWriterSuite.scala  |  5 +-
 .../spark/storage/DiskBlockManagerSuite.scala   |  6 +--
 .../spark/storage/FlatmapIteratorSuite.scala    |  5 +-
 .../apache/spark/storage/LocalDirsSuite.scala   |  5 +-
 .../ShuffleBlockFetcherIteratorSuite.scala      |  5 +-
 .../storage/StorageStatusListenerSuite.scala    |  5 +-
 .../org/apache/spark/storage/StorageSuite.scala |  4 +-
 .../org/apache/spark/ui/UISeleniumSuite.scala   |  2 +-
 .../scala/org/apache/spark/ui/UISuite.scala     |  5 +-
 .../ui/jobs/JobProgressListenerSuite.scala      |  3 +-
 .../spark/ui/storage/StorageTabSuite.scala      |  6 +--
 .../org/apache/spark/util/AkkaUtilsSuite.scala  |  4 +-
 .../apache/spark/util/ClosureCleanerSuite.scala |  6 +--
 .../spark/util/CompletionIteratorSuite.scala    |  4 +-
 .../apache/spark/util/DistributionSuite.scala   |  5 +-
 .../org/apache/spark/util/EventLoopSuite.scala  |  5 +-
 .../apache/spark/util/FileAppenderSuite.scala   |  6 +--
 .../apache/spark/util/JsonProtocolSuite.scala   |  3 +-
 .../spark/util/MutableURLClassLoaderSuite.scala |  6 +--
 .../apache/spark/util/NextIteratorSuite.scala   |  5 +-
 .../spark/util/ResetSystemProperties.scala      |  4 +-
 .../apache/spark/util/SizeEstimatorSuite.scala  |  9 +++-
 .../spark/util/TimeStampedHashMapSuite.scala    |  4 +-
 .../org/apache/spark/util/UtilsSuite.scala      |  5 +-
 .../org/apache/spark/util/VectorSuite.scala     |  4 +-
 .../util/collection/AppendOnlyMapSuite.scala    |  4 +-
 .../spark/util/collection/BitSetSuite.scala     |  4 +-
 .../util/collection/CompactBufferSuite.scala    |  4 +-
 .../collection/ExternalAppendOnlyMapSuite.scala |  4 +-
 .../util/collection/ExternalSorterSuite.scala   |  4 +-
 .../util/collection/OpenHashMapSuite.scala      |  4 +-
 .../util/collection/OpenHashSetSuite.scala      |  4 +-
 .../PrimitiveKeyOpenHashMapSuite.scala          |  4 +-
 .../util/collection/PrimitiveVectorSuite.scala  |  5 +-
 .../util/collection/SizeTrackerSuite.scala      |  5 +-
 .../spark/util/collection/SorterSuite.scala     |  5 +-
 .../io/ByteArrayChunkOutputStreamSuite.scala    |  4 +-
 .../spark/util/random/RandomSamplerSuite.scala  |  6 ++-
 .../spark/util/random/SamplingUtilsSuite.scala  |  5 +-
 .../spark/util/random/XORShiftRandomSuite.scala |  4 +-
 .../streaming/flume/sink/SparkSinkSuite.scala   |  9 ++++
 external/flume/pom.xml                          |  7 +++
 .../flume/FlumePollingStreamSuite.scala         |  6 +--
 .../streaming/flume/FlumeStreamSuite.scala      |  6 +--
 external/kafka/pom.xml                          |  7 +++
 .../streaming/kafka/KafkaStreamSuite.scala      |  6 +--
 external/mqtt/pom.xml                           |  7 +++
 .../spark/streaming/mqtt/MQTTStreamSuite.scala  |  6 +--
 external/twitter/pom.xml                        |  7 +++
 .../streaming/twitter/TwitterStreamSuite.scala  |  6 +--
 external/zeromq/pom.xml                         |  7 +++
 .../streaming/zeromq/ZeroMQStreamSuite.scala    |  4 +-
 graphx/pom.xml                                  |  7 +++
 .../org/apache/spark/graphx/EdgeRDDSuite.scala  |  5 +-
 .../org/apache/spark/graphx/EdgeSuite.scala     |  4 +-
 .../org/apache/spark/graphx/GraphOpsSuite.scala |  5 +-
 .../org/apache/spark/graphx/GraphSuite.scala    |  6 +--
 .../org/apache/spark/graphx/PregelSuite.scala   |  6 +--
 .../apache/spark/graphx/VertexRDDSuite.scala    |  6 +--
 .../spark/graphx/impl/EdgePartitionSuite.scala  |  6 +--
 .../graphx/impl/VertexPartitionSuite.scala      |  6 +--
 .../graphx/lib/ConnectedComponentsSuite.scala   |  6 +--
 .../graphx/lib/LabelPropagationSuite.scala      |  5 +-
 .../apache/spark/graphx/lib/PageRankSuite.scala |  6 +--
 .../spark/graphx/lib/SVDPlusPlusSuite.scala     |  5 +-
 .../spark/graphx/lib/ShortestPathsSuite.scala   |  6 +--
 .../lib/StronglyConnectedComponentsSuite.scala  |  6 +--
 .../spark/graphx/lib/TriangleCountSuite.scala   |  5 +-
 .../spark/graphx/util/BytecodeUtilsSuite.scala  |  4 +-
 .../graphx/util/GraphGeneratorsSuite.scala      |  5 +-
 mllib/pom.xml                                   |  7 +++
 .../org/apache/spark/ml/PipelineSuite.scala     |  4 +-
 .../LogisticRegressionSuite.scala               |  5 +-
 .../org/apache/spark/ml/param/ParamsSuite.scala |  4 +-
 .../spark/ml/recommendation/ALSSuite.scala      |  5 +-
 .../ml/regression/LinearRegressionSuite.scala   |  5 +-
 .../spark/ml/tuning/CrossValidatorSuite.scala   |  5 +-
 .../spark/ml/tuning/ParamGridBuilderSuite.scala |  5 +-
 .../mllib/api/python/PythonMLLibAPISuite.scala  |  5 +-
 .../LogisticRegressionSuite.scala               |  6 +--
 .../mllib/classification/NaiveBayesSuite.scala  |  8 ++--
 .../spark/mllib/classification/SVMSuite.scala   |  7 ++-
 .../StreamingLogisticRegressionSuite.scala      |  5 +-
 .../mllib/clustering/GaussianMixtureSuite.scala |  5 +-
 .../spark/mllib/clustering/KMeansSuite.scala    |  7 ++-
 .../spark/mllib/clustering/LDASuite.scala       |  5 +-
 .../PowerIterationClusteringSuite.scala         |  5 +-
 .../mllib/clustering/StreamingKMeansSuite.scala |  5 +-
 .../mllib/evaluation/AreaUnderCurveSuite.scala  |  5 +-
 .../BinaryClassificationMetricsSuite.scala      |  5 +-
 .../evaluation/MulticlassMetricsSuite.scala     |  5 +-
 .../evaluation/MultilabelMetricsSuite.scala     |  5 +-
 .../mllib/evaluation/RankingMetricsSuite.scala  |  5 +-
 .../evaluation/RegressionMetricsSuite.scala     |  5 +-
 .../mllib/feature/ChiSqSelectorSuite.scala      |  5 +-
 .../spark/mllib/feature/HashingTFSuite.scala    |  5 +-
 .../apache/spark/mllib/feature/IDFSuite.scala   |  5 +-
 .../spark/mllib/feature/NormalizerSuite.scala   |  5 +-
 .../mllib/feature/StandardScalerSuite.scala     |  5 +-
 .../spark/mllib/feature/Word2VecSuite.scala     |  5 +-
 .../apache/spark/mllib/fpm/FPGrowthSuite.scala  |  5 +-
 .../apache/spark/mllib/fpm/FPTreeSuite.scala    |  5 +-
 .../impl/PeriodicGraphCheckpointerSuite.scala   |  6 +--
 .../apache/spark/mllib/linalg/BLASSuite.scala   |  5 +-
 .../linalg/BreezeMatrixConversionSuite.scala    |  6 +--
 .../linalg/BreezeVectorConversionSuite.scala    |  6 +--
 .../spark/mllib/linalg/MatricesSuite.scala      |  4 +-
 .../spark/mllib/linalg/VectorsSuite.scala       |  5 +-
 .../linalg/distributed/BlockMatrixSuite.scala   |  5 +-
 .../distributed/CoordinateMatrixSuite.scala     |  5 +-
 .../distributed/IndexedRowMatrixSuite.scala     |  5 +-
 .../linalg/distributed/RowMatrixSuite.scala     |  6 +--
 .../optimization/GradientDescentSuite.scala     |  7 +--
 .../spark/mllib/optimization/LBFGSSuite.scala   |  7 +--
 .../spark/mllib/optimization/NNLSSuite.scala    |  5 +-
 .../mllib/random/RandomDataGeneratorSuite.scala |  5 +-
 .../spark/mllib/random/RandomRDDsSuite.scala    |  5 +-
 .../spark/mllib/rdd/RDDFunctionsSuite.scala     |  5 +-
 .../spark/mllib/recommendation/ALSSuite.scala   |  4 +-
 .../MatrixFactorizationModelSuite.scala         |  5 +-
 .../regression/IsotonicRegressionSuite.scala    |  5 +-
 .../mllib/regression/LabeledPointSuite.scala    |  5 +-
 .../spark/mllib/regression/LassoSuite.scala     |  7 ++-
 .../regression/LinearRegressionSuite.scala      |  7 ++-
 .../mllib/regression/RidgeRegressionSuite.scala |  6 +--
 .../StreamingLinearRegressionSuite.scala        |  5 +-
 .../spark/mllib/stat/CorrelationSuite.scala     |  5 +-
 .../spark/mllib/stat/HypothesisTestSuite.scala  |  6 +--
 .../MultivariateOnlineSummarizerSuite.scala     |  5 +-
 .../MultivariateGaussianSuite.scala             |  5 +-
 .../spark/mllib/tree/DecisionTreeSuite.scala    |  7 ++-
 .../mllib/tree/GradientBoostedTreesSuite.scala  |  5 +-
 .../apache/spark/mllib/tree/ImpuritySuite.scala |  5 +-
 .../spark/mllib/tree/RandomForestSuite.scala    |  5 +-
 .../mllib/tree/impl/BaggedPointSuite.scala      |  5 +-
 .../apache/spark/mllib/util/MLUtilsSuite.scala  |  5 +-
 .../spark/mllib/util/NumericParserSuite.scala   |  6 +--
 .../spark/mllib/util/TestingUtilsSuite.scala    |  4 +-
 repl/pom.xml                                    |  7 +++
 .../scala/org/apache/spark/repl/ReplSuite.scala |  5 +-
 .../scala/org/apache/spark/repl/ReplSuite.scala |  5 +-
 .../spark/repl/ExecutorClassLoaderSuite.scala   |  3 +-
 scalastyle-config.xml                           |  7 +++
 sql/catalyst/pom.xml                            |  7 +++
 .../spark/sql/catalyst/DistributionSuite.scala  |  5 +-
 .../sql/catalyst/ScalaReflectionSuite.scala     |  5 +-
 .../spark/sql/catalyst/SqlParserSuite.scala     |  4 +-
 .../sql/catalyst/analysis/AnalysisSuite.scala   |  5 +-
 .../analysis/DecimalPrecisionSuite.scala        |  5 +-
 .../expressions/AttributeSetSuite.scala         |  5 +-
 .../expressions/ExpressionEvaluationSuite.scala |  4 +-
 .../spark/sql/catalyst/plans/PlanTest.scala     |  5 +-
 .../sql/catalyst/plans/SameResultSuite.scala    |  5 +-
 .../sql/catalyst/trees/RuleExecutorSuite.scala  |  5 +-
 .../sql/catalyst/trees/TreeNodeSuite.scala      |  5 +-
 .../spark/sql/catalyst/util/MetadataSuite.scala |  4 +-
 .../spark/sql/types/DataTypeParserSuite.scala   |  4 +-
 .../apache/spark/sql/types/DataTypeSuite.scala  |  4 +-
 .../spark/sql/types/decimal/DecimalSuite.scala  |  5 +-
 sql/core/pom.xml                                |  7 +++
 .../scala/org/apache/spark/sql/RowSuite.scala   |  4 +-
 .../org/apache/spark/sql/SQLConfSuite.scala     |  5 +-
 .../sql/ScalaReflectionRelationSuite.scala      |  5 +-
 .../apache/spark/sql/SerializationSuite.scala   |  6 +--
 .../spark/sql/columnar/ColumnStatsSuite.scala   |  5 +-
 .../spark/sql/columnar/ColumnTypeSuite.scala    |  6 +--
 .../columnar/NullableColumnAccessorSuite.scala  |  5 +-
 .../columnar/NullableColumnBuilderSuite.scala   |  5 +-
 .../columnar/PartitionBatchPruningSuite.scala   |  5 +-
 .../compression/BooleanBitSetSuite.scala        |  5 +-
 .../compression/DictionaryEncodingSuite.scala   |  5 +-
 .../compression/IntegralDeltaSuite.scala        |  5 +-
 .../compression/RunLengthEncodingSuite.scala    |  5 +-
 .../spark/sql/execution/PlannerSuite.scala      |  5 +-
 .../sql/execution/debug/DebuggingSuite.scala    |  8 ++--
 .../execution/joins/HashedRelationSuite.scala   |  5 +-
 .../org/apache/spark/sql/jdbc/JDBCSuite.scala   |  5 +-
 .../apache/spark/sql/jdbc/JDBCWriteSuite.scala  |  5 +-
 .../spark/sql/parquet/ParquetSchemaSuite.scala  |  4 +-
 .../sql/sources/ResolvedDataSourceSuite.scala   |  4 +-
 sql/hive-thriftserver/pom.xml                   | 11 +++++
 .../spark/sql/hive/thriftserver/CliSuite.scala  |  6 +--
 .../thriftserver/HiveThriftServer2Suites.scala  | 28 ++++++++++--
 sql/hive/pom.xml                                |  7 +++
 .../spark/sql/catalyst/plans/PlanTest.scala     |  4 +-
 .../spark/sql/hive/HiveInspectorSuite.scala     |  4 +-
 .../sql/hive/HiveMetastoreCatalogSuite.scala    |  4 +-
 .../spark/sql/hive/SerializationSuite.scala     |  6 +--
 .../hive/execution/ConcurrentHiveSuite.scala    |  6 +--
 .../sql/hive/execution/HiveComparisonTest.scala |  6 +--
 streaming/pom.xml                               |  7 +++
 .../streaming/ReceivedBlockHandlerSuite.scala   |  8 +++-
 .../streaming/ReceivedBlockTrackerSuite.scala   |  6 +--
 .../spark/streaming/StreamingContextSuite.scala |  6 +--
 .../apache/spark/streaming/TestSuiteBase.scala  |  6 +--
 .../spark/streaming/UISeleniumSuite.scala       |  7 ++-
 .../rdd/WriteAheadLogBackedBlockRDDSuite.scala  |  9 ++--
 .../util/RateLimitedOutputStreamSuite.scala     |  4 +-
 .../streaming/util/WriteAheadLogSuite.scala     |  5 +-
 yarn/pom.xml                                    |  7 +++
 .../ClientDistributedCacheManagerSuite.scala    |  5 +-
 .../apache/spark/deploy/yarn/ClientSuite.scala  |  6 +--
 .../spark/deploy/yarn/YarnAllocatorSuite.scala  |  6 +--
 .../spark/deploy/yarn/YarnClusterSuite.scala    |  6 +--
 .../deploy/yarn/YarnSparkHadoopUtilSuite.scala  |  6 +--
 300 files changed, 863 insertions(+), 796 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/bagel/pom.xml
----------------------------------------------------------------------
diff --git a/bagel/pom.xml b/bagel/pom.xml
index 602cc7b..ea3a71a 100644
--- a/bagel/pom.xml
+++ b/bagel/pom.xml
@@ -41,6 +41,13 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-core_${scala.binary.version}</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.scalacheck</groupId>
       <artifactId>scalacheck_${scala.binary.version}</artifactId>
       <scope>test</scope>
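
(The spark-core test-jar dependency added above is what makes the new
SparkFunSuite, which lives in core's test sources, visible to this module's
tests; per the squashed commit "Add core tests as dependencies in all modules",
the same block is added to every module's pom.xml in this patch.)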

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
----------------------------------------------------------------------
diff --git a/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
index ccb262a..fb10d73 100644
--- a/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
+++ b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.bagel
 
-import org.scalatest.{BeforeAndAfter, FunSuite, Assertions}
+import org.scalatest.{BeforeAndAfter, Assertions}
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
 
@@ -27,7 +27,7 @@ import org.apache.spark.storage.StorageLevel
 class TestVertex(val active: Boolean, val age: Int) extends Vertex with Serializable
 class TestMessage(val targetId: String) extends Message[String] with Serializable
 
-class BagelSuite extends FunSuite with Assertions with BeforeAndAfter with Timeouts {
+class BagelSuite extends SparkFunSuite with Assertions with BeforeAndAfter with Timeouts {
 
   var sc: SparkContext = _
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index f6515c9..7d67942 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -316,6 +316,12 @@
     <dependency>
       <groupId>org.seleniumhq.selenium</groupId>
       <artifactId>selenium-java</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
       <scope>test</scope>
     </dependency>
     <!-- Added for selenium: -->

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index f087fc5..16c1200 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -19,11 +19,10 @@ package org.apache.spark
 
 import scala.collection.mutable
 
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 
 
-class AccumulatorSuite extends FunSuite with Matchers with LocalSparkContext {
+class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext {
 
 
   implicit def setAccum[A] = new AccumulableParam[mutable.Set[A], A] {

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala b/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala
index 4b25c20..24556c1 100644
--- a/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark
 
 import org.mockito.Mockito._
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.mock.MockitoSugar
 
 import org.apache.spark.executor.DataReadMethod
@@ -26,7 +26,7 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.storage._
 
 // TODO: Test the CacheManager's thread-safety aspects
-class CacheManagerSuite extends FunSuite with LocalSparkContext with BeforeAndAfter
+class CacheManagerSuite extends SparkFunSuite with LocalSparkContext with BeforeAndAfter
   with MockitoSugar {
 
   var blockManager: BlockManager = _

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index 3b10b3a..685475f 100644
--- a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -21,13 +21,11 @@ import java.io.File
 
 import scala.reflect.ClassTag
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.rdd._
 import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId}
 import org.apache.spark.util.Utils
 
-class CheckpointSuite extends FunSuite with LocalSparkContext with Logging {
+class CheckpointSuite extends SparkFunSuite with LocalSparkContext with Logging {
   var checkpointDir: File = _
   val partitioner = new HashPartitioner(2)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
index ae2ae7e..b5fb4ac 100644
--- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -23,7 +23,7 @@ import scala.collection.mutable.{HashSet, SynchronizedSet}
 import scala.language.existentials
 import scala.util.Random
 
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
@@ -43,7 +43,7 @@ import org.apache.spark.storage.ShuffleIndexBlockId
  * config options, in particular, a different shuffle manager class
  */
 abstract class ContextCleanerSuiteBase(val shuffleManager: Class[_] = classOf[HashShuffleManager])
-  extends FunSuite with BeforeAndAfter with LocalSparkContext
+  extends SparkFunSuite with BeforeAndAfter with LocalSparkContext
 {
   implicit val defaultTimeout = timeout(10000 millis)
   val conf = new SparkConf()

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/DistributedSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index 97ea357..97015fa 100644
--- a/core/src/test/scala/org/apache/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.Matchers
 import org.scalatest.time.{Millis, Span}
@@ -28,7 +27,7 @@ class NotSerializableClass
 class NotSerializableExn(val notSer: NotSerializableClass) extends Throwable() {}
 
 
-class DistributedSuite extends FunSuite with Matchers with LocalSparkContext {
+class DistributedSuite extends SparkFunSuite with Matchers with LocalSparkContext {
 
   val clusterUrl = "local-cluster[2,1,512]"
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/DriverSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala
index 9bd5dfe..129fa8d 100644
--- a/core/src/test/scala/org/apache/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -19,14 +19,13 @@ package org.apache.spark
 
 import java.io.File
 
-import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.prop.TableDrivenPropertyChecks._
 import org.scalatest.time.SpanSugar._
 
 import org.apache.spark.util.Utils
 
-class DriverSuite extends FunSuite with Timeouts {
+class DriverSuite extends SparkFunSuite with Timeouts {
 
   test("driver should exit after finishing without cleanup (SPARK-530)") {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
index 114c8d4..d3ce7a3 100644
--- a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark
 
 import scala.collection.mutable
 
-import org.scalatest.{BeforeAndAfter, FunSuite, PrivateMethodTester}
+import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.scheduler._
 import org.apache.spark.scheduler.cluster.ExecutorInfo
@@ -28,7 +28,11 @@ import org.apache.spark.util.ManualClock
 /**
  * Test add and remove behavior of ExecutorAllocationManager.
  */
-class ExecutorAllocationManagerSuite extends FunSuite with LocalSparkContext with BeforeAndAfter {
+class ExecutorAllocationManagerSuite
+  extends SparkFunSuite
+  with LocalSparkContext
+  with BeforeAndAfter {
+
   import ExecutorAllocationManager._
   import ExecutorAllocationManagerSuite._
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/FailureSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index 1212d0b..0d2f11c 100644
--- a/core/src/test/scala/org/apache/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.util.NonSerializable
 
 import java.io.NotSerializableException
@@ -38,7 +36,7 @@ object FailureSuiteState {
   }
 }
 
-class FailureSuite extends FunSuite with LocalSparkContext {
+class FailureSuite extends SparkFunSuite with LocalSparkContext {
 
   // Run a 3-task map job in which task 1 deterministically fails once, and check
   // whether the job completes successfully and we ran 4 tasks in total.

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/FileServerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
index 5fdf6bc..0b823be 100644
--- a/core/src/test/scala/org/apache/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -25,13 +25,12 @@ import javax.net.ssl.SSLHandshakeException
 import com.google.common.io.ByteStreams
 import org.apache.commons.io.{FileUtils, IOUtils}
 import org.apache.commons.lang3.RandomUtils
-import org.scalatest.FunSuite
 
 import org.apache.spark.util.Utils
 
 import SSLSampleConfigs._
 
-class FileServerSuite extends FunSuite with LocalSparkContext {
+class FileServerSuite extends SparkFunSuite with LocalSparkContext {
 
   @transient var tmpDir: File = _
   @transient var tmpFile: File = _

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/FileSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index 7acd27c..d07926c 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -30,12 +30,11 @@ import org.apache.hadoop.mapred.{JobConf, FileAlreadyExistsException, FileSplit,
 import org.apache.hadoop.mapreduce.Job
 import org.apache.hadoop.mapreduce.lib.input.{FileSplit => NewFileSplit, TextInputFormat => NewTextInputFormat}
 import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
-import org.scalatest.FunSuite
 
 import org.apache.spark.rdd.{NewHadoopRDD, HadoopRDD}
 import org.apache.spark.util.Utils
 
-class FileSuite extends FunSuite with LocalSparkContext {
+class FileSuite extends SparkFunSuite with LocalSparkContext {
   var tempDir: File = _
 
   override def beforeEach() {

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FutureActionSuite.scala b/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
index f5cdb01..1102aea 100644
--- a/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
@@ -20,10 +20,14 @@ package org.apache.spark
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
 
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+import org.scalatest.{BeforeAndAfter, Matchers}
 
 
-class FutureActionSuite extends FunSuite with BeforeAndAfter with Matchers with LocalSparkContext {
+class FutureActionSuite
+  extends SparkFunSuite
+  with BeforeAndAfter
+  with Matchers
+  with LocalSparkContext {
 
   before {
     sc = new SparkContext("local", "FutureActionSuite")

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
index d895230..0afd872 100644
--- a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
@@ -17,11 +17,9 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.rdd.RDD
 
-class ImplicitOrderingSuite extends FunSuite with LocalSparkContext {
+class ImplicitOrderingSuite extends SparkFunSuite with LocalSparkContext {
   // Tests that PairRDDFunctions grabs an implicit Ordering in various cases where it should.
   test("basic inference of Orderings"){
     sc = new SparkContext("local", "test")
@@ -50,30 +48,30 @@ private object ImplicitOrderingSuite {
   class OrderedClass extends Ordered[OrderedClass] {
     override def compare(o: OrderedClass): Int = ???
   }
-  
+
   def basicMapExpectations(rdd: RDD[Int]) = {
-    List((rdd.map(x => (x, x)).keyOrdering.isDefined, 
+    List((rdd.map(x => (x, x)).keyOrdering.isDefined,
             "rdd.map(x => (x, x)).keyOrdering.isDefined"),
-          (rdd.map(x => (1, x)).keyOrdering.isDefined, 
+          (rdd.map(x => (1, x)).keyOrdering.isDefined,
             "rdd.map(x => (1, x)).keyOrdering.isDefined"),
-          (rdd.map(x => (x.toString, x)).keyOrdering.isDefined, 
+          (rdd.map(x => (x.toString, x)).keyOrdering.isDefined,
             "rdd.map(x => (x.toString, x)).keyOrdering.isDefined"),
-          (rdd.map(x => (null, x)).keyOrdering.isDefined, 
+          (rdd.map(x => (null, x)).keyOrdering.isDefined,
             "rdd.map(x => (null, x)).keyOrdering.isDefined"),
-          (rdd.map(x => (new NonOrderedClass, x)).keyOrdering.isEmpty, 
+          (rdd.map(x => (new NonOrderedClass, x)).keyOrdering.isEmpty,
             "rdd.map(x => (new NonOrderedClass, x)).keyOrdering.isEmpty"),
-          (rdd.map(x => (new ComparableClass, x)).keyOrdering.isDefined, 
+          (rdd.map(x => (new ComparableClass, x)).keyOrdering.isDefined,
             "rdd.map(x => (new ComparableClass, x)).keyOrdering.isDefined"),
-          (rdd.map(x => (new OrderedClass, x)).keyOrdering.isDefined, 
+          (rdd.map(x => (new OrderedClass, x)).keyOrdering.isDefined,
             "rdd.map(x => (new OrderedClass, x)).keyOrdering.isDefined"))
   }
-  
+
   def otherRDDMethodExpectations(rdd: RDD[Int]) = {
-    List((rdd.groupBy(x => x).keyOrdering.isDefined, 
+    List((rdd.groupBy(x => x).keyOrdering.isDefined,
            "rdd.groupBy(x => x).keyOrdering.isDefined"),
-         (rdd.groupBy(x => new NonOrderedClass).keyOrdering.isEmpty, 
+         (rdd.groupBy(x => new NonOrderedClass).keyOrdering.isEmpty,
            "rdd.groupBy(x => new NonOrderedClass).keyOrdering.isEmpty"),
-         (rdd.groupBy(x => new ComparableClass).keyOrdering.isDefined, 
+         (rdd.groupBy(x => new ComparableClass).keyOrdering.isDefined,
            "rdd.groupBy(x => new ComparableClass).keyOrdering.isDefined"),
          (rdd.groupBy(x => new OrderedClass).keyOrdering.isDefined,
            "rdd.groupBy(x => new OrderedClass).keyOrdering.isDefined"),
@@ -82,4 +80,4 @@ private object ImplicitOrderingSuite {
          (rdd.groupBy((x: Int) => x, new HashPartitioner(5)).keyOrdering.isDefined,
            "rdd.groupBy((x: Int) => x, new HashPartitioner(5)).keyOrdering.isDefined"))
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
index 21487bc..3363147 100644
--- a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
@@ -24,7 +24,7 @@ import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.duration._
 import scala.concurrent.future
 
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers
 
 import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
@@ -34,7 +34,7 @@ import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
  * (e.g. count) as well as multi-job action (e.g. take). We test the local and cluster schedulers
  * in both FIFO and fair scheduling modes.
  */
-class JobCancellationSuite extends FunSuite with Matchers with BeforeAndAfter
+class JobCancellationSuite extends SparkFunSuite with Matchers with BeforeAndAfter
   with LocalSparkContext {
 
   override def afterEach() {

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index ccfe067..7771fa9 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -21,14 +21,13 @@ import scala.concurrent.Await
 
 import akka.actor._
 import akka.testkit.TestActorRef
-import org.scalatest.FunSuite
 
 import org.apache.spark.scheduler.{CompressedMapStatus, MapStatus}
 import org.apache.spark.shuffle.FetchFailedException
 import org.apache.spark.storage.BlockManagerId
 import org.apache.spark.util.AkkaUtils
 
-class MapOutputTrackerSuite extends FunSuite {
+class MapOutputTrackerSuite extends SparkFunSuite {
   private val conf = new SparkConf
 
   test("master start and stop") {

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index b753231..82c2c3b 100644
--- a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -20,12 +20,12 @@ package org.apache.spark
 import scala.collection.mutable.ArrayBuffer
 import scala.math.abs
 
-import org.scalatest.{FunSuite, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.rdd.RDD
 import org.apache.spark.util.StatCounter
 
-class PartitioningSuite extends FunSuite with SharedSparkContext with PrivateMethodTester {
+class PartitioningSuite extends SparkFunSuite with SharedSparkContext with PrivateMethodTester {
 
   test("HashPartitioner equality") {
     val p2 = new HashPartitioner(2)

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
index 444a333..d7e00e3 100644
--- a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
@@ -21,9 +21,9 @@ import java.io.File
 
 import com.google.common.io.Files
 import org.apache.spark.util.Utils
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
 
-class SSLOptionsSuite extends FunSuite with BeforeAndAfterAll {
+class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
   test("test resolving property file as spark conf ") {
     val keyStorePath = new File(this.getClass.getResource("/keystore").toURI).getAbsolutePath

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala b/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
index 43fbd3f..f09d7f9 100644
--- a/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
@@ -19,9 +19,7 @@ package org.apache.spark
 
 import java.io.File
 
-import org.scalatest.FunSuite
-
-class SecurityManagerSuite extends FunSuite {
+class SecurityManagerSuite extends SparkFunSuite {
 
   test("set security with conf") {
     val conf = new SparkConf

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index 30b6184..81099d0 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 
 import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
@@ -26,7 +25,7 @@ import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.storage.{ShuffleDataBlockId, ShuffleBlockId}
 import org.apache.spark.util.MutablePair
 
-abstract class ShuffleSuite extends FunSuite with Matchers with LocalSparkContext {
+abstract class ShuffleSuite extends SparkFunSuite with Matchers with LocalSparkContext {
 
   val conf = new SparkConf(loadDefaults = false)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index e08210a..c3b98cb 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -21,12 +21,11 @@ import java.util.concurrent.{TimeUnit, Executors}
 
 import scala.util.{Try, Random}
 
-import org.scalatest.FunSuite
 import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
 import org.apache.spark.util.ResetSystemProperties
 import com.esotericsoftware.kryo.Kryo
 
-class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemProperties {
+class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
   test("loading from system properties") {
     System.setProperty("spark.test.testProperty", "2")
     val conf = new SparkConf()

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
index e6ab538..2bdbd70 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
@@ -17,10 +17,10 @@
 
 package org.apache.spark
 
-import org.scalatest.{Assertions, FunSuite}
+import org.scalatest.Assertions
 import org.apache.spark.storage.StorageLevel
 
-class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
+class SparkContextInfoSuite extends SparkFunSuite with LocalSparkContext {
   test("getPersistentRDDs only returns RDDs that are marked as cached") {
     sc = new SparkContext("local", "test")
     assert(sc.getPersistentRDDs.isEmpty === true)

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
index bbed8dd..bf72479 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark
 
-import org.scalatest.{FunSuite, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.scheduler.{SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
 import org.apache.spark.scheduler.cluster.{SimrSchedulerBackend, SparkDeploySchedulerBackend}
@@ -25,7 +25,7 @@ import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, Me
 import org.apache.spark.scheduler.local.LocalBackend
 
 class SparkContextSchedulerCreationSuite
-  extends FunSuite with LocalSparkContext with PrivateMethodTester with Logging {
+  extends SparkFunSuite with LocalSparkContext with PrivateMethodTester with Logging {
 
   def createTaskScheduler(master: String): TaskSchedulerImpl =
     createTaskScheduler(master, new SparkConf())

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index b4be8e3..451dbdd 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -23,8 +23,6 @@ import java.util.concurrent.TimeUnit
 import com.google.common.base.Charsets._
 import com.google.common.io.Files
 
-import org.scalatest.FunSuite
-
 import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
@@ -33,7 +31,7 @@ import org.apache.spark.util.Utils
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
 
-class SparkContextSuite extends FunSuite with LocalSparkContext {
+class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
 
   test("Only one SparkContext may be active at a time") {
     // Regression test for SPARK-4180

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
new file mode 100644
index 0000000..8cb3443
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark
+
+// scalastyle:off
+import org.scalatest.{FunSuite, Outcome}
+
+/**
+ * Base abstract class for all unit tests in Spark for handling common functionality.
+ */
+private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
+// scalastyle:on
+
+  /**
+   * Log the suite name and the test name before and after each test.
+   *
+   * Subclasses should never override this method. If they wish to run
+   * custom code before and after each test, they should mix in
+   * the {{org.scalatest.BeforeAndAfter}} trait instead.
+   */
+  final protected override def withFixture(test: NoArgTest): Outcome = {
+    val testName = test.text
+    val suiteName = this.getClass.getName
+    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
+    try {
+      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
+      test()
+    } finally {
+      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
+    }
+  }
+
+}
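
To see the effect of the new base class: a suite opts in by extending
SparkFunSuite instead of FunSuite (as BagelSuite does earlier in this diff).
Since SparkFunSuite is private[spark], suites live under the org.apache.spark
package. A hypothetical example suite:

    package org.apache.spark

    // Hypothetical suite; each of the ~300 suites touched by this commit
    // makes the same one-line change to its "extends" clause.
    class ExampleSuite extends SparkFunSuite {
      test("addition works") {
        assert(1 + 1 === 2)
      }
    }

Per the withFixture override above (shortSuiteName abbreviates
org.apache.spark to o.a.s), unit-tests.log would bracket the test's output as:

    ===== TEST OUTPUT FOR o.a.s.ExampleSuite: 'addition works' =====
    ... log output produced while the test runs ...
    ===== FINISHED o.a.s.ExampleSuite: 'addition works' =====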

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala b/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
index 41d6ea2..d822cf4 100644
--- a/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
@@ -21,12 +21,12 @@ import scala.concurrent.duration._
 import scala.language.implicitConversions
 import scala.language.postfixOps
 
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark.JobExecutionStatus._
 
-class StatusTrackerSuite extends FunSuite with Matchers with LocalSparkContext {
+class StatusTrackerSuite extends SparkFunSuite with Matchers with LocalSparkContext {
 
   test("basic status API usage") {
     sc = new SparkContext("local", "test", new SparkConf(false))
@@ -85,4 +85,4 @@ class StatusTrackerSuite extends FunSuite with Matchers with LocalSparkContext {
       sc.statusTracker.getJobIdsForGroup("my-job-group").toSet should be (Set(firstJobId, secondJobId))
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index b5383d5..5f864da 100644
--- a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -21,8 +21,6 @@ import java.util.concurrent.Semaphore
 import java.util.concurrent.atomic.AtomicBoolean
 import java.util.concurrent.atomic.AtomicInteger
 
-import org.scalatest.FunSuite
-
 /**
  * Holds state shared across task threads in some ThreadingSuite tests.
  */
@@ -36,7 +34,7 @@ object ThreadingSuiteState {
   }
 }
 
-class ThreadingSuite extends FunSuite with LocalSparkContext {
+class ThreadingSuite extends SparkFunSuite with LocalSparkContext {
 
   test("accessing SparkContext form a different thread") {
     sc = new SparkContext("local", "test")

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
index 42ff059..f7a13ab 100644
--- a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
+++ b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
@@ -17,11 +17,10 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.time.{Millis, Span}
 
-class UnpersistSuite extends FunSuite with LocalSparkContext {
+class UnpersistSuite extends SparkFunSuite with LocalSparkContext {
   test("unpersist RDD") {
     sc = new SparkContext("local", "test")
     val rdd = sc.makeRDD(Array(1, 2, 3, 4), 2).cache()

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
index 8959a84..135c56b 100644
--- a/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
@@ -21,15 +21,15 @@ import scala.io.Source
 
 import java.io.{PrintWriter, File}
 
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers
 
-import org.apache.spark.{SharedSparkContext, SparkConf}
+import org.apache.spark.{SharedSparkContext, SparkConf, SparkFunSuite}
 import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.util.Utils
 
 // This test suite uses SharedSparkContext because we need a SparkEnv in order to deserialize
 // a PythonBroadcast:
-class PythonBroadcastSuite extends FunSuite with Matchers with SharedSparkContext {
+class PythonBroadcastSuite extends SparkFunSuite with Matchers with SharedSparkContext {
   test("PythonBroadcast can be serialized with Kryo (SPARK-4882)") {
     val tempDir = Utils.createTempDir()
     val broadcastedString = "Hello, world!"

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
index c63d834..41f2a5c 100644
--- a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
@@ -19,9 +19,9 @@ package org.apache.spark.api.python
 
 import java.io.{ByteArrayOutputStream, DataOutputStream}
 
-import org.scalatest.FunSuite
+import org.apache.spark.SparkFunSuite
 
-class PythonRDDSuite extends FunSuite {
+class PythonRDDSuite extends SparkFunSuite {
 
   test("Writing large strings to the worker") {
     val input: List[String] = List("a"*100000)

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala b/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala
index f8c3932..267a79f 100644
--- a/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala
@@ -17,11 +17,9 @@
 
 package org.apache.spark.api.python
 
-import org.scalatest.FunSuite
+import org.apache.spark.{SharedSparkContext, SparkFunSuite}
 
-import org.apache.spark.SharedSparkContext
-
-class SerDeUtilSuite extends FunSuite with SharedSparkContext {
+class SerDeUtilSuite extends SparkFunSuite with SharedSparkContext {
 
   test("Converting an empty pair RDD to python does not throw an exception (SPARK-5441)") {
     val emptyRdd = sc.makeRDD(Seq[(Any, Any)]())

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala b/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
index af32726..19692f1 100644
--- a/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
+++ b/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
@@ -19,9 +19,9 @@ package org.apache.spark.broadcast
 
 import scala.util.Random
 
-import org.scalatest.{Assertions, FunSuite}
+import org.scalatest.Assertions
 
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkException, SparkEnv}
+import org.apache.spark._
 import org.apache.spark.io.SnappyCompressionCodec
 import org.apache.spark.rdd.RDD
 import org.apache.spark.serializer.JavaSerializer
@@ -43,7 +43,7 @@ class DummyBroadcastClass(rdd: RDD[Int]) extends Serializable {
   }
 }
 
-class BroadcastSuite extends FunSuite with LocalSparkContext {
+class BroadcastSuite extends SparkFunSuite with LocalSparkContext {
 
   private val httpConf = broadcastConf("HttpBroadcastFactory")
   private val torrentConf = broadcastConf("TorrentBroadcastFactory")

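BroadcastSuite mixes in LocalSparkContext instead: its tests build differently configured contexts (HTTP vs. torrent broadcast factories), so each test owns its own sc and the trait only guarantees cleanup. A sketch of that pattern, with the per-test teardown assumed to live in afterEach (details of the real trait may vary):

    import org.apache.spark.SparkContext
    import org.scalatest.{BeforeAndAfterEach, Suite}

    // Sketch: each test assigns sc itself; the trait stops it afterwards so
    // differently configured contexts cannot leak between tests.
    trait LocalSparkContextSketch extends BeforeAndAfterEach { self: Suite =>

      @transient var sc: SparkContext = _

      override def afterEach(): Unit = {
        try super.afterEach()
        finally {
          if (sc != null) sc.stop()
          sc = null
          // Clear the driver port so the next test can bind a fresh one.
          System.clearProperty("spark.driver.port")
        }
      }
    }
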
http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala b/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala
index 518073d..0149372 100644
--- a/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.deploy
 
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 
-class ClientSuite extends FunSuite with Matchers {
+import org.apache.spark.SparkFunSuite
+
+class ClientSuite extends SparkFunSuite with Matchers {
   test("correctly validates driver jar URL's") {
     ClientArguments.isValidJarUrl("http://someHost:8080/foo.jar") should be (true)
     ClientArguments.isValidJarUrl("https://someHost:8080/foo.jar") should be (true)

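The assertions above pin down the contract of ClientArguments.isValidJarUrl: a well-formed URL whose path ends in ".jar". One plausible implementation consistent with those assertions (a sketch, not necessarily the real method):

    import java.net.{URI, URISyntaxException}

    object JarUrlCheckSketch {
      // Accept only well-formed URLs with a scheme and a path ending in ".jar".
      def isValidJarUrl(s: String): Boolean = {
        try {
          val uri = new URI(s)
          uri.getScheme != null && uri.getPath != null && uri.getPath.endsWith(".jar")
        } catch {
          case _: URISyntaxException => false
        }
      }
    }
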
http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/CommandUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/CommandUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/CommandUtilsSuite.scala
index 7915ee7..5ba398e 100644
--- a/core/src/test/scala/org/apache/spark/deploy/CommandUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/CommandUtilsSuite.scala
@@ -17,12 +17,13 @@
 
 package org.apache.spark.deploy
 
+import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.worker.CommandUtils
 import org.apache.spark.util.Utils
 
-import org.scalatest.{FunSuite, Matchers}
+import org.scalatest.Matchers
 
-class CommandUtilsSuite extends FunSuite with Matchers {
+class CommandUtilsSuite extends SparkFunSuite with Matchers {
 
   test("set libraryPath correctly") {
     val appId = "12345-worker321-9876"

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index 68b5776..cdfef82 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -23,14 +23,13 @@ import java.util.Date
 import com.fasterxml.jackson.core.JsonParseException
 import org.json4s._
 import org.json4s.jackson.JsonMethods
-import org.scalatest.FunSuite
 
 import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
 import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, RecoveryState, WorkerInfo}
 import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 
-class JsonProtocolSuite extends FunSuite {
+class JsonProtocolSuite extends SparkFunSuite {
 
   test("writeApplicationInfo") {
     val output = JsonProtocol.writeApplicationInfo(createAppInfo())

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
index 54dd7c9..78d218a 100644
--- a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
@@ -22,13 +22,11 @@ import java.net.URL
 import scala.collection.mutable
 import scala.io.Source
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.scheduler.{SparkListenerExecutorAdded, SparkListener}
-import org.apache.spark.{SparkConf, SparkContext, LocalSparkContext}
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
 
-class LogUrlsStandaloneSuite extends FunSuite with LocalSparkContext {
+class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
 
   /** Length of time to wait while draining listener events. */
   private val WAIT_TIMEOUT_MILLIS = 10000

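WAIT_TIMEOUT_MILLIS exists because listener events are delivered asynchronously: the suite must drain the bus before asserting on what a listener saw. A sketch of that drain-then-assert pattern (the listener and assertion here are illustrative; the Boolean-returning waitUntilEmpty is assumed from the 1.3-era LiveListenerBus, and since listenerBus is private[spark] such code lives in the org.apache.spark package, as this suite does):

    package org.apache.spark

    import org.apache.spark.scheduler.{SparkListener, SparkListenerJobEnd}

    class ListenerDrainSketch extends SparkFunSuite with LocalSparkContext {
      test("drain listener events before asserting") {
        sc = new SparkContext("local", "test")
        var jobsEnded = 0
        sc.addSparkListener(new SparkListener {
          override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = jobsEnded += 1
        })
        sc.parallelize(1 to 10).count()
        // Events are posted asynchronously; wait for the bus to drain first.
        assert(sc.listenerBus.waitUntilEmpty(10000))
        assert(jobsEnded === 1)
      }
    }
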
http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
index bb6251f..a4c72aa 100644
--- a/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.deploy
 
-import org.scalatest.FunSuite
+import org.apache.spark.SparkFunSuite
 
-class PythonRunnerSuite extends FunSuite {
+class PythonRunnerSuite extends SparkFunSuite {
 
   // Test formatting a single path to be added to the PYTHONPATH
   test("format path") {

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index cea4832..e59b56a 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -23,7 +23,6 @@ import scala.collection.mutable.ArrayBuffer
 
 import com.google.common.base.Charsets.UTF_8
 import com.google.common.io.ByteStreams
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
@@ -34,7 +33,12 @@ import org.apache.spark.util.{ResetSystemProperties, Utils}
 
 // Note: this suite mixes in ResetSystemProperties because SparkSubmit.main() sets a bunch
 // of properties that need to be cleared after tests.
-class SparkSubmitSuite extends FunSuite with Matchers with ResetSystemProperties with Timeouts {
+class SparkSubmitSuite
+  extends SparkFunSuite
+  with Matchers
+  with ResetSystemProperties
+  with Timeouts {
+
   def beforeAll() {
     System.setProperty("spark.testing", "true")
   }

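The note above is why SparkSubmitSuite mixes in ResetSystemProperties: SparkSubmit.main() mutates JVM-global system properties, and each test needs a clean slate. A sketch of a save/restore trait in that spirit (the real org.apache.spark.util.ResetSystemProperties may snapshot the properties differently):

    import java.util.Properties

    import org.scalatest.{BeforeAndAfterEach, Suite}

    trait ResetSystemPropertiesSketch extends BeforeAndAfterEach { this: Suite =>

      private var saved: Properties = _

      override def beforeEach(): Unit = {
        saved = new Properties()
        saved.putAll(System.getProperties) // snapshot before the test mutates anything
        super.beforeEach()
      }

      override def afterEach(): Unit = {
        try super.afterEach()
        finally System.setProperties(saved) // restore even if the test failed
      }
    }
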
http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index 8bcca92..95a05c9 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -21,12 +21,14 @@ import java.io.{PrintStream, OutputStream, File}
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
 
 import org.apache.ivy.core.module.descriptor.MDArtifact
 import org.apache.ivy.plugins.resolver.IBiblioResolver
 
-class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
+import org.apache.spark.SparkFunSuite
+
+class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
   private val noOpOutputStream = new OutputStream {
     def write(b: Int) = {}

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index fcae603..6e2a118 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -24,15 +24,15 @@ import scala.io.Source
 
 import org.apache.hadoop.fs.Path
 import org.json4s.jackson.JsonMethods._
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers
 
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.{Logging, SparkConf, SparkFunSuite}
 import org.apache.spark.io._
 import org.apache.spark.scheduler._
 import org.apache.spark.util.{JsonProtocol, Utils}
 
-class FsHistoryProviderSuite extends FunSuite with BeforeAndAfter with Matchers with Logging {
+class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
 
   private var testDir: File = null
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 3a9963a..0db554c 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -23,13 +23,13 @@ import scala.collection.mutable
 
 import org.apache.hadoop.fs.Path
 import org.mockito.Mockito.{when}
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 import org.scalatest.mock.MockitoSugar
 
+import org.apache.spark.SparkFunSuite
 import org.apache.spark.ui.SparkUI
 
-class HistoryServerSuite extends FunSuite with Matchers with MockitoSugar {
+class HistoryServerSuite extends SparkFunSuite with Matchers with MockitoSugar {
 
   test("generate history page with relative links") {
     val historyServer = mock[HistoryServer]

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
index 33c6f7f..7cada5c 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
@@ -24,13 +24,13 @@ import scala.concurrent.duration._
 import scala.language.postfixOps
 
 import akka.actor.Address
-import org.scalatest.{FunSuite, Matchers}
+import org.scalatest.Matchers
 import other.supplier.{CustomPersistenceEngine, CustomRecoveryModeFactory}
 
 import org.apache.spark.deploy._
-import org.apache.spark.{SparkConf, SparkException}
+import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
 
-class MasterSuite extends FunSuite with Matchers {
+class MasterSuite extends SparkFunSuite with Matchers {
 
   test("toAkkaUrl") {
     val conf = new SparkConf(loadDefaults = false)

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
index 2fa90e3..eebbe85 100644
--- a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
@@ -25,7 +25,7 @@ import scala.collection.mutable
 
 import akka.actor.{Actor, ActorRef, ActorSystem, Props}
 import com.google.common.base.Charsets
-import org.scalatest.{BeforeAndAfterEach, FunSuite}
+import org.scalatest.BeforeAndAfterEach
 import org.json4s.JsonAST._
 import org.json4s.jackson.JsonMethods._
 
@@ -38,7 +38,7 @@ import org.apache.spark.deploy.master.DriverState._
 /**
  * Tests for the REST application submission protocol used in standalone cluster mode.
  */
-class StandaloneRestSubmitSuite extends FunSuite with BeforeAndAfterEach {
+class StandaloneRestSubmitSuite extends SparkFunSuite with BeforeAndAfterEach {
   private val client = new StandaloneRestClient
   private var actorSystem: Option[ActorSystem] = None
   private var server: Option[StandaloneRestServer] = None

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
index 1d64ec2..ce57660 100644
--- a/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
@@ -21,14 +21,13 @@ import java.lang.Boolean
 import java.lang.Integer
 
 import org.json4s.jackson.JsonMethods._
-import org.scalatest.FunSuite
 
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 
 /**
  * Tests for the REST application submission protocol.
  */
-class SubmitRestProtocolSuite extends FunSuite {
+class SubmitRestProtocolSuite extends SparkFunSuite {
 
   test("validate") {
     val request = new DummyRequest

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
index aa6e487..f96ec9c 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
@@ -23,13 +23,12 @@ import org.mockito.Mockito._
 import org.mockito.Matchers._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
-import org.scalatest.FunSuite
 
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.{Command, DriverDescription}
 import org.apache.spark.util.Clock
 
-class DriverRunnerTest extends FunSuite {
+class DriverRunnerTest extends SparkFunSuite {
   private def createDriverRunner() = {
     val command = new Command("mainClass", Seq(), Map(), Seq(), Seq(), Seq())
     val driverDescription = new DriverDescription("jarUrl", 512, 1, true, command)

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
index 6fca632..48093d1 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
@@ -21,12 +21,10 @@ import java.io.File
 
 import scala.collection.JavaConversions._
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 
-class ExecutorRunnerTest extends FunSuite {
+class ExecutorRunnerTest extends SparkFunSuite {
   test("command includes appId") {
     val appId = "12345-worker321-9876"
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala
index 372d7aa..f72aaf8 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala
@@ -18,11 +18,10 @@
 
 package org.apache.spark.deploy.worker
 
-import org.apache.spark.SparkConf
-import org.scalatest.FunSuite
+import org.apache.spark.{SparkConf, SparkFunSuite}
 
 
-class WorkerArgumentsTest extends FunSuite {
+class WorkerArgumentsTest extends SparkFunSuite {
 
   test("Memory can't be set to 0 when cmd line args leave off M or G") {
     val conf = new SparkConf

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
index 84e2fd7..ac2b7f7 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.deploy.worker
 
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.Command
 
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers
 
-class WorkerSuite extends FunSuite with Matchers {
+class WorkerSuite extends SparkFunSuite with Matchers {
 
   def cmd(javaOpts: String*) = Command("", Seq.empty, Map.empty, Seq.empty, Seq.empty, Seq(javaOpts:_*))
   def conf(opts: (String, String)*) = new SparkConf(loadDefaults = false).setAll(opts)

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
index 5e538d6..cc57931 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
@@ -20,9 +20,10 @@ package org.apache.spark.deploy.worker
 import akka.actor.{ActorSystem, AddressFromURIString, Props}
 import akka.testkit.TestActorRef
 import akka.remote.DisassociatedEvent
-import org.scalatest.FunSuite
 
-class WorkerWatcherSuite extends FunSuite {
+import org.apache.spark.SparkFunSuite
+
+class WorkerWatcherSuite extends SparkFunSuite {
   test("WorkerWatcher shuts down on valid disassociation") {
     val actorSystem = ActorSystem("test")
     val targetWorkerUrl = "akka://1.2.3.4/user/Worker"

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala
index 2d02122..572360d 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala
@@ -20,9 +20,11 @@ package org.apache.spark.deploy.worker.ui
 import java.io.{File, FileWriter}
 
 import org.mockito.Mockito.mock
-import org.scalatest.{FunSuite, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 
-class LogPageSuite extends FunSuite with PrivateMethodTester {
+import org.apache.spark.SparkFunSuite
+
+class LogPageSuite extends SparkFunSuite with PrivateMethodTester {
 
   test("get logs simple") {
     val webui = mock(classOf[WorkerWebUI])

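LogPageSuite keeps PrivateMethodTester so it can exercise the page's private helpers without widening their visibility. As a generic ScalaTest illustration of that mechanism (not Spark code; the Symbol must match the private method's name exactly):

    import org.scalatest.{FunSuite, PrivateMethodTester}

    class Counter { private def bump(n: Int): Int = n + 1 }

    class CounterSuite extends FunSuite with PrivateMethodTester {
      test("invoke a private method reflectively") {
        val bump = PrivateMethod[Int]('bump)
        assert((new Counter) invokePrivate bump(41) === 42)
      }
    }
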
http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala b/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala
index 326e203..8275fd8 100644
--- a/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.executor
 
-import org.scalatest.FunSuite
+import org.apache.spark.SparkFunSuite
 
-class TaskMetricsSuite extends FunSuite {
+class TaskMetricsSuite extends SparkFunSuite {
   test("[SPARK-5701] updateShuffleReadMetrics: ShuffleReadMetrics not added when no shuffle deps") {
     val taskMetrics = new TaskMetrics()
     taskMetrics.updateShuffleReadMetrics()

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
index 2e58c15..63947df 100644
--- a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
@@ -24,11 +24,10 @@ import java.io.FileOutputStream
 import scala.collection.immutable.IndexedSeq
 
 import org.scalatest.BeforeAndAfterAll
-import org.scalatest.FunSuite
 
 import org.apache.hadoop.io.Text
 
-import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
 import org.apache.spark.util.Utils
 import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodecFactory, GzipCodec}
 
@@ -37,7 +36,7 @@ import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodecFactory, Gzi
  * [[org.apache.spark.input.WholeTextFileRecordReader WholeTextFileRecordReader]]. A temporary
  * directory is created as fake input. The temporary storage is deleted at the end.
  */
-class WholeTextFileRecordReaderSuite extends FunSuite with BeforeAndAfterAll {
+class WholeTextFileRecordReaderSuite extends SparkFunSuite with BeforeAndAfterAll {
   private var sc: SparkContext = _
   private var factory: CompressionCodecFactory = _
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
index 8c6035f..f2a0c46 100644
--- a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
+++ b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
@@ -19,11 +19,9 @@ package org.apache.spark.io
 
 import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
 
-import org.scalatest.FunSuite
+import org.apache.spark.{SparkConf, SparkFunSuite}
 
-import org.apache.spark.SparkConf
-
-class CompressionCodecSuite extends FunSuite {
+class CompressionCodecSuite extends SparkFunSuite {
   val conf = new SparkConf(false)
 
   def testCodec(codec: CompressionCodec) {

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala b/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
index 78fa98a..1621a14 100644
--- a/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
@@ -36,14 +36,14 @@ import org.apache.hadoop.mapreduce.lib.input.{CombineFileInputFormat => NewCombi
 import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
 import org.apache.hadoop.mapreduce.{TaskAttemptContext, InputSplit => NewInputSplit,
   RecordReader => NewRecordReader}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 
-import org.apache.spark.SharedSparkContext
+import org.apache.spark.{SharedSparkContext, SparkFunSuite}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd}
 import org.apache.spark.util.Utils
 
-class InputOutputMetricsSuite extends FunSuite with SharedSparkContext
+class InputOutputMetricsSuite extends SparkFunSuite with SharedSparkContext
   with BeforeAndAfter {
 
   @transient var tmpDir: File = _

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
index 37e5284..f15dfd9 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
@@ -17,9 +17,11 @@
 
 package org.apache.spark.metrics
 
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 
-class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
+import org.apache.spark.SparkFunSuite
+
+class MetricsConfigSuite extends SparkFunSuite with BeforeAndAfter {
   var filePath: String = _
 
   before {

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
index bbdc956..9c389c7 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.metrics
 
-import org.scalatest.{BeforeAndAfter, FunSuite, PrivateMethodTester}
+import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
 
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.master.MasterSource
 import org.apache.spark.metrics.source.Source
 
@@ -27,7 +27,7 @@ import com.codahale.metrics.MetricRegistry
 
 import scala.collection.mutable.ArrayBuffer
 
-class MetricsSystemSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester{
+class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateMethodTester {
   var filePath: String = _
   var conf: SparkConf = null
   var securityMgr: SecurityManager = null

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
index 94bfa67..a27a088 100644
--- a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
@@ -29,12 +29,12 @@ import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
 import org.apache.spark.network.shuffle.BlockFetchingListener
 import org.apache.spark.network.{BlockDataManager, BlockTransferService}
 import org.apache.spark.storage.{BlockId, ShuffleBlockId}
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.mockito.Mockito._
 import org.scalatest.mock.MockitoSugar
-import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite, ShouldMatchers}
+import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, ShouldMatchers}
 
-class NettyBlockTransferSecuritySuite extends FunSuite with MockitoSugar with ShouldMatchers {
+class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with ShouldMatchers {
   test("security default off") {
     val conf = new SparkConf()
       .set("spark.app.id", "app-id")

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/network/nio/ConnectionManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/network/nio/ConnectionManagerSuite.scala b/core/src/test/scala/org/apache/spark/network/nio/ConnectionManagerSuite.scala
index 716f875..b956d0b 100644
--- a/core/src/test/scala/org/apache/spark/network/nio/ConnectionManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/nio/ConnectionManagerSuite.scala
@@ -24,15 +24,13 @@ import scala.concurrent.duration._
 import scala.concurrent.{Await, TimeoutException}
 import scala.language.postfixOps
 
-import org.scalatest.FunSuite
-
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.util.Utils
 
 /**
   * Test the ConnectionManager with various security settings.
   */
-class ConnectionManagerSuite extends FunSuite {
+class ConnectionManagerSuite extends SparkFunSuite {
 
   test("security default off") {
     val conf = new SparkConf

http://git-wip-us.apache.org/repos/asf/spark/blob/e5747ee3/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
index f2b0ea1..ec99f2a 100644
--- a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
@@ -23,13 +23,13 @@ import scala.concurrent.{Await, TimeoutException}
 import scala.concurrent.duration.Duration
 import scala.concurrent.ExecutionContext.Implicits.global
 
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
 
-import org.apache.spark.{SparkContext, SparkException, LocalSparkContext}
+import org.apache.spark.{LocalSparkContext, SparkContext, SparkException, SparkFunSuite}
 
-class AsyncRDDActionsSuite extends FunSuite with BeforeAndAfterAll with Timeouts {
+class AsyncRDDActionsSuite extends SparkFunSuite with BeforeAndAfterAll with Timeouts {
 
   @transient private var sc: SparkContext = _
 

