Posted to commits@spark.apache.org by an...@apache.org on 2015/06/04 05:47:05 UTC

[6/6] spark git commit: [SPARK-7558] Demarcate tests in unit-tests.log (1.4)

[SPARK-7558] Demarcate tests in unit-tests.log (1.4)

This includes the following commits:

original: 9eb222c
hotfix1: 8c99793
hotfix2: a4f2412
scalastyle check: 609c492

---
Original patch #6441
Branch-1.3 patch #6602

Author: Andrew Or <an...@databricks.com>

Closes #6598 from andrewor14/demarcate-tests-1.4 and squashes the following commits:

4c3c566 [Andrew Or] Merge branch 'branch-1.4' of github.com:apache/spark into demarcate-tests-1.4
e217b78 [Andrew Or] [SPARK-7558] Guard against direct uses of FunSuite / FunSuiteLike
46d4361 [Andrew Or] Various whitespace changes (minor)
3d9bf04 [Andrew Or] Make all test suites extend SparkFunSuite instead of FunSuite
eaa520e [Andrew Or] Fix tests?
b4d93de [Andrew Or] Fix tests
634a777 [Andrew Or] Fix log message
a932e8d [Andrew Or] Fix manual things that cannot be covered through automation
8bc355d [Andrew Or] Add core tests as dependencies in all modules
75d361f [Andrew Or] Introduce base abstract class for all test suites
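
For reference: the new base class brackets each test's output in unit-tests.log.
Based on the logInfo calls in SparkFunSuite (added in the diff below), the
demarcation looks roughly like this for an illustrative suite and test name:

    ===== TEST OUTPUT FOR o.a.s.SparkContextSuite: 'Only one SparkContext may be active at a time' =====
    ... output logged while the test runs ...
    ===== FINISHED o.a.s.SparkContextSuite: 'Only one SparkContext may be active at a time' =====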


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bfe74b34
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bfe74b34
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bfe74b34

Branch: refs/heads/branch-1.4
Commit: bfe74b34a6ac6dada8749ffd9bcdc5f228741ea7
Parents: 584a2ba
Author: Andrew Or <an...@databricks.com>
Authored: Wed Jun 3 20:46:44 2015 -0700
Committer: Andrew Or <an...@databricks.com>
Committed: Wed Jun 3 20:46:44 2015 -0700

----------------------------------------------------------------------
 bagel/pom.xml                                   |  7 +++
 .../org/apache/spark/bagel/BagelSuite.scala     |  4 +-
 core/pom.xml                                    |  6 +++
 .../org/apache/spark/AccumulatorSuite.scala     |  3 +-
 .../org/apache/spark/CacheManagerSuite.scala    |  4 +-
 .../org/apache/spark/CheckpointSuite.scala      |  4 +-
 .../org/apache/spark/ContextCleanerSuite.scala  |  4 +-
 .../org/apache/spark/DistributedSuite.scala     |  3 +-
 .../scala/org/apache/spark/DriverSuite.scala    |  3 +-
 .../spark/ExecutorAllocationManagerSuite.scala  |  8 +++-
 .../scala/org/apache/spark/FailureSuite.scala   |  4 +-
 .../org/apache/spark/FileServerSuite.scala      |  3 +-
 .../test/scala/org/apache/spark/FileSuite.scala |  3 +-
 .../org/apache/spark/FutureActionSuite.scala    |  8 +++-
 .../apache/spark/HeartbeatReceiverSuite.scala   |  3 +-
 .../apache/spark/ImplicitOrderingSuite.scala    |  4 +-
 .../org/apache/spark/JobCancellationSuite.scala |  4 +-
 .../apache/spark/MapOutputTrackerSuite.scala    |  3 +-
 .../org/apache/spark/PartitioningSuite.scala    |  4 +-
 .../org/apache/spark/SSLOptionsSuite.scala      |  4 +-
 .../org/apache/spark/SecurityManagerSuite.scala |  4 +-
 .../scala/org/apache/spark/ShuffleSuite.scala   |  3 +-
 .../scala/org/apache/spark/SparkConfSuite.scala |  3 +-
 .../apache/spark/SparkContextInfoSuite.scala    |  4 +-
 .../SparkContextSchedulerCreationSuite.scala    |  4 +-
 .../org/apache/spark/SparkContextSuite.scala    |  4 +-
 .../scala/org/apache/spark/SparkFunSuite.scala  | 48 ++++++++++++++++++++
 .../org/apache/spark/StatusTrackerSuite.scala   |  4 +-
 .../scala/org/apache/spark/ThreadingSuite.scala |  3 +-
 .../scala/org/apache/spark/UnpersistSuite.scala |  3 +-
 .../spark/api/python/PythonBroadcastSuite.scala |  6 +--
 .../spark/api/python/PythonRDDSuite.scala       |  4 +-
 .../spark/api/python/SerDeUtilSuite.scala       |  6 +--
 .../apache/spark/broadcast/BroadcastSuite.scala |  6 +--
 .../org/apache/spark/deploy/ClientSuite.scala   |  5 +-
 .../apache/spark/deploy/JsonProtocolSuite.scala |  5 +-
 .../spark/deploy/LogUrlsStandaloneSuite.scala   |  6 +--
 .../apache/spark/deploy/PythonRunnerSuite.scala |  5 +-
 .../apache/spark/deploy/SparkSubmitSuite.scala  |  8 +++-
 .../spark/deploy/SparkSubmitUtilsSuite.scala    |  5 +-
 .../deploy/history/FsHistoryProviderSuite.scala |  6 +--
 .../deploy/history/HistoryServerSuite.scala     |  6 +--
 .../spark/deploy/master/MasterSuite.scala       |  6 +--
 .../deploy/rest/StandaloneRestSubmitSuite.scala |  4 +-
 .../deploy/rest/SubmitRestProtocolSuite.scala   |  5 +-
 .../spark/deploy/worker/CommandUtilsSuite.scala |  5 +-
 .../spark/deploy/worker/DriverRunnerTest.scala  |  5 +-
 .../deploy/worker/ExecutorRunnerTest.scala      |  6 +--
 .../deploy/worker/WorkerArgumentsTest.scala     |  5 +-
 .../spark/deploy/worker/WorkerSuite.scala       |  6 +--
 .../deploy/worker/WorkerWatcherSuite.scala      |  5 +-
 .../spark/deploy/worker/ui/LogPageSuite.scala   |  6 ++-
 .../spark/executor/TaskMetricsSuite.scala       |  4 +-
 .../input/WholeTextFileRecordReaderSuite.scala  |  5 +-
 .../apache/spark/io/CompressionCodecSuite.scala |  5 +-
 .../spark/metrics/InputOutputMetricsSuite.scala |  6 +--
 .../spark/metrics/MetricsConfigSuite.scala      |  6 ++-
 .../spark/metrics/MetricsSystemSuite.scala      |  6 +--
 .../netty/NettyBlockTransferSecuritySuite.scala |  6 +--
 .../netty/NettyBlockTransferServiceSuite.scala  |  8 +++-
 .../network/nio/ConnectionManagerSuite.scala    |  6 +--
 .../apache/spark/rdd/AsyncRDDActionsSuite.scala |  6 +--
 .../org/apache/spark/rdd/DoubleRDDSuite.scala   |  4 +-
 .../org/apache/spark/rdd/JdbcRDDSuite.scala     |  6 +--
 .../spark/rdd/PairRDDFunctionsSuite.scala       |  6 +--
 .../rdd/ParallelCollectionSplitSuite.scala      |  5 +-
 .../spark/rdd/PartitionPruningRDDSuite.scala    |  6 +--
 .../rdd/PartitionwiseSampledRDDSuite.scala      |  6 +--
 .../org/apache/spark/rdd/PipedRDDSuite.scala    |  3 +-
 .../spark/rdd/RDDOperationScopeSuite.scala      |  6 +--
 .../scala/org/apache/spark/rdd/RDDSuite.scala   |  4 +-
 .../org/apache/spark/rdd/SortingSuite.scala     |  5 +-
 .../spark/rdd/ZippedPartitionsSuite.scala       |  5 +-
 .../org/apache/spark/rpc/RpcEnvSuite.scala      |  6 +--
 .../CoarseGrainedSchedulerBackendSuite.scala    |  6 +--
 .../spark/scheduler/DAGSchedulerSuite.scala     |  4 +-
 .../scheduler/EventLoggingListenerSuite.scala   |  4 +-
 .../apache/spark/scheduler/MapStatusSuite.scala |  5 +-
 .../OutputCommitCoordinatorSuite.scala          |  4 +-
 .../org/apache/spark/scheduler/PoolSuite.scala  |  6 +--
 .../spark/scheduler/ReplayListenerSuite.scala   |  6 +--
 .../spark/scheduler/SparkListenerSuite.scala    |  6 +--
 .../SparkListenerWithClusterSuite.scala         |  8 ++--
 .../spark/scheduler/TaskContextSuite.scala      |  3 +-
 .../spark/scheduler/TaskResultGetterSuite.scala |  6 +--
 .../scheduler/TaskSchedulerImplSuite.scala      |  4 +-
 .../spark/scheduler/TaskSetManagerSuite.scala   |  4 +-
 .../cluster/mesos/MemoryUtilsSuite.scala        |  5 +-
 .../mesos/MesosSchedulerBackendSuite.scala      |  5 +-
 .../mesos/MesosTaskLaunchDataSuite.scala        |  4 +-
 .../mesos/MesosClusterSchedulerSuite.scala      |  5 +-
 .../spark/serializer/JavaSerializerSuite.scala  |  5 +-
 .../KryoSerializerDistributedSuite.scala        |  5 +-
 .../KryoSerializerResizableOutputSuite.scala    |  6 +--
 .../spark/serializer/KryoSerializerSuite.scala  |  7 ++-
 .../ProactiveClosureSerializationSuite.scala    |  6 +--
 .../serializer/SerializationDebuggerSuite.scala |  6 ++-
 .../serializer/SerializerPropertiesSuite.scala  |  6 +--
 .../shuffle/ShuffleMemoryManagerSuite.scala     |  5 +-
 .../shuffle/hash/HashShuffleManagerSuite.scala  |  6 +--
 .../unsafe/UnsafeShuffleManagerSuite.scala      |  4 +-
 .../status/api/v1/SimpleDateParamSuite.scala    |  6 ++-
 .../org/apache/spark/storage/BlockIdSuite.scala |  4 +-
 .../storage/BlockManagerReplicationSuite.scala  |  6 +--
 .../spark/storage/BlockManagerSuite.scala       |  4 +-
 .../spark/storage/BlockObjectWriterSuite.scala  |  6 +--
 .../spark/storage/DiskBlockManagerSuite.scala   |  6 +--
 .../spark/storage/FlatmapIteratorSuite.scala    |  5 +-
 .../apache/spark/storage/LocalDirsSuite.scala   |  6 +--
 .../ShuffleBlockFetcherIteratorSuite.scala      |  5 +-
 .../storage/StorageStatusListenerSuite.scala    |  5 +-
 .../org/apache/spark/storage/StorageSuite.scala |  4 +-
 .../org/apache/spark/ui/UISeleniumSuite.scala   |  2 +-
 .../scala/org/apache/spark/ui/UISuite.scala     |  5 +-
 .../ui/jobs/JobProgressListenerSuite.scala      |  3 +-
 .../scope/RDDOperationGraphListenerSuite.scala  |  6 +--
 .../spark/ui/storage/StorageTabSuite.scala      |  6 +--
 .../org/apache/spark/util/AkkaUtilsSuite.scala  |  3 +-
 .../apache/spark/util/ClosureCleanerSuite.scala |  6 +--
 .../spark/util/ClosureCleanerSuite2.scala       |  6 +--
 .../spark/util/CompletionIteratorSuite.scala    |  4 +-
 .../apache/spark/util/DistributionSuite.scala   |  5 +-
 .../org/apache/spark/util/EventLoopSuite.scala  |  5 +-
 .../apache/spark/util/FileAppenderSuite.scala   |  6 +--
 .../apache/spark/util/JsonProtocolSuite.scala   |  3 +-
 .../spark/util/MutableURLClassLoaderSuite.scala |  6 +--
 .../apache/spark/util/NextIteratorSuite.scala   |  5 +-
 .../spark/util/ResetSystemProperties.scala      |  4 +-
 .../apache/spark/util/SizeEstimatorSuite.scala  |  9 +++-
 .../apache/spark/util/ThreadUtilsSuite.scala    |  4 +-
 .../spark/util/TimeStampedHashMapSuite.scala    |  4 +-
 .../org/apache/spark/util/UtilsSuite.scala      |  5 +-
 .../org/apache/spark/util/VectorSuite.scala     |  4 +-
 .../util/collection/AppendOnlyMapSuite.scala    |  4 +-
 .../spark/util/collection/BitSetSuite.scala     |  4 +-
 .../util/collection/ChainedBufferSuite.scala    |  5 +-
 .../util/collection/CompactBufferSuite.scala    |  4 +-
 .../collection/ExternalAppendOnlyMapSuite.scala |  4 +-
 .../util/collection/ExternalSorterSuite.scala   |  4 +-
 .../util/collection/OpenHashMapSuite.scala      |  4 +-
 .../util/collection/OpenHashSetSuite.scala      |  4 +-
 .../PartitionedSerializedPairBufferSuite.scala  |  5 +-
 .../PrimitiveKeyOpenHashMapSuite.scala          |  4 +-
 .../util/collection/PrimitiveVectorSuite.scala  |  5 +-
 .../util/collection/SizeTrackerSuite.scala      |  5 +-
 .../spark/util/collection/SorterSuite.scala     |  5 +-
 .../io/ByteArrayChunkOutputStreamSuite.scala    |  4 +-
 .../spark/util/random/RandomSamplerSuite.scala  |  6 ++-
 .../spark/util/random/SamplingUtilsSuite.scala  |  5 +-
 .../spark/util/random/XORShiftRandomSuite.scala |  4 +-
 .../streaming/flume/sink/SparkSinkSuite.scala   |  9 ++++
 external/flume/pom.xml                          |  7 +++
 .../flume/FlumePollingStreamSuite.scala         |  6 +--
 .../streaming/flume/FlumeStreamSuite.scala      |  6 +--
 external/kafka/pom.xml                          |  7 +++
 .../kafka/DirectKafkaStreamSuite.scala          |  6 +--
 .../streaming/kafka/KafkaClusterSuite.scala     |  6 ++-
 .../spark/streaming/kafka/KafkaRDDSuite.scala   |  4 +-
 .../streaming/kafka/KafkaStreamSuite.scala      |  6 +--
 .../kafka/ReliableKafkaStreamSuite.scala        |  6 +--
 external/mqtt/pom.xml                           |  7 +++
 .../spark/streaming/mqtt/MQTTStreamSuite.scala  |  6 +--
 external/twitter/pom.xml                        |  7 +++
 .../streaming/twitter/TwitterStreamSuite.scala  |  6 +--
 external/zeromq/pom.xml                         |  7 +++
 .../streaming/zeromq/ZeroMQStreamSuite.scala    |  4 +-
 graphx/pom.xml                                  |  7 +++
 .../org/apache/spark/graphx/EdgeRDDSuite.scala  |  5 +-
 .../org/apache/spark/graphx/EdgeSuite.scala     |  4 +-
 .../org/apache/spark/graphx/GraphOpsSuite.scala |  5 +-
 .../org/apache/spark/graphx/GraphSuite.scala    |  6 +--
 .../org/apache/spark/graphx/PregelSuite.scala   |  6 +--
 .../apache/spark/graphx/VertexRDDSuite.scala    |  6 +--
 .../spark/graphx/impl/EdgePartitionSuite.scala  |  6 +--
 .../graphx/impl/VertexPartitionSuite.scala      |  6 +--
 .../graphx/lib/ConnectedComponentsSuite.scala   |  6 +--
 .../graphx/lib/LabelPropagationSuite.scala      |  5 +-
 .../apache/spark/graphx/lib/PageRankSuite.scala |  5 +-
 .../spark/graphx/lib/SVDPlusPlusSuite.scala     |  5 +-
 .../spark/graphx/lib/ShortestPathsSuite.scala   |  6 +--
 .../lib/StronglyConnectedComponentsSuite.scala  |  6 +--
 .../spark/graphx/lib/TriangleCountSuite.scala   |  5 +-
 .../spark/graphx/util/BytecodeUtilsSuite.scala  |  4 +-
 .../graphx/util/GraphGeneratorsSuite.scala      |  5 +-
 mllib/pom.xml                                   |  7 +++
 .../spark/ml/util/IdentifiableSuite.scala       |  4 +-
 .../org/apache/spark/ml/PipelineSuite.scala     |  4 +-
 .../ml/attribute/AttributeGroupSuite.scala      |  4 +-
 .../spark/ml/attribute/AttributeSuite.scala     |  5 +-
 .../DecisionTreeClassifierSuite.scala           |  7 ++-
 .../ml/classification/GBTClassifierSuite.scala  |  5 +-
 .../LogisticRegressionSuite.scala               |  5 +-
 .../ml/classification/OneVsRestSuite.scala      |  5 +-
 .../RandomForestClassifierSuite.scala           |  5 +-
 .../evaluation/RegressionEvaluatorSuite.scala   |  5 +-
 .../spark/ml/feature/BinarizerSuite.scala       |  5 +-
 .../spark/ml/feature/BucketizerSuite.scala      |  8 ++--
 .../spark/ml/feature/HashingTFSuite.scala       |  5 +-
 .../org/apache/spark/ml/feature/IDFSuite.scala  |  5 +-
 .../spark/ml/feature/NormalizerSuite.scala      |  5 +-
 .../spark/ml/feature/OneHotEncoderSuite.scala   |  5 +-
 .../ml/feature/PolynomialExpansionSuite.scala   |  4 +-
 .../spark/ml/feature/StringIndexerSuite.scala   |  5 +-
 .../spark/ml/feature/TokenizerSuite.scala       |  7 ++-
 .../spark/ml/feature/VectorAssemblerSuite.scala |  6 +--
 .../spark/ml/feature/VectorIndexerSuite.scala   |  6 +--
 .../apache/spark/ml/feature/Word2VecSuite.scala |  5 +-
 .../org/apache/spark/ml/impl/TreeTests.scala    |  5 +-
 .../org/apache/spark/ml/param/ParamsSuite.scala |  6 +--
 .../ml/param/shared/SharedParamsSuite.scala     |  5 +-
 .../spark/ml/recommendation/ALSSuite.scala      |  5 +-
 .../regression/DecisionTreeRegressorSuite.scala |  7 ++-
 .../spark/ml/regression/GBTRegressorSuite.scala |  5 +-
 .../ml/regression/LinearRegressionSuite.scala   |  5 +-
 .../regression/RandomForestRegressorSuite.scala |  7 ++-
 .../spark/ml/tuning/CrossValidatorSuite.scala   |  5 +-
 .../spark/ml/tuning/ParamGridBuilderSuite.scala |  5 +-
 .../mllib/api/python/PythonMLLibAPISuite.scala  |  5 +-
 .../LogisticRegressionSuite.scala               |  6 +--
 .../mllib/classification/NaiveBayesSuite.scala  |  7 ++-
 .../spark/mllib/classification/SVMSuite.scala   |  7 ++-
 .../StreamingLogisticRegressionSuite.scala      |  5 +-
 .../mllib/clustering/GaussianMixtureSuite.scala |  5 +-
 .../spark/mllib/clustering/KMeansSuite.scala    |  9 ++--
 .../spark/mllib/clustering/LDASuite.scala       |  5 +-
 .../PowerIterationClusteringSuite.scala         |  8 ++--
 .../mllib/clustering/StreamingKMeansSuite.scala |  5 +-
 .../mllib/evaluation/AreaUnderCurveSuite.scala  |  5 +-
 .../BinaryClassificationMetricsSuite.scala      |  5 +-
 .../evaluation/MulticlassMetricsSuite.scala     |  5 +-
 .../evaluation/MultilabelMetricsSuite.scala     |  5 +-
 .../mllib/evaluation/RankingMetricsSuite.scala  |  5 +-
 .../evaluation/RegressionMetricsSuite.scala     |  5 +-
 .../mllib/feature/ChiSqSelectorSuite.scala      |  5 +-
 .../mllib/feature/ElementwiseProductSuite.scala |  5 +-
 .../spark/mllib/feature/HashingTFSuite.scala    |  5 +-
 .../apache/spark/mllib/feature/IDFSuite.scala   |  5 +-
 .../spark/mllib/feature/NormalizerSuite.scala   |  5 +-
 .../apache/spark/mllib/feature/PCASuite.scala   |  5 +-
 .../mllib/feature/StandardScalerSuite.scala     |  5 +-
 .../spark/mllib/feature/Word2VecSuite.scala     |  5 +-
 .../apache/spark/mllib/fpm/FPGrowthSuite.scala  |  5 +-
 .../apache/spark/mllib/fpm/FPTreeSuite.scala    |  5 +-
 .../impl/PeriodicGraphCheckpointerSuite.scala   |  6 +--
 .../apache/spark/mllib/linalg/BLASSuite.scala   |  5 +-
 .../linalg/BreezeMatrixConversionSuite.scala    |  6 +--
 .../linalg/BreezeVectorConversionSuite.scala    |  6 +--
 .../spark/mllib/linalg/MatricesSuite.scala      |  4 +-
 .../spark/mllib/linalg/VectorsSuite.scala       |  5 +-
 .../linalg/distributed/BlockMatrixSuite.scala   |  5 +-
 .../distributed/CoordinateMatrixSuite.scala     |  5 +-
 .../distributed/IndexedRowMatrixSuite.scala     |  5 +-
 .../linalg/distributed/RowMatrixSuite.scala     |  6 +--
 .../optimization/GradientDescentSuite.scala     |  7 +--
 .../spark/mllib/optimization/LBFGSSuite.scala   |  7 +--
 .../spark/mllib/optimization/NNLSSuite.scala    |  5 +-
 ...naryClassificationPMMLModelExportSuite.scala |  4 +-
 .../GeneralizedLinearPMMLModelExportSuite.scala |  4 +-
 .../export/KMeansPMMLModelExportSuite.scala     |  4 +-
 .../export/PMMLModelExportFactorySuite.scala    |  5 +-
 .../mllib/random/RandomDataGeneratorSuite.scala |  5 +-
 .../spark/mllib/random/RandomRDDsSuite.scala    |  5 +-
 .../mllib/rdd/MLPairRDDFunctionsSuite.scala     |  5 +-
 .../spark/mllib/rdd/RDDFunctionsSuite.scala     |  5 +-
 .../spark/mllib/recommendation/ALSSuite.scala   |  4 +-
 .../MatrixFactorizationModelSuite.scala         |  5 +-
 .../regression/IsotonicRegressionSuite.scala    |  5 +-
 .../mllib/regression/LabeledPointSuite.scala    |  5 +-
 .../spark/mllib/regression/LassoSuite.scala     |  7 ++-
 .../regression/LinearRegressionSuite.scala      |  7 ++-
 .../mllib/regression/RidgeRegressionSuite.scala |  6 +--
 .../StreamingLinearRegressionSuite.scala        |  5 +-
 .../spark/mllib/stat/CorrelationSuite.scala     |  5 +-
 .../spark/mllib/stat/HypothesisTestSuite.scala  |  6 +--
 .../spark/mllib/stat/KernelDensitySuite.scala   |  4 +-
 .../MultivariateOnlineSummarizerSuite.scala     |  5 +-
 .../MultivariateGaussianSuite.scala             |  5 +-
 .../spark/mllib/tree/DecisionTreeSuite.scala    |  7 ++-
 .../mllib/tree/GradientBoostedTreesSuite.scala  |  5 +-
 .../apache/spark/mllib/tree/ImpuritySuite.scala |  5 +-
 .../spark/mllib/tree/RandomForestSuite.scala    |  5 +-
 .../mllib/tree/impl/BaggedPointSuite.scala      |  5 +-
 .../apache/spark/mllib/util/MLUtilsSuite.scala  |  5 +-
 .../spark/mllib/util/NumericParserSuite.scala   |  6 +--
 .../spark/mllib/util/TestingUtilsSuite.scala    |  4 +-
 repl/pom.xml                                    |  7 +++
 .../scala/org/apache/spark/repl/ReplSuite.scala |  5 +-
 .../scala/org/apache/spark/repl/ReplSuite.scala |  5 +-
 .../spark/repl/ExecutorClassLoaderSuite.scala   |  3 +-
 scalastyle-config.xml                           |  7 +++
 sql/catalyst/pom.xml                            |  7 +++
 .../spark/sql/catalyst/DistributionSuite.scala  |  5 +-
 .../sql/catalyst/ScalaReflectionSuite.scala     |  5 +-
 .../spark/sql/catalyst/SqlParserSuite.scala     |  4 +-
 .../sql/catalyst/analysis/AnalysisSuite.scala   |  5 +-
 .../analysis/DecimalPrecisionSuite.scala        |  5 +-
 .../expressions/AttributeSetSuite.scala         |  5 +-
 .../expressions/ExpressionEvaluationSuite.scala |  4 +-
 .../UnsafeFixedWidthAggregationMapSuite.scala   |  8 +++-
 .../expressions/UnsafeRowConverterSuite.scala   |  5 +-
 .../spark/sql/catalyst/plans/PlanTest.scala     |  5 +-
 .../sql/catalyst/plans/SameResultSuite.scala    |  5 +-
 .../sql/catalyst/trees/RuleExecutorSuite.scala  |  5 +-
 .../sql/catalyst/trees/TreeNodeSuite.scala      |  5 +-
 .../spark/sql/catalyst/util/MetadataSuite.scala |  4 +-
 .../spark/sql/types/DataTypeParserSuite.scala   |  4 +-
 .../apache/spark/sql/types/DataTypeSuite.scala  |  4 +-
 .../spark/sql/types/UTF8StringSuite.scala       |  4 +-
 .../spark/sql/types/decimal/DecimalSuite.scala  |  5 +-
 sql/core/pom.xml                                |  7 +++
 .../apache/spark/sql/DataFrameStatSuite.scala   |  4 +-
 .../apache/spark/sql/MathExpressionsSuite.scala |  2 +-
 .../scala/org/apache/spark/sql/RowSuite.scala   |  4 +-
 .../org/apache/spark/sql/SQLConfSuite.scala     |  5 +-
 .../org/apache/spark/sql/SQLContextSuite.scala  |  5 +-
 .../sql/ScalaReflectionRelationSuite.scala      |  5 +-
 .../apache/spark/sql/SerializationSuite.scala   |  6 +--
 .../spark/sql/columnar/ColumnStatsSuite.scala   |  5 +-
 .../spark/sql/columnar/ColumnTypeSuite.scala    |  5 +-
 .../columnar/NullableColumnAccessorSuite.scala  |  5 +-
 .../columnar/NullableColumnBuilderSuite.scala   |  5 +-
 .../columnar/PartitionBatchPruningSuite.scala   |  5 +-
 .../compression/BooleanBitSetSuite.scala        |  5 +-
 .../compression/DictionaryEncodingSuite.scala   |  5 +-
 .../compression/IntegralDeltaSuite.scala        |  5 +-
 .../compression/RunLengthEncodingSuite.scala    |  5 +-
 .../spark/sql/execution/PlannerSuite.scala      |  5 +-
 .../execution/SparkSqlSerializer2Suite.scala    |  6 +--
 .../sql/execution/debug/DebuggingSuite.scala    |  5 +-
 .../execution/joins/HashedRelationSuite.scala   |  5 +-
 .../org/apache/spark/sql/jdbc/JDBCSuite.scala   |  5 +-
 .../apache/spark/sql/jdbc/JDBCWriteSuite.scala  |  5 +-
 .../spark/sql/parquet/ParquetSchemaSuite.scala  |  4 +-
 .../sql/sources/ResolvedDataSourceSuite.scala   |  4 +-
 sql/hive-thriftserver/pom.xml                   |  7 +++
 .../spark/sql/hive/thriftserver/CliSuite.scala  |  6 +--
 .../thriftserver/HiveThriftServer2Suites.scala  |  6 +--
 sql/hive/pom.xml                                |  7 +++
 .../spark/sql/hive/HiveInspectorSuite.scala     |  4 +-
 .../sql/hive/HiveMetastoreCatalogSuite.scala    |  4 +-
 .../org/apache/spark/sql/hive/HiveQlSuite.scala |  5 +-
 .../spark/sql/hive/SerializationSuite.scala     |  6 +--
 .../spark/sql/hive/client/VersionsSuite.scala   |  5 +-
 .../hive/execution/ConcurrentHiveSuite.scala    |  6 +--
 .../sql/hive/execution/HiveComparisonTest.scala |  6 +--
 .../hive/orc/OrcPartitionDiscoverySuite.scala   |  5 +-
 .../spark/sql/hive/orc/OrcQuerySuite.scala      |  5 +-
 .../sql/sources/hadoopFsRelationSuites.scala    |  5 +-
 streaming/pom.xml                               |  7 +++
 .../spark/streaming/DStreamClosureSuite.scala   |  6 +--
 .../spark/streaming/DStreamScopeSuite.scala     |  6 +--
 .../streaming/ReceivedBlockHandlerSuite.scala   |  8 +++-
 .../streaming/ReceivedBlockTrackerSuite.scala   |  6 +--
 .../spark/streaming/StreamingContextSuite.scala |  6 +--
 .../apache/spark/streaming/TestSuiteBase.scala  |  6 +--
 .../spark/streaming/UISeleniumSuite.scala       |  2 +-
 .../rdd/WriteAheadLogBackedBlockRDDSuite.scala  |  6 +--
 .../scheduler/InputInfoTrackerSuite.scala       |  6 +--
 .../spark/streaming/ui/UIUtilsSuite.scala       |  5 +-
 .../util/RateLimitedOutputStreamSuite.scala     |  4 +-
 .../streaming/util/WriteAheadLogSuite.scala     |  6 +--
 yarn/pom.xml                                    |  7 +++
 .../ClientDistributedCacheManagerSuite.scala    |  5 +-
 .../apache/spark/deploy/yarn/ClientSuite.scala  |  6 +--
 .../spark/deploy/yarn/YarnAllocatorSuite.scala  |  6 +--
 .../spark/deploy/yarn/YarnClusterSuite.scala    |  6 +--
 .../deploy/yarn/YarnSparkHadoopUtilSuite.scala  |  6 +--
 367 files changed, 980 insertions(+), 969 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/bagel/pom.xml
----------------------------------------------------------------------
diff --git a/bagel/pom.xml b/bagel/pom.xml
index 1f3dec9..132cd43 100644
--- a/bagel/pom.xml
+++ b/bagel/pom.xml
@@ -41,6 +41,13 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-core_${scala.binary.version}</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.scalacheck</groupId>
       <artifactId>scalacheck_${scala.binary.version}</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
----------------------------------------------------------------------
diff --git a/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
index ccb262a..fb10d73 100644
--- a/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
+++ b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.bagel
 
-import org.scalatest.{BeforeAndAfter, FunSuite, Assertions}
+import org.scalatest.{BeforeAndAfter, Assertions}
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
 
@@ -27,7 +27,7 @@ import org.apache.spark.storage.StorageLevel
 class TestVertex(val active: Boolean, val age: Int) extends Vertex with Serializable
 class TestMessage(val targetId: String) extends Message[String] with Serializable
 
-class BagelSuite extends FunSuite with Assertions with BeforeAndAfter with Timeouts {
+class BagelSuite extends SparkFunSuite with Assertions with BeforeAndAfter with Timeouts {
 
   var sc: SparkContext = _
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index 1f903fc..a021842 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -338,6 +338,12 @@
     <dependency>
       <groupId>org.seleniumhq.selenium</groupId>
       <artifactId>selenium-java</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
       <scope>test</scope>
     </dependency>
     <!-- Added for selenium: -->

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 746a40a..e942d65 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -20,11 +20,10 @@ package org.apache.spark
 import scala.collection.mutable
 import scala.ref.WeakReference
 
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 
 
-class AccumulatorSuite extends FunSuite with Matchers with LocalSparkContext {
+class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext {
 
 
   implicit def setAccum[A]: AccumulableParam[mutable.Set[A], A] =

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala b/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala
index 668ddf9..af81e46 100644
--- a/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CacheManagerSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark
 
 import org.mockito.Mockito._
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.mock.MockitoSugar
 
 import org.apache.spark.executor.DataReadMethod
@@ -26,7 +26,7 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.storage._
 
 // TODO: Test the CacheManager's thread-safety aspects
-class CacheManagerSuite extends FunSuite with LocalSparkContext with BeforeAndAfter
+class CacheManagerSuite extends SparkFunSuite with LocalSparkContext with BeforeAndAfter
   with MockitoSugar {
 
   var blockManager: BlockManager = _

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index 91d8fde..d1761a4 100644
--- a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -21,13 +21,11 @@ import java.io.File
 
 import scala.reflect.ClassTag
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.rdd._
 import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId}
 import org.apache.spark.util.Utils
 
-class CheckpointSuite extends FunSuite with LocalSparkContext with Logging {
+class CheckpointSuite extends SparkFunSuite with LocalSparkContext with Logging {
   var checkpointDir: File = _
   val partitioner = new HashPartitioner(2)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
index 4a48f65..501fe18 100644
--- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -23,7 +23,7 @@ import scala.collection.mutable.{HashSet, SynchronizedSet}
 import scala.language.existentials
 import scala.util.Random
 
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
@@ -44,7 +44,7 @@ import org.apache.spark.storage.ShuffleIndexBlockId
  * config options, in particular, a different shuffle manager class
  */
 abstract class ContextCleanerSuiteBase(val shuffleManager: Class[_] = classOf[HashShuffleManager])
-  extends FunSuite with BeforeAndAfter with LocalSparkContext
+  extends SparkFunSuite with BeforeAndAfter with LocalSparkContext
 {
   implicit val defaultTimeout = timeout(10000 millis)
   val conf = new SparkConf()

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/DistributedSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index 96a9c20..9c191ed 100644
--- a/core/src/test/scala/org/apache/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.Matchers
 import org.scalatest.time.{Millis, Span}
@@ -28,7 +27,7 @@ class NotSerializableClass
 class NotSerializableExn(val notSer: NotSerializableClass) extends Throwable() {}
 
 
-class DistributedSuite extends FunSuite with Matchers with LocalSparkContext {
+class DistributedSuite extends SparkFunSuite with Matchers with LocalSparkContext {
 
   val clusterUrl = "local-cluster[2,1,512]"
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/DriverSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala
index c42dfbc..b226203 100644
--- a/core/src/test/scala/org/apache/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -19,14 +19,13 @@ package org.apache.spark
 
 import java.io.File
 
-import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.prop.TableDrivenPropertyChecks._
 import org.scalatest.time.SpanSugar._
 
 import org.apache.spark.util.Utils
 
-class DriverSuite extends FunSuite with Timeouts {
+class DriverSuite extends SparkFunSuite with Timeouts {
 
   ignore("driver should exit after finishing without cleanup (SPARK-530)") {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
index 84f787e..1c2b681 100644
--- a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark
 
 import scala.collection.mutable
 
-import org.scalatest.{BeforeAndAfter, FunSuite, PrivateMethodTester}
+import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.scheduler._
 import org.apache.spark.scheduler.cluster.ExecutorInfo
@@ -28,7 +28,11 @@ import org.apache.spark.util.ManualClock
 /**
  * Test add and remove behavior of ExecutorAllocationManager.
  */
-class ExecutorAllocationManagerSuite extends FunSuite with LocalSparkContext with BeforeAndAfter {
+class ExecutorAllocationManagerSuite
+  extends SparkFunSuite
+  with LocalSparkContext
+  with BeforeAndAfter {
+
   import ExecutorAllocationManager._
   import ExecutorAllocationManagerSuite._
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/FailureSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index e50da1c..a8c8c6f 100644
--- a/core/src/test/scala/org/apache/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.util.NonSerializable
 
 import java.io.NotSerializableException
@@ -38,7 +36,7 @@ object FailureSuiteState {
   }
 }
 
-class FailureSuite extends FunSuite with LocalSparkContext {
+class FailureSuite extends SparkFunSuite with LocalSparkContext {
 
   // Run a 3-task map job in which task 1 deterministically fails once, and check
   // whether the job completes successfully and we ran 4 tasks in total.

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/FileServerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
index bff2d10..6e65b0a 100644
--- a/core/src/test/scala/org/apache/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -24,13 +24,12 @@ import javax.net.ssl.SSLException
 
 import com.google.common.io.{ByteStreams, Files}
 import org.apache.commons.lang3.RandomUtils
-import org.scalatest.FunSuite
 
 import org.apache.spark.util.Utils
 
 import SSLSampleConfigs._
 
-class FileServerSuite extends FunSuite with LocalSparkContext {
+class FileServerSuite extends SparkFunSuite with LocalSparkContext {
 
   @transient var tmpDir: File = _
   @transient var tmpFile: File = _

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/FileSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index d67de86..1d8fade 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -30,12 +30,11 @@ import org.apache.hadoop.mapred.{JobConf, FileAlreadyExistsException, FileSplit,
 import org.apache.hadoop.mapreduce.Job
 import org.apache.hadoop.mapreduce.lib.input.{FileSplit => NewFileSplit, TextInputFormat => NewTextInputFormat}
 import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
-import org.scalatest.FunSuite
 
 import org.apache.spark.rdd.{NewHadoopRDD, HadoopRDD}
 import org.apache.spark.util.Utils
 
-class FileSuite extends FunSuite with LocalSparkContext {
+class FileSuite extends SparkFunSuite with LocalSparkContext {
   var tempDir: File = _
 
   override def beforeEach() {

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FutureActionSuite.scala b/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
index f5cdb01..1102aea 100644
--- a/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
@@ -20,10 +20,14 @@ package org.apache.spark
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
 
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+import org.scalatest.{BeforeAndAfter, Matchers}
 
 
-class FutureActionSuite extends FunSuite with BeforeAndAfter with Matchers with LocalSparkContext {
+class FutureActionSuite
+  extends SparkFunSuite
+  with BeforeAndAfter
+  with Matchers
+  with LocalSparkContext {
 
   before {
     sc = new SparkContext("local", "FutureActionSuite")

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
index b789912..911b3bd 100644
--- a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
@@ -22,7 +22,6 @@ import scala.language.postfixOps
 
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.storage.BlockManagerId
-import org.scalatest.FunSuite
 import org.mockito.Mockito.{mock, spy, verify, when}
 import org.mockito.Matchers
 import org.mockito.Matchers._
@@ -31,7 +30,7 @@ import org.apache.spark.scheduler.TaskScheduler
 import org.apache.spark.util.RpcUtils
 import org.scalatest.concurrent.Eventually._
 
-class HeartbeatReceiverSuite extends FunSuite with LocalSparkContext {
+class HeartbeatReceiverSuite extends SparkFunSuite with LocalSparkContext {
 
   test("HeartbeatReceiver") {
     sc = spy(new SparkContext("local[2]", "test"))

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
index 67a4a97..4399f25 100644
--- a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
@@ -17,11 +17,9 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.rdd.RDD
 
-class ImplicitOrderingSuite extends FunSuite with LocalSparkContext {
+class ImplicitOrderingSuite extends SparkFunSuite with LocalSparkContext {
   // Tests that PairRDDFunctions grabs an implicit Ordering in various cases where it should.
   test("basic inference of Orderings"){
     sc = new SparkContext("local", "test")

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
index ae17fc6..340a9e3 100644
--- a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
@@ -24,7 +24,7 @@ import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.duration._
 import scala.concurrent.future
 
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers
 
 import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
@@ -34,7 +34,7 @@ import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
  * (e.g. count) as well as multi-job action (e.g. take). We test the local and cluster schedulers
  * in both FIFO and fair scheduling modes.
  */
-class JobCancellationSuite extends FunSuite with Matchers with BeforeAndAfter
+class JobCancellationSuite extends SparkFunSuite with Matchers with BeforeAndAfter
   with LocalSparkContext {
 
   override def afterEach() {

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index 6ed057a..1fab696 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -19,14 +19,13 @@ package org.apache.spark
 
 import org.mockito.Mockito._
 import org.mockito.Matchers.{any, isA}
-import org.scalatest.FunSuite
 
 import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcCallContext, RpcEnv}
 import org.apache.spark.scheduler.{CompressedMapStatus, MapStatus}
 import org.apache.spark.shuffle.FetchFailedException
 import org.apache.spark.storage.BlockManagerId
 
-class MapOutputTrackerSuite extends FunSuite {
+class MapOutputTrackerSuite extends SparkFunSuite {
   private val conf = new SparkConf
 
   def createRpcEnv(name: String, host: String = "localhost", port: Int = 0,

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index 47e3bf6..3316f56 100644
--- a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -20,12 +20,12 @@ package org.apache.spark
 import scala.collection.mutable.ArrayBuffer
 import scala.math.abs
 
-import org.scalatest.{FunSuite, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.rdd.RDD
 import org.apache.spark.util.StatCounter
 
-class PartitioningSuite extends FunSuite with SharedSparkContext with PrivateMethodTester {
+class PartitioningSuite extends SparkFunSuite with SharedSparkContext with PrivateMethodTester {
 
   test("HashPartitioner equality") {
     val p2 = new HashPartitioner(2)

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
index 93f46ef..376481b 100644
--- a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
@@ -21,9 +21,9 @@ import java.io.File
 
 import com.google.common.io.Files
 import org.apache.spark.util.Utils
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
 
-class SSLOptionsSuite extends FunSuite with BeforeAndAfterAll {
+class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
   test("test resolving property file as spark conf ") {
     val keyStorePath = new File(this.getClass.getResource("/keystore").toURI).getAbsolutePath

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala b/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
index 61571be..e9b64aa 100644
--- a/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
@@ -19,11 +19,9 @@ package org.apache.spark
 
 import java.io.File
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.util.Utils
 
-class SecurityManagerSuite extends FunSuite {
+class SecurityManagerSuite extends SparkFunSuite {
 
   test("set security with conf") {
     val conf = new SparkConf

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index d718051..91f4ab3 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 
 import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
@@ -26,7 +25,7 @@ import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.storage.{ShuffleDataBlockId, ShuffleBlockId}
 import org.apache.spark.util.MutablePair
 
-abstract class ShuffleSuite extends FunSuite with Matchers with LocalSparkContext {
+abstract class ShuffleSuite extends SparkFunSuite with Matchers with LocalSparkContext {
 
   val conf = new SparkConf(loadDefaults = false)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index fafc9d4..9fbaeb3 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -23,13 +23,12 @@ import scala.concurrent.duration._
 import scala.language.postfixOps
 import scala.util.{Try, Random}
 
-import org.scalatest.FunSuite
 import org.apache.spark.network.util.ByteUnit
 import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
 import org.apache.spark.util.{RpcUtils, ResetSystemProperties}
 import com.esotericsoftware.kryo.Kryo
 
-class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemProperties {
+class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
   test("Test byteString conversion") {
     val conf = new SparkConf()
     // Simply exercise the API, we don't need a complete conversion test since that's handled in

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
index e6ab538..2bdbd70 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
@@ -17,10 +17,10 @@
 
 package org.apache.spark
 
-import org.scalatest.{Assertions, FunSuite}
+import org.scalatest.Assertions
 import org.apache.spark.storage.StorageLevel
 
-class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
+class SparkContextInfoSuite extends SparkFunSuite with LocalSparkContext {
   test("getPersistentRDDs only returns RDDs that are marked as cached") {
     sc = new SparkContext("local", "test")
     assert(sc.getPersistentRDDs.isEmpty === true)

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
index bbed8dd..bf72479 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark
 
-import org.scalatest.{FunSuite, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.scheduler.{SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
 import org.apache.spark.scheduler.cluster.{SimrSchedulerBackend, SparkDeploySchedulerBackend}
@@ -25,7 +25,7 @@ import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, Me
 import org.apache.spark.scheduler.local.LocalBackend
 
 class SparkContextSchedulerCreationSuite
-  extends FunSuite with LocalSparkContext with PrivateMethodTester with Logging {
+  extends SparkFunSuite with LocalSparkContext with PrivateMethodTester with Logging {
 
   def createTaskScheduler(master: String): TaskSchedulerImpl =
     createTaskScheduler(master, new SparkConf())

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 6b43b4e..6838b35 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -23,8 +23,6 @@ import java.util.concurrent.TimeUnit
 import com.google.common.base.Charsets._
 import com.google.common.io.Files
 
-import org.scalatest.FunSuite
-
 import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
@@ -33,7 +31,7 @@ import org.apache.spark.util.Utils
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
 
-class SparkContextSuite extends FunSuite with LocalSparkContext {
+class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
 
   test("Only one SparkContext may be active at a time") {
     // Regression test for SPARK-4180

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
new file mode 100644
index 0000000..8cb3443
--- /dev/null
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark
+
+// scalastyle:off
+import org.scalatest.{FunSuite, Outcome}
+
+/**
+ * Base abstract class for all unit tests in Spark for handling common functionality.
+ */
+private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
+// scalastyle:on
+
+  /**
+   * Log the suite name and the test name before and after each test.
+   *
+   * Subclasses should never override this method. If they wish to run
 +   * custom code before and after each test, they should mix in
+   * the {{org.scalatest.BeforeAndAfter}} trait instead.
+   */
+  final protected override def withFixture(test: NoArgTest): Outcome = {
+    val testName = test.text
+    val suiteName = this.getClass.getName
+    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
+    try {
+      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
+      test()
+    } finally {
+      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
+    }
+  }
+
+}
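
A minimal sketch of how a suite opts in (assuming the module declares the
spark-core test-jar dependency added throughout this patch; SkeletonSuite is a
hypothetical example, not part of this commit):

    package org.apache.spark

    // Extending SparkFunSuite instead of org.scalatest.FunSuite is the only
    // change a suite needs; the final withFixture above then logs the
    // "TEST OUTPUT FOR" / "FINISHED" demarcations around each test.
    class SkeletonSuite extends SparkFunSuite {
      test("an example test") {
        assert(1 + 1 === 2)
      }
    }

Per the scaladoc above, suites that need their own setup/teardown logic mix in
org.scalatest.BeforeAndAfter rather than overriding withFixture.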

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala b/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
index 084eb23..46516e8 100644
--- a/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
@@ -21,12 +21,12 @@ import scala.concurrent.duration._
 import scala.language.implicitConversions
 import scala.language.postfixOps
 
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark.JobExecutionStatus._
 
-class StatusTrackerSuite extends FunSuite with Matchers with LocalSparkContext {
+class StatusTrackerSuite extends SparkFunSuite with Matchers with LocalSparkContext {
 
   test("basic status API usage") {
     sc = new SparkContext("local", "test", new SparkConf(false))

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index 10917c8..6580139 100644
--- a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -22,7 +22,6 @@ import java.util.concurrent.atomic.AtomicBoolean
 import java.util.concurrent.atomic.AtomicInteger
 
 import org.apache.spark.scheduler._
-import org.scalatest.FunSuite
 
 /**
  * Holds state shared across task threads in some ThreadingSuite tests.
@@ -37,7 +36,7 @@ object ThreadingSuiteState {
   }
 }
 
-class ThreadingSuite extends FunSuite with LocalSparkContext {
+class ThreadingSuite extends SparkFunSuite with LocalSparkContext {
 
   test("accessing SparkContext form a different thread") {
     sc = new SparkContext("local", "test")

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
index 42ff059..f7a13ab 100644
--- a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
+++ b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
@@ -17,11 +17,10 @@
 
 package org.apache.spark
 
-import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.time.{Millis, Span}
 
-class UnpersistSuite extends FunSuite with LocalSparkContext {
+class UnpersistSuite extends SparkFunSuite with LocalSparkContext {
   test("unpersist RDD") {
     sc = new SparkContext("local", "test")
     val rdd = sc.makeRDD(Array(1, 2, 3, 4), 2).cache()

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
index 8959a84..135c56b 100644
--- a/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
@@ -21,15 +21,15 @@ import scala.io.Source
 
 import java.io.{PrintWriter, File}
 
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers
 
-import org.apache.spark.{SharedSparkContext, SparkConf}
+import org.apache.spark.{SharedSparkContext, SparkConf, SparkFunSuite}
 import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.util.Utils
 
 // This test suite uses SharedSparkContext because we need a SparkEnv in order to deserialize
 // a PythonBroadcast:
-class PythonBroadcastSuite extends FunSuite with Matchers with SharedSparkContext {
+class PythonBroadcastSuite extends SparkFunSuite with Matchers with SharedSparkContext {
   test("PythonBroadcast can be serialized with Kryo (SPARK-4882)") {
     val tempDir = Utils.createTempDir()
     val broadcastedString = "Hello, world!"

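As the comment in this hunk notes, deserializing a PythonBroadcast needs a live SparkEnv, which SharedSparkContext provides. The serialization round trip at the heart of the SPARK-4882 test can be sketched as below; the helper object is hypothetical, and the real test round-trips a PythonBroadcast backed by a temp file rather than a plain value.

    import scala.reflect.ClassTag

    import org.apache.spark.SparkConf
    import org.apache.spark.serializer.KryoSerializer

    object KryoRoundTrip {
      // Serialize and immediately deserialize a value with Kryo; a
      // regression test then asserts the copy matches the original.
      def roundTrip[T: ClassTag](conf: SparkConf, value: T): T = {
        val ser = new KryoSerializer(conf).newInstance()
        ser.deserialize[T](ser.serialize(value))
      }
    }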
http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
index c63d834..41f2a5c 100644
--- a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala
@@ -19,9 +19,9 @@ package org.apache.spark.api.python
 
 import java.io.{ByteArrayOutputStream, DataOutputStream}
 
-import org.scalatest.FunSuite
+import org.apache.spark.SparkFunSuite
 
-class PythonRDDSuite extends FunSuite {
+class PythonRDDSuite extends SparkFunSuite {
 
   test("Writing large strings to the worker") {
     val input: List[String] = List("a"*100000)

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala b/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala
index f8c3932..267a79f 100644
--- a/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/SerDeUtilSuite.scala
@@ -17,11 +17,9 @@
 
 package org.apache.spark.api.python
 
-import org.scalatest.FunSuite
+import org.apache.spark.{SharedSparkContext, SparkFunSuite}
 
-import org.apache.spark.SharedSparkContext
-
-class SerDeUtilSuite extends FunSuite with SharedSparkContext {
+class SerDeUtilSuite extends SparkFunSuite with SharedSparkContext {
 
   test("Converting an empty pair RDD to python does not throw an exception (SPARK-5441)") {
     val emptyRdd = sc.makeRDD(Seq[(Any, Any)]())

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala b/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
index 4c85857..e1b9070 100644
--- a/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
+++ b/core/src/test/scala/org/apache/spark/broadcast/BroadcastSuite.scala
@@ -19,10 +19,10 @@ package org.apache.spark.broadcast
 
 import scala.util.Random
 
-import org.scalatest.{Assertions, FunSuite}
+import org.scalatest.Assertions
 import org.scalatest.concurrent.Eventually._
 
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkException, SparkEnv}
+import org.apache.spark._
 import org.apache.spark.io.SnappyCompressionCodec
 import org.apache.spark.rdd.RDD
 import org.apache.spark.serializer.JavaSerializer
@@ -44,7 +44,7 @@ class DummyBroadcastClass(rdd: RDD[Int]) extends Serializable {
   }
 }
 
-class BroadcastSuite extends FunSuite with LocalSparkContext {
+class BroadcastSuite extends SparkFunSuite with LocalSparkContext {
 
   private val httpConf = broadcastConf("HttpBroadcastFactory")
   private val torrentConf = broadcastConf("TorrentBroadcastFactory")

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala b/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala
index 745f9ee..6a99dbc 100644
--- a/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.deploy
 
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 
-class ClientSuite extends FunSuite with Matchers {
+import org.apache.spark.SparkFunSuite
+
+class ClientSuite extends SparkFunSuite with Matchers {
   test("correctly validates driver jar URL's") {
     ClientArguments.isValidJarUrl("http://someHost:8080/foo.jar") should be (true)
     ClientArguments.isValidJarUrl("https://someHost:8080/foo.jar") should be (true)

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index e04a792..08529e0 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -23,14 +23,13 @@ import java.util.Date
 import com.fasterxml.jackson.core.JsonParseException
 import org.json4s._
 import org.json4s.jackson.JsonMethods
-import org.scalatest.FunSuite
 
 import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
 import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, RecoveryState, WorkerInfo}
 import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}
-import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf}
+import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf, SparkFunSuite}
 
-class JsonProtocolSuite extends FunSuite with JsonTestUtils {
+class JsonProtocolSuite extends SparkFunSuite with JsonTestUtils {
 
   test("writeApplicationInfo") {
     val output = JsonProtocol.writeApplicationInfo(createAppInfo())

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
index 82f506c..ddc9281 100644
--- a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
@@ -23,13 +23,11 @@ import scala.collection.JavaConversions._
 import scala.collection.mutable
 import scala.io.Source
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.scheduler.{SparkListenerExecutorAdded, SparkListener}
-import org.apache.spark.{SparkConf, SparkContext, LocalSparkContext}
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
 
-class LogUrlsStandaloneSuite extends FunSuite with LocalSparkContext {
+class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
 
   /** Length of time to wait while draining listener events. */
   private val WAIT_TIMEOUT_MILLIS = 10000

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
index 80f2cc0..473a2d7 100644
--- a/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/PythonRunnerSuite.scala
@@ -17,11 +17,10 @@
 
 package org.apache.spark.deploy
 
-import org.scalatest.FunSuite
-
+import org.apache.spark.SparkFunSuite
 import org.apache.spark.util.Utils
 
-class PythonRunnerSuite extends FunSuite {
+class PythonRunnerSuite extends SparkFunSuite {
 
   // Test formatting a single path to be added to the PYTHONPATH
   test("format path") {

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index ea9227a..4636945 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -23,7 +23,6 @@ import scala.collection.mutable.ArrayBuffer
 
 import com.google.common.base.Charsets.UTF_8
 import com.google.common.io.ByteStreams
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
@@ -35,7 +34,12 @@ import org.apache.spark.util.{ResetSystemProperties, Utils}
 
 // Note: this suite mixes in ResetSystemProperties because SparkSubmit.main() sets a bunch
 // of properties that need to be cleared after tests.
-class SparkSubmitSuite extends FunSuite with Matchers with ResetSystemProperties with Timeouts {
+class SparkSubmitSuite
+  extends SparkFunSuite
+  with Matchers
+  with ResetSystemProperties
+  with Timeouts {
+
   def beforeAll() {
     System.setProperty("spark.testing", "true")
   }

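SparkSubmitSuite is the one suite in this file whose extends clause outgrows a single line after the rename, so the declaration is split with one mixin per line. A sketch of the same layout (the suite name and the 100-character line limit are assumptions, not taken from this patch):

    import org.scalatest.{BeforeAndAfterAll, Matchers}

    import org.apache.spark.SparkFunSuite

    // One mixin per line keeps a long extends clause under the line-length
    // limit without wrapping mid-clause.
    class MyLongSuite
      extends SparkFunSuite
      with Matchers
      with BeforeAndAfterAll {

      test("addition") {
        (1 + 1) should be (2)
      }
    }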
http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index 088ca3c..8fda5c8 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -20,15 +20,16 @@ package org.apache.spark.deploy
 import java.io.{File, PrintStream, OutputStream}
 
 import scala.collection.mutable.ArrayBuffer
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
 
 import org.apache.ivy.core.module.descriptor.MDArtifact
 import org.apache.ivy.core.settings.IvySettings
 import org.apache.ivy.plugins.resolver.IBiblioResolver
 
+import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
 
-class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
+class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
   private val noOpOutputStream = new OutputStream {
     def write(b: Int) = {}

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index a0a0afa..0f6933d 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -25,15 +25,15 @@ import scala.io.Source
 
 import org.apache.hadoop.fs.Path
 import org.json4s.jackson.JsonMethods._
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers
 
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.{Logging, SparkConf, SparkFunSuite}
 import org.apache.spark.io._
 import org.apache.spark.scheduler._
 import org.apache.spark.util.{JsonProtocol, ManualClock, Utils}
 
-class FsHistoryProviderSuite extends FunSuite with BeforeAndAfter with Matchers with Logging {
+class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
 
   private var testDir: File = null
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index e10dd4c..14f2d1a 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -22,10 +22,10 @@ import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
 
 import org.apache.commons.io.{FileUtils, IOUtils}
 import org.mockito.Mockito.when
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+import org.scalatest.{BeforeAndAfter, Matchers}
 import org.scalatest.mock.MockitoSugar
 
-import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf}
+import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.ui.SparkUI
 
 /**
@@ -39,7 +39,7 @@ import org.apache.spark.ui.SparkUI
  * expectations.  However, in general this should be done with extreme caution, as the metrics
  * are considered part of Spark's public API.
  */
-class HistoryServerSuite extends FunSuite with BeforeAndAfter with Matchers with MockitoSugar
+class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers with MockitoSugar
   with JsonTestUtils {
 
   private val logDir = new File("src/test/resources/spark-events")

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
index f97e5ff..014e87b 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala
@@ -27,14 +27,14 @@ import scala.language.postfixOps
 import akka.actor.Address
 import org.json4s._
 import org.json4s.jackson.JsonMethods._
-import org.scalatest.{FunSuite, Matchers}
+import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually
 import other.supplier.{CustomPersistenceEngine, CustomRecoveryModeFactory}
 
-import org.apache.spark.{SparkConf, SparkException}
+import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
 import org.apache.spark.deploy._
 
-class MasterSuite extends FunSuite with Matchers with Eventually {
+class MasterSuite extends SparkFunSuite with Matchers with Eventually {
 
   test("toAkkaUrl") {
     val conf = new SparkConf(loadDefaults = false)

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
index f4d548d..197f68e 100644
--- a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
@@ -25,7 +25,7 @@ import scala.collection.mutable
 
 import akka.actor.{Actor, ActorRef, ActorSystem, Props}
 import com.google.common.base.Charsets
-import org.scalatest.{BeforeAndAfterEach, FunSuite}
+import org.scalatest.BeforeAndAfterEach
 import org.json4s.JsonAST._
 import org.json4s.jackson.JsonMethods._
 
@@ -38,7 +38,7 @@ import org.apache.spark.deploy.master.DriverState._
 /**
  * Tests for the REST application submission protocol used in standalone cluster mode.
  */
-class StandaloneRestSubmitSuite extends FunSuite with BeforeAndAfterEach {
+class StandaloneRestSubmitSuite extends SparkFunSuite with BeforeAndAfterEach {
   private var actorSystem: Option[ActorSystem] = None
   private var server: Option[RestSubmissionServer] = None
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
index 61071ee..115ac05 100644
--- a/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/rest/SubmitRestProtocolSuite.scala
@@ -21,14 +21,13 @@ import java.lang.Boolean
 import java.lang.Integer
 
 import org.json4s.jackson.JsonMethods._
-import org.scalatest.FunSuite
 
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 
 /**
  * Tests for the REST application submission protocol.
  */
-class SubmitRestProtocolSuite extends FunSuite {
+class SubmitRestProtocolSuite extends SparkFunSuite {
 
   test("validate") {
     val request = new DummyRequest

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala
index 1c27d83..5b3930c 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala
@@ -17,11 +17,12 @@
 
 package org.apache.spark.deploy.worker
 
+import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.Command
 import org.apache.spark.util.Utils
-import org.scalatest.{FunSuite, Matchers}
+import org.scalatest.Matchers
 
-class CommandUtilsSuite extends FunSuite with Matchers {
+class CommandUtilsSuite extends SparkFunSuite with Matchers {
 
   test("set libraryPath correctly") {
     val appId = "12345-worker321-9876"

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
index 2159fd8..6258c18 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
@@ -23,13 +23,12 @@ import org.mockito.Mockito._
 import org.mockito.Matchers._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
-import org.scalatest.FunSuite
 
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.{Command, DriverDescription}
 import org.apache.spark.util.Clock
 
-class DriverRunnerTest extends FunSuite {
+class DriverRunnerTest extends SparkFunSuite {
   private def createDriverRunner() = {
     val command = new Command("mainClass", Seq(), Map(), Seq(), Seq(), Seq())
     val driverDescription = new DriverDescription("jarUrl", 512, 1, true, command)

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
index a8b9df2..3da9927 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
@@ -21,12 +21,10 @@ import java.io.File
 
 import scala.collection.JavaConversions._
 
-import org.scalatest.FunSuite
-
 import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 
-class ExecutorRunnerTest extends FunSuite {
+class ExecutorRunnerTest extends SparkFunSuite {
   test("command includes appId") {
     val appId = "12345-worker321-9876"
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala
index e432b8e..15f7ca4 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerArgumentsTest.scala
@@ -18,11 +18,10 @@
 
 package org.apache.spark.deploy.worker
 
-import org.apache.spark.SparkConf
-import org.scalatest.FunSuite
+import org.apache.spark.{SparkConf, SparkFunSuite}
 
 
-class WorkerArgumentsTest extends FunSuite {
+class WorkerArgumentsTest extends SparkFunSuite {
 
   test("Memory can't be set to 0 when cmd line args leave off M or G") {
     val conf = new SparkConf

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
index 93a779d..0f4d3b2 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.deploy.worker
 
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.Command
 
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers
 
-class WorkerSuite extends FunSuite with Matchers {
+class WorkerSuite extends SparkFunSuite with Matchers {
 
   def cmd(javaOpts: String*): Command = {
     Command("", Seq.empty, Map.empty, Seq.empty, Seq.empty, Seq(javaOpts : _*))

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
index 6a6f29d..ac18f04 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
@@ -18,12 +18,11 @@
 package org.apache.spark.deploy.worker
 
 import akka.actor.AddressFromURIString
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.SecurityManager
 import org.apache.spark.rpc.{RpcAddress, RpcEnv}
-import org.scalatest.FunSuite
 
-class WorkerWatcherSuite extends FunSuite {
+class WorkerWatcherSuite extends SparkFunSuite {
   test("WorkerWatcher shuts down on valid disassociation") {
     val conf = new SparkConf()
     val rpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala
index da53214..72eaffb 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/ui/LogPageSuite.scala
@@ -20,9 +20,11 @@ package org.apache.spark.deploy.worker.ui
 import java.io.{File, FileWriter}
 
 import org.mockito.Mockito.{mock, when}
-import org.scalatest.{FunSuite, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 
-class LogPageSuite extends FunSuite with PrivateMethodTester {
+import org.apache.spark.SparkFunSuite
+
+class LogPageSuite extends SparkFunSuite with PrivateMethodTester {
 
   test("get logs simple") {
     val webui = mock(classOf[WorkerWebUI])

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala b/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala
index 326e203..8275fd8 100644
--- a/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/executor/TaskMetricsSuite.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.executor
 
-import org.scalatest.FunSuite
+import org.apache.spark.SparkFunSuite
 
-class TaskMetricsSuite extends FunSuite {
+class TaskMetricsSuite extends SparkFunSuite {
   test("[SPARK-5701] updateShuffleReadMetrics: ShuffleReadMetrics not added when no shuffle deps") {
     val taskMetrics = new TaskMetrics()
     taskMetrics.updateShuffleReadMetrics()

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
index 2e58c15..63947df 100644
--- a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
@@ -24,11 +24,10 @@ import java.io.FileOutputStream
 import scala.collection.immutable.IndexedSeq
 
 import org.scalatest.BeforeAndAfterAll
-import org.scalatest.FunSuite
 
 import org.apache.hadoop.io.Text
 
-import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
 import org.apache.spark.util.Utils
 import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodecFactory, GzipCodec}
 
@@ -37,7 +36,7 @@ import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodecFactory, Gzi
  * [[org.apache.spark.input.WholeTextFileRecordReader WholeTextFileRecordReader]]. A temporary
  * directory is created as fake input. Temporary storage is deleted at the end.
  */
-class WholeTextFileRecordReaderSuite extends FunSuite with BeforeAndAfterAll {
+class WholeTextFileRecordReaderSuite extends SparkFunSuite with BeforeAndAfterAll {
   private var sc: SparkContext = _
   private var factory: CompressionCodecFactory = _
 

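The doc comment above describes the suite's fixture: fake input files written into a temporary directory that is removed when the suite finishes. A minimal sketch of that setup, assuming Spark's Utils.createTempDir helper (the file name and payload are illustrative):

    import java.io.{File, FileOutputStream}

    import org.apache.spark.util.Utils

    object TempInputFixture {
      // Drop one fake input file into a throwaway directory;
      // Utils.createTempDir registers the directory for deletion on exit.
      def writeFakeInput(): File = {
        val dir = Utils.createTempDir()
        val out = new FileOutputStream(new File(dir, "part-00000"))
        try out.write("hello, world".getBytes("UTF-8")) finally out.close()
        dir
      }
    }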
http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
index cf6a143..cbdb33c 100644
--- a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
+++ b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
@@ -20,11 +20,10 @@ package org.apache.spark.io
 import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
 
 import com.google.common.io.ByteStreams
-import org.scalatest.FunSuite
 
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 
-class CompressionCodecSuite extends FunSuite {
+class CompressionCodecSuite extends SparkFunSuite {
   val conf = new SparkConf(false)
 
   def testCodec(codec: CompressionCodec) {

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala b/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
index 60dba3b..19f1af0 100644
--- a/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
@@ -36,14 +36,14 @@ import org.apache.hadoop.mapreduce.lib.input.{CombineFileInputFormat => NewCombi
 import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
 import org.apache.hadoop.mapreduce.{TaskAttemptContext, InputSplit => NewInputSplit,
   RecordReader => NewRecordReader}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 
-import org.apache.spark.SharedSparkContext
+import org.apache.spark.{SharedSparkContext, SparkFunSuite}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd}
 import org.apache.spark.util.Utils
 
-class InputOutputMetricsSuite extends FunSuite with SharedSparkContext
+class InputOutputMetricsSuite extends SparkFunSuite with SharedSparkContext
   with BeforeAndAfter {
 
   @transient var tmpDir: File = _

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
index 100ac77..a901a06 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
@@ -17,9 +17,11 @@
 
 package org.apache.spark.metrics
 
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 
-class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
+import org.apache.spark.SparkFunSuite
+
+class MetricsConfigSuite extends SparkFunSuite with BeforeAndAfter {
   var filePath: String = _
 
   before {

http://git-wip-us.apache.org/repos/asf/spark/blob/bfe74b34/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
index bbdc956..9c389c7 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.metrics
 
-import org.scalatest.{BeforeAndAfter, FunSuite, PrivateMethodTester}
+import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
 
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.master.MasterSource
 import org.apache.spark.metrics.source.Source
 
@@ -27,7 +27,7 @@ import com.codahale.metrics.MetricRegistry
 
 import scala.collection.mutable.ArrayBuffer
 
-class MetricsSystemSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester{
+class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateMethodTester {
   var filePath: String = _
   var conf: SparkConf = null
   var securityMgr: SecurityManager = null


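Taken together, the hunks above amount to a three-step recipe for any remaining suite: drop the org.scalatest.FunSuite import, import org.apache.spark.SparkFunSuite when the file lives outside the org.apache.spark package, and swap the superclass. A minimal sketch of a migrated suite (the names are illustrative):

    package org.apache.spark.example

    import org.apache.spark.SparkFunSuite

    // After migration, each test's output is bracketed by banners in
    // unit-tests.log, so a single test can be isolated with a text search.
    class ExampleSuite extends SparkFunSuite {

      test("strings concatenate") {
        assert("foo" + "bar" === "foobar")
      }
    }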