You are viewing a plain text version of this content; the canonical (linked) version is available in the mailing list archive.
Posted to commits@spark.apache.org by va...@apache.org on 2016/01/06 04:07:56 UTC
[4/4] spark git commit: [SPARK-3873][TESTS] Import ordering fixes.
[SPARK-3873][TESTS] Import ordering fixes.
Author: Marcelo Vanzin <va...@cloudera.com>
Closes #10582 from vanzin/SPARK-3873-tests.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b3ba1be3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b3ba1be3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b3ba1be3
Branch: refs/heads/master
Commit: b3ba1be3b77e42120145252b2730a56f1d55fd21
Parents: 7a375bb
Author: Marcelo Vanzin <va...@cloudera.com>
Authored: Tue Jan 5 19:07:39 2016 -0800
Committer: Marcelo Vanzin <va...@cloudera.com>
Committed: Tue Jan 5 19:07:39 2016 -0800
----------------------------------------------------------------------
.../org/apache/spark/ContextCleanerSuite.scala | 10 +++-------
.../spark/ExecutorAllocationManagerSuite.scala | 1 +
.../scala/org/apache/spark/FailureSuite.scala | 4 ++--
.../scala/org/apache/spark/FileServerSuite.scala | 4 ++--
.../test/scala/org/apache/spark/FileSuite.scala | 9 ++++-----
.../org/apache/spark/HeartbeatReceiverSuite.scala | 6 +++---
.../org/apache/spark/LocalSparkContext.scala | 2 +-
.../org/apache/spark/MapOutputTrackerSuite.scala | 4 ++--
.../scala/org/apache/spark/SSLOptionsSuite.scala | 3 ++-
.../scala/org/apache/spark/ShuffleSuite.scala | 6 +++---
.../scala/org/apache/spark/SortShuffleSuite.scala | 2 +-
.../scala/org/apache/spark/SparkConfSuite.scala | 9 +++++----
.../org/apache/spark/SparkContextInfoSuite.scala | 1 +
.../SparkContextSchedulerCreationSuite.scala | 2 +-
.../org/apache/spark/SparkContextSuite.scala | 10 +++++-----
.../scala/org/apache/spark/ThreadingSuite.scala | 5 ++---
.../spark/api/python/PythonBroadcastSuite.scala | 4 ++--
.../org/apache/spark/deploy/DeployTestUtils.scala | 2 +-
.../org/apache/spark/deploy/IvyTestUtils.scala | 4 +---
.../apache/spark/deploy/JsonProtocolSuite.scala | 2 +-
.../spark/deploy/LogUrlsStandaloneSuite.scala | 4 ++--
.../apache/spark/deploy/RPackageUtilsSuite.scala | 4 ++--
.../spark/deploy/SparkSubmitUtilsSuite.scala | 4 ++--
.../deploy/history/FsHistoryProviderSuite.scala | 2 +-
.../deploy/master/PersistenceEngineSuite.scala | 2 +-
.../spark/deploy/master/ui/MasterWebUISuite.scala | 4 ++--
.../deploy/rest/StandaloneRestSubmitSuite.scala | 8 ++++----
.../spark/deploy/worker/CommandUtilsSuite.scala | 3 ++-
.../spark/deploy/worker/DriverRunnerTest.scala | 2 +-
.../spark/deploy/worker/ExecutorRunnerTest.scala | 2 +-
.../apache/spark/deploy/worker/WorkerSuite.scala | 4 ++--
.../spark/deploy/worker/WorkerWatcherSuite.scala | 5 ++---
.../input/WholeTextFileRecordReaderSuite.scala | 5 ++---
.../apache/spark/memory/MemoryManagerSuite.scala | 2 +-
.../apache/spark/memory/MemoryTestingUtils.scala | 2 +-
.../apache/spark/memory/TestMemoryManager.scala | 2 +-
.../spark/metrics/InputOutputMetricsSuite.scala | 10 +++++-----
.../apache/spark/metrics/MetricsConfigSuite.scala | 3 +--
.../apache/spark/metrics/MetricsSystemSuite.scala | 7 +++----
.../netty/NettyBlockTransferSecuritySuite.scala | 13 +++++++------
.../netty/NettyBlockTransferServiceSuite.scala | 5 +++--
.../apache/spark/rdd/LocalCheckpointSuite.scala | 4 ++--
.../apache/spark/rdd/PairRDDFunctionsSuite.scala | 16 ++++++++--------
.../org/apache/spark/rdd/PipedRDDSuite.scala | 8 ++++----
.../scala/org/apache/spark/rdd/RDDSuite.scala | 8 ++++----
.../scala/org/apache/spark/rpc/RpcEnvSuite.scala | 4 ++--
.../org/apache/spark/rpc/netty/InboxSuite.scala | 2 +-
.../spark/rpc/netty/NettyRpcHandlerSuite.scala | 4 ++--
.../CoarseGrainedSchedulerBackendSuite.scala | 2 +-
.../spark/scheduler/DAGSchedulerSuite.scala | 2 +-
.../apache/spark/scheduler/MapStatusSuite.scala | 8 ++++----
.../spark/scheduler/NotSerializableFakeTask.scala | 2 +-
.../OutputCommitCoordinatorIntegrationSuite.scala | 4 ++--
.../scheduler/OutputCommitCoordinatorSuite.scala | 13 ++++++-------
.../spark/scheduler/SparkListenerSuite.scala | 3 +--
.../apache/spark/scheduler/TaskContextSuite.scala | 6 ++----
.../mesos/CoarseMesosSchedulerBackendSuite.scala | 8 ++++----
.../mesos/MesosSchedulerBackendSuite.scala | 8 ++++----
.../mesos/MesosClusterSchedulerSuite.scala | 3 +--
.../serializer/GenericAvroSerializerSuite.scala | 6 +++---
.../KryoSerializerDistributedSuite.scala | 3 +--
.../KryoSerializerResizableOutputSuite.scala | 3 +--
.../spark/serializer/KryoSerializerSuite.scala | 5 ++---
.../apache/spark/serializer/TestSerializer.scala | 3 +--
.../sort/BypassMergeSortShuffleWriterSuite.scala | 6 +++---
.../storage/BlockManagerReplicationSuite.scala | 4 ++--
.../apache/spark/storage/BlockManagerSuite.scala | 7 +++----
.../org/apache/spark/storage/LocalDirsSuite.scala | 3 +--
.../org/apache/spark/ui/UISeleniumSuite.scala | 6 +++---
.../test/scala/org/apache/spark/ui/UISuite.scala | 2 +-
.../ui/scope/RDDOperationGraphListenerSuite.scala | 3 ---
.../apache/spark/ui/storage/StorageTabSuite.scala | 1 +
.../apache/spark/util/ClosureCleanerSuite.scala | 2 +-
.../org/apache/spark/util/FileAppenderSuite.scala | 5 ++---
.../org/apache/spark/util/JsonProtocolSuite.scala | 5 ++---
.../apache/spark/util/SizeEstimatorSuite.scala | 2 +-
.../org/apache/spark/util/ThreadUtilsSuite.scala | 2 +-
.../scala/org/apache/spark/util/UtilsSuite.scala | 3 ++-
.../util/collection/ExternalSorterSuite.scala | 4 +---
.../unsafe/sort/PrefixComparatorsSuite.scala | 1 +
.../spark/util/random/RandomSamplerSuite.scala | 3 ++-
.../spark/util/random/XORShiftRandomSuite.scala | 5 ++---
.../sql/jdbc/DockerJDBCIntegrationSuite.scala | 2 +-
.../spark/sql/jdbc/PostgresIntegrationSuite.scala | 2 +-
.../scala/org/apache/spark/util/DockerUtils.scala | 2 +-
.../streaming/flume/sink/SparkSinkSuite.scala | 2 +-
.../apache/spark/streaming/TestOutputStream.scala | 6 +++---
.../streaming/flume/FlumePollingStreamSuite.scala | 4 ++--
.../streaming/kafka/DirectKafkaStreamSuite.scala | 5 ++---
.../spark/streaming/kafka/KafkaRDDSuite.scala | 2 +-
.../spark/streaming/mqtt/MQTTTestUtils.scala | 2 +-
.../streaming/twitter/TwitterStreamSuite.scala | 5 ++---
.../spark/graphx/impl/EdgePartitionSuite.scala | 3 +--
.../spark/graphx/impl/VertexPartitionSuite.scala | 3 +--
.../classification/LogisticRegressionSuite.scala | 2 +-
.../spark/ml/classification/OneVsRestSuite.scala | 4 ++--
.../spark/ml/feature/InteractionSuite.scala | 2 +-
.../org/apache/spark/ml/feature/PCASuite.scala | 2 +-
.../ml/feature/PolynomialExpansionSuite.scala | 2 +-
.../ml/feature/QuantileDiscretizerSuite.scala | 2 +-
.../spark/ml/feature/StandardScalerSuite.scala | 1 -
.../spark/ml/feature/StringIndexerSuite.scala | 2 +-
.../spark/ml/feature/VectorAssemblerSuite.scala | 2 +-
.../spark/ml/feature/VectorSlicerSuite.scala | 2 +-
.../apache/spark/ml/feature/Word2VecSuite.scala | 2 +-
.../org/apache/spark/ml/impl/TreeTests.scala | 3 +--
.../apache/spark/ml/recommendation/ALSSuite.scala | 3 +--
.../regression/DecisionTreeRegressorSuite.scala | 3 +--
.../ml/regression/LinearRegressionSuite.scala | 2 +-
.../spark/ml/tuning/CrossValidatorSuite.scala | 10 +++++-----
.../spark/ml/util/DefaultReadWriteTest.scala | 2 +-
.../mllib/api/python/PythonMLLibAPISuite.scala | 4 ++--
.../StreamingLogisticRegressionSuite.scala | 2 +-
.../mllib/clustering/GaussianMixtureSuite.scala | 2 +-
.../apache/spark/mllib/clustering/LDASuite.scala | 2 +-
.../mllib/evaluation/RankingMetricsSuite.scala | 2 +-
.../org/apache/spark/mllib/feature/IDFSuite.scala | 2 +-
.../spark/mllib/feature/StandardScalerSuite.scala | 2 +-
.../spark/mllib/feature/Word2VecSuite.scala | 1 -
.../org/apache/spark/mllib/linalg/BLASSuite.scala | 2 +-
.../linalg/BreezeMatrixConversionSuite.scala | 2 +-
.../apache/spark/mllib/linalg/VectorsSuite.scala | 2 +-
.../linalg/distributed/BlockMatrixSuite.scala | 2 +-
.../distributed/CoordinateMatrixSuite.scala | 2 +-
.../distributed/IndexedRowMatrixSuite.scala | 2 +-
.../mllib/linalg/distributed/RowMatrixSuite.scala | 4 ++--
.../mllib/optimization/GradientDescentSuite.scala | 2 +-
.../spark/mllib/random/RandomRDDsSuite.scala | 4 ++--
.../spark/mllib/rdd/MLPairRDDFunctionsSuite.scala | 2 +-
.../spark/mllib/rdd/RDDFunctionsSuite.scala | 2 +-
.../spark/mllib/regression/LassoSuite.scala | 2 +-
.../mllib/regression/LinearRegressionSuite.scala | 2 +-
.../mllib/regression/RidgeRegressionSuite.scala | 2 +-
.../spark/mllib/stat/StreamingTestSuite.scala | 4 ++--
.../distribution/MultivariateGaussianSuite.scala | 2 +-
.../spark/mllib/tree/DecisionTreeSuite.scala | 2 +-
.../spark/mllib/tree/EnsembleTestHelper.scala | 4 ++--
.../mllib/tree/GradientBoostedTreesSuite.scala | 5 ++---
.../mllib/util/LocalClusterSparkContext.scala | 2 +-
.../apache/spark/mllib/util/TestingUtils.scala | 3 ++-
.../spark/mllib/util/TestingUtilsSuite.scala | 3 ++-
.../spark/repl/ExecutorClassLoaderSuite.scala | 8 ++++----
.../test/scala/org/apache/spark/sql/RowTest.scala | 3 ++-
.../spark/sql/catalyst/DistributionSuite.scala | 3 +--
.../spark/sql/catalyst/SqlParserSuite.scala | 4 ++--
.../catalyst/analysis/AnalysisErrorSuite.scala | 12 ++++++------
.../sql/catalyst/analysis/AnalysisTest.scala | 2 +-
.../catalyst/analysis/DecimalPrecisionSuite.scala | 4 ++--
.../analysis/ExpressionTypeCheckingSuite.scala | 2 +-
.../catalyst/analysis/HiveTypeCoercionSuite.scala | 3 +--
.../encoders/ExpressionEncoderSuite.scala | 7 ++++---
.../sql/catalyst/encoders/RowEncoderSuite.scala | 2 +-
.../sql/catalyst/expressions/CastSuite.scala | 4 ++--
.../expressions/ConditionalExpressionSuite.scala | 3 +--
.../expressions/DecimalExpressionSuite.scala | 3 +--
.../catalyst/expressions/MiscFunctionsSuite.scala | 2 +-
.../sql/catalyst/expressions/OrderingSuite.scala | 4 ++--
.../aggregate/HyperLogLogPlusPlusSuite.scala | 9 +++++----
.../optimizer/AggregateOptimizeSuite.scala | 2 +-
.../optimizer/BooleanSimplificationSuite.scala | 6 +++---
.../catalyst/optimizer/ColumnPruningSuite.scala | 4 ++--
.../catalyst/optimizer/CombiningLimitsSuite.scala | 4 ++--
.../catalyst/optimizer/ConstantFoldingSuite.scala | 11 +++++------
.../catalyst/optimizer/FilterPushdownSuite.scala | 6 +++---
.../optimizer/LikeSimplificationSuite.scala | 7 +++----
.../sql/catalyst/optimizer/OptimizeInSuite.scala | 8 ++++----
.../optimizer/ProjectCollapsingSuite.scala | 3 +--
.../optimizer/SetOperationPushDownSuite.scala | 4 ++--
.../SimplifyCaseConversionExpressionsSuite.scala | 7 +++----
.../spark/sql/catalyst/plans/PlanTest.scala | 2 +-
.../sql/catalyst/plans/SameResultSuite.scala | 4 ++--
.../spark/sql/catalyst/trees/TreeNodeSuite.scala | 2 +-
.../sql/catalyst/util/DateTimeUtilsSuite.scala | 2 +-
.../spark/sql/catalyst/util/MetadataSuite.scala | 2 +-
.../org/apache/spark/sql/types/DecimalSuite.scala | 5 +++--
.../org/apache/spark/sql/CachedTableSuite.scala | 10 ++++------
.../apache/spark/sql/ColumnExpressionSuite.scala | 2 +-
.../org/apache/spark/sql/DataFrameSuite.scala | 2 +-
.../apache/spark/sql/DataFrameWindowSuite.scala | 2 +-
.../apache/spark/sql/DatasetAggregatorSuite.scala | 5 ++---
.../scala/org/apache/spark/sql/DatasetSuite.scala | 3 +--
.../org/apache/spark/sql/DateFunctionsSuite.scala | 2 +-
.../apache/spark/sql/ExtraStrategiesSuite.scala | 2 +-
.../org/apache/spark/sql/ListTablesSuite.scala | 2 +-
.../apache/spark/sql/MultiSQLContextsSuite.scala | 3 ++-
.../scala/org/apache/spark/sql/QueryTest.scala | 10 +++++-----
.../scala/org/apache/spark/sql/RowSuite.scala | 2 +-
.../scala/org/apache/spark/sql/SQLConfSuite.scala | 3 +--
.../org/apache/spark/sql/SQLQuerySuite.scala | 2 +-
.../apache/spark/sql/UserDefinedTypeSuite.scala | 4 +---
.../sql/execution/ExchangeCoordinatorSuite.scala | 4 ++--
.../sql/execution/GroupedIteratorSuite.scala | 2 +-
.../apache/spark/sql/execution/PlannerSuite.scala | 2 +-
.../apache/spark/sql/execution/SortSuite.scala | 3 +--
.../UnsafeFixedWidthAggregationMapSuite.scala | 8 ++++----
.../execution/UnsafeKVExternalSorterSuite.scala | 2 +-
.../sql/execution/UnsafeRowSerializerSuite.scala | 11 +++++------
.../sql/execution/columnar/ColumnTypeSuite.scala | 7 +++----
.../execution/columnar/ColumnarTestUtils.scala | 2 +-
.../columnar/NullableColumnAccessorSuite.scala | 2 +-
.../columnar/NullableColumnBuilderSuite.scala | 2 +-
.../columnar/compression/BooleanBitSetSuite.scala | 2 +-
.../execution/datasources/json/JsonSuite.scala | 1 +
.../parquet/ParquetCompatibilityTest.scala | 2 +-
.../datasources/parquet/ParquetFilterSuite.scala | 2 +-
.../datasources/parquet/ParquetIOSuite.scala | 7 ++-----
.../parquet/ParquetPartitionDiscoverySuite.scala | 2 +-
.../datasources/parquet/ParquetQuerySuite.scala | 2 +-
.../datasources/parquet/ParquetTest.scala | 7 +++----
.../execution/datasources/text/TextSuite.scala | 3 +--
.../sql/execution/joins/BroadcastJoinSuite.scala | 2 +-
.../sql/execution/joins/InnerJoinSuite.scala | 2 +-
.../sql/execution/joins/OuterJoinSuite.scala | 4 ++--
.../spark/sql/execution/joins/SemiJoinSuite.scala | 4 ++--
.../sql/execution/local/HashJoinNodeSuite.scala | 4 ++--
.../spark/sql/execution/local/LocalNodeTest.scala | 3 +--
.../spark/sql/execution/ui/SQLListenerSuite.scala | 2 +-
.../org/apache/spark/sql/jdbc/JDBCSuite.scala | 2 +-
.../spark/sql/sources/PartitionedWriteSuite.scala | 2 +-
.../apache/spark/sql/sources/SaveLoadSuite.scala | 2 +-
.../apache/spark/sql/test/ProcessTestUtils.scala | 2 +-
.../org/apache/spark/sql/test/SQLTestUtils.scala | 2 +-
.../spark/sql/hive/thriftserver/CliSuite.scala | 6 +++---
.../thriftserver/HiveThriftServer2Suites.scala | 4 ++--
.../apache/spark/sql/hive/CachedTableSuite.scala | 2 +-
.../spark/sql/hive/ErrorPositionSuite.scala | 3 +--
.../sql/hive/HiveDataFrameAnalyticsSuite.scala | 3 ++-
.../spark/sql/hive/HiveDataFrameJoinSuite.scala | 2 +-
.../spark/sql/hive/HiveInspectorSuite.scala | 2 +-
.../sql/hive/HiveMetastoreCatalogSuite.scala | 2 +-
.../apache/spark/sql/hive/HiveParquetSuite.scala | 2 +-
.../org/apache/spark/sql/hive/HiveQlSuite.scala | 5 ++---
.../spark/sql/hive/HiveSparkSubmitSuite.scala | 2 +-
.../spark/sql/hive/InsertIntoHiveTableSuite.scala | 2 +-
.../apache/spark/sql/hive/ListTablesSuite.scala | 4 ++--
.../sql/hive/MetastoreDataSourcesSuite.scala | 6 +++---
.../spark/sql/hive/MultiDatabaseSuite.scala | 2 +-
.../sql/hive/ParquetHiveCompatibilitySuite.scala | 2 +-
.../spark/sql/hive/QueryPartitionSuite.scala | 4 ++--
.../apache/spark/sql/hive/StatisticsSuite.scala | 2 +-
.../spark/sql/hive/client/VersionsSuite.scala | 4 ++--
.../sql/hive/execution/ConcurrentHiveSuite.scala | 3 ++-
.../sql/hive/execution/HiveComparisonTest.scala | 2 +-
.../sql/hive/execution/HiveExplainSuite.scala | 2 +-
.../execution/HiveOperatorQueryableSuite.scala | 2 +-
.../spark/sql/hive/execution/HiveQuerySuite.scala | 6 +++---
.../sql/hive/execution/HiveTableScanSuite.scala | 1 -
.../spark/sql/hive/execution/HiveUDFSuite.scala | 12 ++++++------
.../spark/sql/hive/execution/SQLQuerySuite.scala | 8 ++++----
.../execution/ScriptTransformationSuite.scala | 2 +-
.../spark/sql/hive/orc/OrcFilterSuite.scala | 2 +-
.../sql/hive/orc/OrcPartitionDiscoverySuite.scala | 2 +-
.../spark/sql/hive/orc/OrcSourceSuite.scala | 2 +-
.../org/apache/spark/sql/hive/orc/OrcTest.scala | 2 +-
.../org/apache/spark/sql/hive/parquetSuites.scala | 4 ++--
.../sources/CommitFailureTestRelationSuite.scala | 2 +-
.../sources/SimpleTextHadoopFsRelationSuite.scala | 5 ++---
.../spark/sql/sources/SimpleTextRelation.scala | 6 +++---
.../spark/streaming/DStreamScopeSuite.scala | 2 +-
.../org/apache/spark/streaming/FailureSuite.scala | 2 +-
.../spark/streaming/InputStreamsSuite.scala | 16 ++++++++--------
.../spark/streaming/MapWithStateSuite.scala | 4 ++--
.../spark/streaming/MasterFailureTest.scala | 18 ++++++++----------
.../streaming/ReceivedBlockHandlerSuite.scala | 5 +++--
.../streaming/ReceiverInputDStreamSuite.scala | 2 +-
.../apache/spark/streaming/ReceiverSuite.scala | 2 +-
.../spark/streaming/StreamingListenerSuite.scala | 12 ++++++------
.../apache/spark/streaming/TestSuiteBase.scala | 6 +++---
.../spark/streaming/WindowOperationsSuite.scala | 2 +-
.../streaming/rdd/MapWithStateRDDSuite.scala | 2 +-
.../rdd/WriteAheadLogBackedBlockRDDSuite.scala | 2 +-
.../streaming/receiver/BlockGeneratorSuite.scala | 4 ++--
.../scheduler/InputInfoTrackerSuite.scala | 2 +-
.../spark/streaming/util/WriteAheadLogSuite.scala | 11 +++++------
.../streaming/util/WriteAheadLogUtilsSuite.scala | 2 +-
.../types/UTF8StringPropertyCheckSuite.scala | 1 -
.../yarn/ClientDistributedCacheManagerSuite.scala | 14 ++++++--------
.../apache/spark/deploy/yarn/ClientSuite.scala | 2 +-
.../spark/deploy/yarn/YarnAllocatorSuite.scala | 9 +++------
.../deploy/yarn/YarnSparkHadoopUtilSuite.scala | 6 ++----
.../network/shuffle/ShuffleTestAccessor.scala | 2 +-
281 files changed, 517 insertions(+), 575 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
index 0c14bef..7b02380 100644
--- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -24,18 +24,14 @@ import scala.language.existentials
import scala.util.Random
import org.scalatest.BeforeAndAfter
-import org.scalatest.concurrent.PatienceConfiguration
import org.scalatest.concurrent.Eventually._
+import org.scalatest.concurrent.PatienceConfiguration
import org.scalatest.time.SpanSugar._
-import org.apache.spark.rdd.{ReliableRDDCheckpointData, RDD}
-import org.apache.spark.storage._
+import org.apache.spark.rdd.{RDD, ReliableRDDCheckpointData}
import org.apache.spark.shuffle.hash.HashShuffleManager
import org.apache.spark.shuffle.sort.SortShuffleManager
-import org.apache.spark.storage.BroadcastBlockId
-import org.apache.spark.storage.RDDBlockId
-import org.apache.spark.storage.ShuffleBlockId
-import org.apache.spark.storage.ShuffleIndexBlockId
+import org.apache.spark.storage._
/**
* An abstract base class for context cleaner tests, which sets up a context with a config
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
index fedfbd5..4e678fb 100644
--- a/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ExecutorAllocationManagerSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark
import scala.collection.mutable
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
+
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.ExecutorInfo
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/FailureSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index 203dab9..3def8b0 100644
--- a/core/src/test/scala/org/apache/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -17,10 +17,10 @@
package org.apache.spark
-import org.apache.spark.util.NonSerializable
-
import java.io.{IOException, NotSerializableException, ObjectInputStream}
+import org.apache.spark.util.NonSerializable
+
// Common state shared by FailureSuite-launched tasks. We use a global object
// for this because any local variables used in the task closures will rightfully
// be copied for each task, so there's no other way for them to share state.
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/FileServerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
index 2c32b69..bc7059b 100644
--- a/core/src/test/scala/org/apache/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -27,10 +27,10 @@ import org.apache.commons.lang3.RandomUtils
import org.apache.spark.util.Utils
-import SSLSampleConfigs._
-
class FileServerSuite extends SparkFunSuite with LocalSparkContext {
+ import SSLSampleConfigs._
+
@transient var tmpDir: File = _
@transient var tmpFile: File = _
@transient var tmpJarUrl: String = _
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/FileSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index 2e47801..993834f 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -21,17 +21,16 @@ import java.io.{File, FileWriter}
import scala.io.Source
-import org.apache.spark.input.PortableDataStream
-import org.apache.spark.storage.StorageLevel
-
import org.apache.hadoop.io._
import org.apache.hadoop.io.compress.DefaultCodec
-import org.apache.hadoop.mapred.{JobConf, FileAlreadyExistsException, FileSplit, TextInputFormat, TextOutputFormat}
+import org.apache.hadoop.mapred.{FileAlreadyExistsException, FileSplit, JobConf, TextInputFormat, TextOutputFormat}
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.{FileSplit => NewFileSplit, TextInputFormat => NewTextInputFormat}
import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
-import org.apache.spark.rdd.{NewHadoopRDD, HadoopRDD}
+import org.apache.spark.input.PortableDataStream
+import org.apache.spark.rdd.{HadoopRDD, NewHadoopRDD}
+import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.Utils
class FileSuite extends SparkFunSuite with LocalSparkContext {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
index 9b43341..18e5350 100644
--- a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
@@ -25,13 +25,13 @@ import scala.concurrent.Await
import scala.concurrent.duration._
import scala.language.postfixOps
-import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
-import org.mockito.Mockito.{mock, spy, verify, when}
import org.mockito.Matchers
import org.mockito.Matchers._
+import org.mockito.Mockito.{mock, spy, verify, when}
+import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
import org.apache.spark.executor.TaskMetrics
-import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEnv, RpcEndpointRef}
+import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEndpointRef, RpcEnv}
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
index 2146819..e1a0bf7 100644
--- a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -17,7 +17,7 @@
package org.apache.spark
-import _root_.io.netty.util.internal.logging.{Slf4JLoggerFactory, InternalLoggerFactory}
+import _root_.io.netty.util.internal.logging.{InternalLoggerFactory, Slf4JLoggerFactory}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.BeforeAndAfterEach
import org.scalatest.Suite
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index 5b29d69..3819c0a 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -19,10 +19,10 @@ package org.apache.spark
import scala.collection.mutable.ArrayBuffer
-import org.mockito.Mockito._
import org.mockito.Matchers.{any, isA}
+import org.mockito.Mockito._
-import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcCallContext, RpcEnv}
+import org.apache.spark.rpc.{RpcAddress, RpcCallContext, RpcEndpointRef, RpcEnv}
import org.apache.spark.scheduler.{CompressedMapStatus, MapStatus}
import org.apache.spark.shuffle.FetchFailedException
import org.apache.spark.storage.{BlockManagerId, ShuffleBlockId}
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
index 25b79bc..fa35819 100644
--- a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
@@ -21,9 +21,10 @@ import java.io.File
import javax.net.ssl.SSLContext
import com.google.common.io.Files
-import org.apache.spark.util.Utils
import org.scalatest.BeforeAndAfterAll
+import org.apache.spark.util.Utils
+
class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
test("test resolving property file as spark conf ") {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index 0de10ae..c45d814 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -17,17 +17,17 @@
package org.apache.spark
-import java.util.concurrent.{Callable, Executors, ExecutorService, CyclicBarrier}
+import java.util.concurrent.{Callable, CyclicBarrier, Executors, ExecutorService}
import org.scalatest.Matchers
import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
import org.apache.spark.memory.TaskMemoryManager
import org.apache.spark.rdd.{CoGroupedRDD, OrderedRDDFunctions, RDD, ShuffledRDD, SubtractedRDD}
-import org.apache.spark.scheduler.{MyRDD, MapStatus, SparkListener, SparkListenerTaskEnd}
+import org.apache.spark.scheduler.{MapStatus, MyRDD, SparkListener, SparkListenerTaskEnd}
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.shuffle.ShuffleWriter
-import org.apache.spark.storage.{ShuffleDataBlockId, ShuffleBlockId}
+import org.apache.spark.storage.{ShuffleBlockId, ShuffleDataBlockId}
import org.apache.spark.util.MutablePair
abstract class ShuffleSuite extends SparkFunSuite with Matchers with LocalSparkContext {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala b/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
index 5354731..7a897c2 100644
--- a/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
@@ -26,8 +26,8 @@ import org.apache.commons.io.filefilter.TrueFileFilter
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.rdd.ShuffledRDD
-import org.apache.spark.shuffle.sort.SortShuffleManager
import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
+import org.apache.spark.shuffle.sort.SortShuffleManager
import org.apache.spark.util.Utils
class SortShuffleSuite extends ShuffleSuite with BeforeAndAfterAll {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index ff9a92c..2fe99e3 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -17,17 +17,18 @@
package org.apache.spark
-import java.util.concurrent.{TimeUnit, Executors}
+import java.util.concurrent.{Executors, TimeUnit}
import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.language.postfixOps
-import scala.util.{Try, Random}
+import scala.util.{Random, Try}
+
+import com.esotericsoftware.kryo.Kryo
import org.apache.spark.network.util.ByteUnit
import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
-import org.apache.spark.util.{RpcUtils, ResetSystemProperties}
-import com.esotericsoftware.kryo.Kryo
+import org.apache.spark.util.{ResetSystemProperties, RpcUtils}
class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
test("Test byteString conversion") {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
index 2bdbd70..3706455 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark
import org.scalatest.Assertions
+
import org.apache.spark.storage.StorageLevel
class SparkContextInfoSuite extends SparkFunSuite with LocalSparkContext {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
index d18e078..52919c1 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
@@ -19,11 +19,11 @@ package org.apache.spark
import org.scalatest.PrivateMethodTester
-import org.apache.spark.util.Utils
import org.apache.spark.scheduler.{SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
import org.apache.spark.scheduler.cluster.{SimrSchedulerBackend, SparkDeploySchedulerBackend}
import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
import org.apache.spark.scheduler.local.LocalBackend
+import org.apache.spark.util.Utils
class SparkContextSchedulerCreationSuite
extends SparkFunSuite with LocalSparkContext with PrivateMethodTester with Logging {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 172ef05..556afd0 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -20,18 +20,18 @@ package org.apache.spark
import java.io.File
import java.util.concurrent.TimeUnit
+import scala.concurrent.Await
+import scala.concurrent.duration.Duration
+
import com.google.common.base.Charsets._
import com.google.common.io.Files
-
import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
import org.apache.hadoop.mapred.TextInputFormat
import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
-import org.apache.spark.util.Utils
-
-import scala.concurrent.Await
-import scala.concurrent.duration.Duration
import org.scalatest.Matchers._
+import org.apache.spark.util.Utils
+
class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
test("Only one SparkContext may be active at a time") {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index 54c131c..fc31b78 100644
--- a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -17,9 +17,8 @@
package org.apache.spark
-import java.util.concurrent.{TimeUnit, Semaphore}
-import java.util.concurrent.atomic.AtomicBoolean
-import java.util.concurrent.atomic.AtomicInteger
+import java.util.concurrent.{Semaphore, TimeUnit}
+import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
import org.apache.spark.scheduler._
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
index 135c56b..b38a366 100644
--- a/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/PythonBroadcastSuite.scala
@@ -17,9 +17,9 @@
package org.apache.spark.api.python
-import scala.io.Source
+import java.io.{File, PrintWriter}
-import java.io.{PrintWriter, File}
+import scala.io.Source
import org.scalatest.Matchers
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/DeployTestUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/DeployTestUtils.scala b/core/src/test/scala/org/apache/spark/deploy/DeployTestUtils.scala
index 3164760..86455a1 100644
--- a/core/src/test/scala/org/apache/spark/deploy/DeployTestUtils.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/DeployTestUtils.scala
@@ -20,9 +20,9 @@ package org.apache.spark.deploy
import java.io.File
import java.util.Date
+import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}
-import org.apache.spark.{SecurityManager, SparkConf}
private[deploy] object DeployTestUtils {
def createAppDesc(): ApplicationDescription = {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala b/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
index d93febc..9ecf49b 100644
--- a/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
@@ -24,10 +24,8 @@ import java.util.jar.Manifest
import scala.collection.mutable.ArrayBuffer
-import com.google.common.io.{Files, ByteStreams}
-
+import com.google.common.io.{ByteStreams, Files}
import org.apache.commons.io.FileUtils
-
import org.apache.ivy.core.settings.IvySettings
import org.apache.spark.TestUtils.{createCompiledClass, JavaSourceFromString}
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
index 0a9f128..2d48e75 100644
--- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala
@@ -23,10 +23,10 @@ import com.fasterxml.jackson.core.JsonParseException
import org.json4s._
import org.json4s.jackson.JsonMethods
+import org.apache.spark.{JsonTestUtils, SparkFunSuite}
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, RecoveryState}
import org.apache.spark.deploy.worker.ExecutorRunner
-import org.apache.spark.{JsonTestUtils, SparkFunSuite}
class JsonProtocolSuite extends SparkFunSuite with JsonTestUtils {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
index 8dd31b4..f416ace 100644
--- a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
@@ -22,9 +22,9 @@ import java.net.URL
import scala.collection.mutable
import scala.io.Source
-import org.apache.spark.scheduler.cluster.ExecutorInfo
-import org.apache.spark.scheduler.{SparkListenerExecutorAdded, SparkListener}
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded}
+import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.util.SparkConfWithEnv
class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
index cc30ba2..13cba94 100644
--- a/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
@@ -17,10 +17,10 @@
package org.apache.spark.deploy
-import java.io.{PrintStream, OutputStream, File}
+import java.io.{File, OutputStream, PrintStream}
import java.net.URI
-import java.util.jar.Attributes.Name
import java.util.jar.{JarFile, Manifest}
+import java.util.jar.Attributes.Name
import java.util.zip.ZipFile
import scala.collection.JavaConverters._
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index 4b5039b..4877710 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -17,14 +17,14 @@
package org.apache.spark.deploy
-import java.io.{File, PrintStream, OutputStream}
+import java.io.{File, OutputStream, PrintStream}
import scala.collection.mutable.ArrayBuffer
-import org.scalatest.BeforeAndAfterAll
import org.apache.ivy.core.module.descriptor.MDArtifact
import org.apache.ivy.core.settings.IvySettings
import org.apache.ivy.plugins.resolver.{AbstractResolver, FileSystemResolver, IBiblioResolver}
+import org.scalatest.BeforeAndAfterAll
import org.apache.spark.SparkFunSuite
import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 5cab17f..6cbf911 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -23,8 +23,8 @@ import java.net.URI
import java.util.concurrent.TimeUnit
import java.util.zip.{ZipInputStream, ZipOutputStream}
-import scala.io.Source
import scala.concurrent.duration._
+import scala.io.Source
import scala.language.postfixOps
import com.google.common.base.Charsets
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
index 7a44728..b4deed7 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/PersistenceEngineSuite.scala
@@ -25,7 +25,7 @@ import org.apache.curator.test.TestingServer
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.rpc.{RpcEndpoint, RpcEnv}
-import org.apache.spark.serializer.{Serializer, JavaSerializer}
+import org.apache.spark.serializer.{JavaSerializer, Serializer}
import org.apache.spark.util.Utils
class PersistenceEngineSuite extends SparkFunSuite {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/master/ui/MasterWebUISuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/master/ui/MasterWebUISuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/ui/MasterWebUISuite.scala
index fba835f..0c9382a 100644
--- a/core/src/test/scala/org/apache/spark/deploy/master/ui/MasterWebUISuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/master/ui/MasterWebUISuite.scala
@@ -23,11 +23,11 @@ import scala.io.Source
import scala.language.postfixOps
import org.json4s.jackson.JsonMethods._
-import org.json4s.JsonAST.{JNothing, JString, JInt}
+import org.json4s.JsonAST.{JInt, JNothing, JString}
import org.mockito.Mockito.{mock, when}
import org.scalatest.BeforeAndAfter
-import org.apache.spark.{SparkConf, SecurityManager, SparkFunSuite}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.DeployMessages.MasterStateResponse
import org.apache.spark.deploy.DeployTestUtils._
import org.apache.spark.deploy.master._
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
index fa39aa2..ee889bf 100644
--- a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
@@ -24,16 +24,16 @@ import javax.servlet.http.HttpServletResponse
import scala.collection.mutable
import com.google.common.base.Charsets
-import org.scalatest.BeforeAndAfterEach
import org.json4s.JsonAST._
import org.json4s.jackson.JsonMethods._
+import org.scalatest.BeforeAndAfterEach
import org.apache.spark._
-import org.apache.spark.rpc._
-import org.apache.spark.util.Utils
-import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.{SparkSubmit, SparkSubmitArguments}
+import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.master.DriverState._
+import org.apache.spark.rpc._
+import org.apache.spark.util.Utils
/**
* Tests for the REST application submission protocol used in standalone cluster mode.
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala
index 7101cb9..607c0a4 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/CommandUtilsSuite.scala
@@ -17,10 +17,11 @@
package org.apache.spark.deploy.worker
+import org.scalatest.{Matchers, PrivateMethodTester}
+
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.Command
import org.apache.spark.util.Utils
-import org.scalatest.{Matchers, PrivateMethodTester}
class CommandUtilsSuite extends SparkFunSuite with Matchers with PrivateMethodTester {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
index 6258c18..bd8b065 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala
@@ -19,8 +19,8 @@ package org.apache.spark.deploy.worker
import java.io.File
-import org.mockito.Mockito._
import org.mockito.Matchers._
+import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
index 98664dc..0240bf8 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala
@@ -19,8 +19,8 @@ package org.apache.spark.deploy.worker
import java.io.File
-import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
+import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
class ExecutorRunnerTest extends SparkFunSuite {
test("command includes appId") {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
index 082d5e8..101a44e 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala
@@ -19,11 +19,11 @@ package org.apache.spark.deploy.worker
import org.scalatest.Matchers
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
+import org.apache.spark.deploy.{Command, ExecutorState}
import org.apache.spark.deploy.DeployMessages.{DriverStateChanged, ExecutorStateChanged}
import org.apache.spark.deploy.master.DriverState
-import org.apache.spark.deploy.{Command, ExecutorState}
import org.apache.spark.rpc.{RpcAddress, RpcEnv}
-import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
class WorkerSuite extends SparkFunSuite with Matchers {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
index 0ffd91d..31bea32 100644
--- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala
@@ -17,9 +17,8 @@
package org.apache.spark.deploy.worker
-import org.apache.spark.{SparkConf, SparkFunSuite}
-import org.apache.spark.SecurityManager
-import org.apache.spark.rpc.{RpcEndpointAddress, RpcAddress, RpcEnv}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
+import org.apache.spark.rpc.{RpcAddress, RpcEndpointAddress, RpcEnv}
class WorkerWatcherSuite extends SparkFunSuite {
test("WorkerWatcher shuts down on valid disassociation") {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
index 24184b0..d852255 100644
--- a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
@@ -23,13 +23,12 @@ import java.io.FileOutputStream
import scala.collection.immutable.IndexedSeq
-import org.scalatest.BeforeAndAfterAll
-
import org.apache.hadoop.io.Text
+import org.apache.hadoop.io.compress.{CompressionCodecFactory, DefaultCodec, GzipCodec}
+import org.scalatest.BeforeAndAfterAll
import org.apache.spark.{Logging, SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.util.Utils
-import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodecFactory, GzipCodec}
/**
* Tests the correctness of
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala b/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
index 555b640..f2924a6 100644
--- a/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/memory/MemoryManagerSuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark.memory
import java.util.concurrent.atomic.AtomicLong
import scala.collection.mutable
-import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future}
+import scala.concurrent.duration.Duration
import org.mockito.Matchers.{any, anyLong}
import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/memory/MemoryTestingUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/memory/MemoryTestingUtils.scala b/core/src/test/scala/org/apache/spark/memory/MemoryTestingUtils.scala
index 4b4c3b0..0e60cc8 100644
--- a/core/src/test/scala/org/apache/spark/memory/MemoryTestingUtils.scala
+++ b/core/src/test/scala/org/apache/spark/memory/MemoryTestingUtils.scala
@@ -17,7 +17,7 @@
package org.apache.spark.memory
-import org.apache.spark.{SparkEnv, TaskContextImpl, TaskContext}
+import org.apache.spark.{SparkEnv, TaskContext, TaskContextImpl}
/**
* Helper methods for mocking out memory-management-related classes in tests.
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
index 0706a6e..4a1e49b 100644
--- a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
+++ b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
@@ -20,7 +20,7 @@ package org.apache.spark.memory
import scala.collection.mutable
import org.apache.spark.SparkConf
-import org.apache.spark.storage.{BlockStatus, BlockId}
+import org.apache.spark.storage.{BlockId, BlockStatus}
class TestMemoryManager(conf: SparkConf)
extends MemoryManager(conf, numCores = 1, Long.MaxValue, Long.MaxValue) {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala b/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
index 44eb5a0..aaf62e0 100644
--- a/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/InputOutputMetricsSuite.scala
@@ -25,17 +25,17 @@ import org.apache.commons.lang3.RandomUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.io.{LongWritable, Text}
+import org.apache.hadoop.mapred.{FileSplit => OldFileSplit, InputSplit => OldInputSplit,
+ JobConf, LineRecordReader => OldLineRecordReader, RecordReader => OldRecordReader,
+ Reporter, TextInputFormat => OldTextInputFormat}
import org.apache.hadoop.mapred.lib.{CombineFileInputFormat => OldCombineFileInputFormat,
CombineFileRecordReader => OldCombineFileRecordReader, CombineFileSplit => OldCombineFileSplit}
-import org.apache.hadoop.mapred.{JobConf, Reporter, FileSplit => OldFileSplit,
- InputSplit => OldInputSplit, LineRecordReader => OldLineRecordReader,
- RecordReader => OldRecordReader, TextInputFormat => OldTextInputFormat}
+import org.apache.hadoop.mapreduce.{InputSplit => NewInputSplit, RecordReader => NewRecordReader,
+ TaskAttemptContext}
import org.apache.hadoop.mapreduce.lib.input.{CombineFileInputFormat => NewCombineFileInputFormat,
CombineFileRecordReader => NewCombineFileRecordReader, CombineFileSplit => NewCombineFileSplit,
FileSplit => NewFileSplit, TextInputFormat => NewTextInputFormat}
import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
-import org.apache.hadoop.mapreduce.{TaskAttemptContext, InputSplit => NewInputSplit,
- RecordReader => NewRecordReader}
import org.scalatest.BeforeAndAfter
import org.apache.spark.{SharedSparkContext, SparkFunSuite}
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
index 41f2ff7..b24f5d7 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
@@ -17,10 +17,9 @@
package org.apache.spark.metrics
-import org.apache.spark.SparkConf
-
import org.scalatest.BeforeAndAfter
+import org.apache.spark.SparkConf
import org.apache.spark.SparkFunSuite
class MetricsConfigSuite extends SparkFunSuite with BeforeAndAfter {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
index 9c389c7..5d85542 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
@@ -17,16 +17,15 @@
package org.apache.spark.metrics
+import scala.collection.mutable.ArrayBuffer
+
+import com.codahale.metrics.MetricRegistry
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.master.MasterSource
import org.apache.spark.metrics.source.Source
-import com.codahale.metrics.MetricRegistry
-
-import scala.collection.mutable.ArrayBuffer
-
class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateMethodTester{
var filePath: String = _
var conf: SparkConf = null
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
index 98da941..47dbcb8 100644
--- a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
@@ -22,20 +22,21 @@ import java.nio._
import java.nio.charset.Charset
import java.util.concurrent.TimeUnit
-import scala.concurrent.duration._
import scala.concurrent.{Await, Promise}
+import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
import com.google.common.io.CharStreams
-import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
-import org.apache.spark.network.shuffle.BlockFetchingListener
-import org.apache.spark.network.{BlockDataManager, BlockTransferService}
-import org.apache.spark.storage.{BlockId, ShuffleBlockId}
-import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.ShouldMatchers
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
+import org.apache.spark.network.{BlockDataManager, BlockTransferService}
+import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
+import org.apache.spark.network.shuffle.BlockFetchingListener
+import org.apache.spark.storage.{BlockId, ShuffleBlockId}
+
class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with ShouldMatchers {
test("security default off") {
val conf = new SparkConf()
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
index 92daf4e..cc1a9e0 100644
--- a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
@@ -17,11 +17,12 @@
package org.apache.spark.network.netty
-import org.apache.spark.network.BlockDataManager
-import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.mockito.Mockito.mock
import org.scalatest._
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
+import org.apache.spark.network.BlockDataManager
+
class NettyBlockTransferServiceSuite
extends SparkFunSuite
with BeforeAndAfterEach
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/rdd/LocalCheckpointSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/LocalCheckpointSuite.scala b/core/src/test/scala/org/apache/spark/rdd/LocalCheckpointSuite.scala
index 3a22a98..e694f5e 100644
--- a/core/src/test/scala/org/apache/spark/rdd/LocalCheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/LocalCheckpointSuite.scala
@@ -17,9 +17,9 @@
package org.apache.spark.rdd
-import org.apache.spark.{SparkException, SparkContext, LocalSparkContext, SparkFunSuite}
-
import org.mockito.Mockito.spy
+
+import org.apache.spark.{LocalSparkContext, SparkContext, SparkException, SparkFunSuite}
import org.apache.spark.storage.{RDDBlockId, StorageLevel}
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
index 7d2cfcc..16e2d2e 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
@@ -17,18 +17,18 @@
package org.apache.spark.rdd
-import org.apache.commons.math3.distribution.{PoissonDistribution, BinomialDistribution}
-import org.apache.hadoop.fs.FileSystem
-import org.apache.hadoop.mapred._
-import org.apache.hadoop.util.Progressable
-
import scala.collection.mutable.{ArrayBuffer, HashSet}
import scala.util.Random
+import org.apache.commons.math3.distribution.{BinomialDistribution, PoissonDistribution}
import org.apache.hadoop.conf.{Configurable, Configuration}
-import org.apache.hadoop.mapreduce.{JobContext => NewJobContext, OutputCommitter => NewOutputCommitter,
-OutputFormat => NewOutputFormat, RecordWriter => NewRecordWriter,
-TaskAttemptContext => NewTaskAttempContext}
+import org.apache.hadoop.fs.FileSystem
+import org.apache.hadoop.mapred._
+import org.apache.hadoop.mapreduce.{JobContext => NewJobContext,
+ OutputCommitter => NewOutputCommitter, OutputFormat => NewOutputFormat,
+ RecordWriter => NewRecordWriter, TaskAttemptContext => NewTaskAttempContext}
+import org.apache.hadoop.util.Progressable
+
import org.apache.spark.{Partitioner, SharedSparkContext, SparkFunSuite}
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
index 5f73ec8..1eebc92 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala
@@ -19,15 +19,15 @@ package org.apache.spark.rdd
import java.io.File
-import org.apache.hadoop.fs.Path
-import org.apache.hadoop.io.{LongWritable, Text}
-import org.apache.hadoop.mapred.{FileSplit, JobConf, TextInputFormat}
-
import scala.collection.Map
import scala.language.postfixOps
import scala.sys.process._
import scala.util.Try
+import org.apache.hadoop.fs.Path
+import org.apache.hadoop.io.{LongWritable, Text}
+import org.apache.hadoop.mapred.{FileSplit, JobConf, TextInputFormat}
+
import org.apache.spark._
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index 18d1466..24acbed 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -17,14 +17,14 @@
package org.apache.spark.rdd
-import java.io.{ObjectInputStream, ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectInputStream, ObjectOutputStream}
-import com.esotericsoftware.kryo.KryoException
-
-import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.collection.JavaConverters._
+import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.reflect.ClassTag
+import com.esotericsoftware.kryo.KryoException
+
import org.apache.spark._
import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
import org.apache.spark.rdd.RDDSuiteUtils._
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
index 924fce7..64e486d 100644
--- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -18,9 +18,9 @@
package org.apache.spark.rpc
import java.io.{File, NotSerializableException}
-import java.util.UUID
import java.nio.charset.StandardCharsets.UTF_8
-import java.util.concurrent.{TimeUnit, CountDownLatch, TimeoutException}
+import java.util.UUID
+import java.util.concurrent.{CountDownLatch, TimeoutException, TimeUnit}
import scala.collection.mutable
import scala.concurrent.Await
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala b/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
index 2136795..12113be 100644
--- a/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
@@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicInteger
import org.mockito.Mockito._
import org.apache.spark.SparkFunSuite
-import org.apache.spark.rpc.{RpcEnv, RpcEndpoint, RpcAddress, TestRpcEndpoint}
+import org.apache.spark.rpc.{RpcAddress, RpcEndpoint, RpcEnv, TestRpcEndpoint}
class InboxSuite extends SparkFunSuite {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala b/core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala
index d4aebe9..0c156fe 100644
--- a/core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/netty/NettyRpcHandlerSuite.scala
@@ -21,11 +21,11 @@ import java.net.InetSocketAddress
import java.nio.ByteBuffer
import io.netty.channel.Channel
-import org.mockito.Mockito._
import org.mockito.Matchers._
+import org.mockito.Mockito._
import org.apache.spark.SparkFunSuite
-import org.apache.spark.network.client.{TransportResponseHandler, TransportClient}
+import org.apache.spark.network.client.{TransportClient, TransportResponseHandler}
import org.apache.spark.network.server.StreamManager
import org.apache.spark.rpc._
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/CoarseGrainedSchedulerBackendSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/CoarseGrainedSchedulerBackendSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/CoarseGrainedSchedulerBackendSuite.scala
index eef6aaf..70f40fb 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/CoarseGrainedSchedulerBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/CoarseGrainedSchedulerBackendSuite.scala
@@ -18,7 +18,7 @@
package org.apache.spark.scheduler
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkException, SparkFunSuite}
-import org.apache.spark.util.{SerializableBuffer, AkkaUtils}
+import org.apache.spark.util.{AkkaUtils, SerializableBuffer}
class CoarseGrainedSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index 2869f0f..370a284 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.scheduler
import java.util.Properties
-import scala.collection.mutable.{ArrayBuffer, HashSet, HashMap, Map}
+import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, Map}
import scala.language.reflectiveCalls
import scala.util.control.NonFatal
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
index 15c8de6..56e0f01 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
@@ -17,13 +17,13 @@
package org.apache.spark.scheduler
-import org.apache.spark.storage.BlockManagerId
+import scala.util.Random
-import org.apache.spark.{SparkConf, SparkFunSuite}
-import org.apache.spark.serializer.JavaSerializer
import org.roaringbitmap.RoaringBitmap
-import scala.util.Random
+import org.apache.spark.{SparkConf, SparkFunSuite}
+import org.apache.spark.serializer.JavaSerializer
+import org.apache.spark.storage.BlockManagerId
class MapStatusSuite extends SparkFunSuite {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/NotSerializableFakeTask.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/NotSerializableFakeTask.scala b/core/src/test/scala/org/apache/spark/scheduler/NotSerializableFakeTask.scala
index f333247..1dca4bd 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/NotSerializableFakeTask.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/NotSerializableFakeTask.scala
@@ -17,7 +17,7 @@
package org.apache.spark.scheduler
-import java.io.{ObjectInputStream, ObjectOutputStream, IOException}
+import java.io.{IOException, ObjectInputStream, ObjectOutputStream}
import org.apache.spark.TaskContext
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorIntegrationSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorIntegrationSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorIntegrationSuite.scala
index 1ae5b03..9f41aca 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorIntegrationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorIntegrationSuite.scala
@@ -19,9 +19,9 @@ package org.apache.spark.scheduler
import org.apache.hadoop.mapred.{FileOutputCommitter, TaskAttemptContext}
import org.scalatest.concurrent.Timeouts
-import org.scalatest.time.{Span, Seconds}
+import org.scalatest.time.{Seconds, Span}
-import org.apache.spark.{SparkConf, SparkContext, LocalSparkContext, SparkFunSuite, TaskContext}
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite, TaskContext}
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala
index 7345508..c461da6 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala
@@ -20,22 +20,21 @@ package org.apache.spark.scheduler
import java.io.File
import java.util.concurrent.TimeoutException
+import scala.concurrent.Await
+import scala.concurrent.duration._
+import scala.language.postfixOps
+
+import org.apache.hadoop.mapred.{JobConf, OutputCommitter, TaskAttemptContext, TaskAttemptID}
import org.mockito.Matchers
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.BeforeAndAfter
-import org.apache.hadoop.mapred.{TaskAttemptID, JobConf, TaskAttemptContext, OutputCommitter}
-
import org.apache.spark._
-import org.apache.spark.rdd.{RDD, FakeOutputCommitter}
+import org.apache.spark.rdd.{FakeOutputCommitter, RDD}
import org.apache.spark.util.Utils
-import scala.concurrent.Await
-import scala.concurrent.duration._
-import scala.language.postfixOps
-
/**
* Unit tests for the output commit coordination functionality.
*
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index f20d5be..dc15f59 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -24,10 +24,9 @@ import scala.collection.JavaConverters._
import org.scalatest.Matchers
-import org.apache.spark.SparkException
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkException, SparkFunSuite}
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.util.ResetSystemProperties
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Matchers
with ResetSystemProperties {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
index 40ebfdd..e5ec44a 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
@@ -17,17 +17,15 @@
package org.apache.spark.scheduler
-import org.mockito.Mockito._
import org.mockito.Matchers.any
-
+import org.mockito.Mockito._
import org.scalatest.BeforeAndAfter
import org.apache.spark._
+import org.apache.spark.metrics.source.JvmSource
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.util.{TaskCompletionListener, TaskCompletionListenerException}
-import org.apache.spark.metrics.source.JvmSource
-
class TaskContextSuite extends SparkFunSuite with BeforeAndAfter with LocalSparkContext {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
index 525ee0d..a4110d2 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
@@ -20,17 +20,17 @@ package org.apache.spark.scheduler.cluster.mesos
import java.util
import java.util.Collections
-import org.apache.mesos.Protos.Value.Scalar
-import org.apache.mesos.Protos._
import org.apache.mesos.{Protos, Scheduler, SchedulerDriver}
+import org.apache.mesos.Protos._
+import org.apache.mesos.Protos.Value.Scalar
+import org.mockito.Matchers
import org.mockito.Matchers._
import org.mockito.Mockito._
-import org.mockito.Matchers
import org.scalatest.mock.MockitoSugar
import org.scalatest.BeforeAndAfter
+import org.apache.spark.{LocalSparkContext, SecurityManager, SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.scheduler.TaskSchedulerImpl
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SecurityManager, SparkFunSuite}
class CoarseMesosSchedulerBackendSuite extends SparkFunSuite
with LocalSparkContext
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala
index c4dc560..504e578 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendSuite.scala
@@ -26,19 +26,19 @@ import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
-import org.apache.mesos.Protos.Value.Scalar
import org.apache.mesos.Protos._
+import org.apache.mesos.Protos.Value.Scalar
import org.apache.mesos.SchedulerDriver
+import org.mockito.{ArgumentCaptor, Matchers}
import org.mockito.Matchers._
import org.mockito.Mockito._
-import org.mockito.{ArgumentCaptor, Matchers}
import org.scalatest.mock.MockitoSugar
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.executor.MesosExecutorBackend
-import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.scheduler.{LiveListenerBus, SparkListenerExecutorAdded,
TaskDescription, TaskSchedulerImpl, WorkerOffer}
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.scheduler.cluster.ExecutorInfo
class MesosSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/scheduler/mesos/MesosClusterSchedulerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/mesos/MesosClusterSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/mesos/MesosClusterSchedulerSuite.scala
index f5cef1c..98fdc58 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/mesos/MesosClusterSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/mesos/MesosClusterSchedulerSuite.scala
@@ -21,11 +21,10 @@ import java.util.Date
import org.scalatest.mock.MockitoSugar
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.Command
import org.apache.spark.deploy.mesos.MesosDriverDescription
import org.apache.spark.scheduler.cluster.mesos._
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkFunSuite}
-
class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/serializer/GenericAvroSerializerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/serializer/GenericAvroSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/GenericAvroSerializerSuite.scala
index 87f25e7..3734f1c 100644
--- a/core/src/test/scala/org/apache/spark/serializer/GenericAvroSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/GenericAvroSerializerSuite.scala
@@ -20,11 +20,11 @@ package org.apache.spark.serializer
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.nio.ByteBuffer
-import com.esotericsoftware.kryo.io.{Output, Input}
-import org.apache.avro.{SchemaBuilder, Schema}
+import com.esotericsoftware.kryo.io.{Input, Output}
+import org.apache.avro.{Schema, SchemaBuilder}
import org.apache.avro.generic.GenericData.Record
-import org.apache.spark.{SparkFunSuite, SharedSparkContext}
+import org.apache.spark.{SharedSparkContext, SparkFunSuite}
class GenericAvroSerializerSuite extends SparkFunSuite with SharedSparkContext {
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
index 935a091..a0483f6 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
@@ -17,12 +17,11 @@
package org.apache.spark.serializer
-import org.apache.spark.util.Utils
-
import com.esotericsoftware.kryo.Kryo
import org.apache.spark._
import org.apache.spark.serializer.KryoDistributedTest._
+import org.apache.spark.util.Utils
class KryoSerializerDistributedSuite extends SparkFunSuite {
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/serializer/KryoSerializerResizableOutputSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerResizableOutputSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerResizableOutputSuite.scala
index a9b209c..21251f0 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerResizableOutputSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerResizableOutputSuite.scala
@@ -18,11 +18,10 @@
package org.apache.spark.serializer
import org.apache.spark.{SparkConf, SparkFunSuite}
-import org.apache.spark.SparkContext
import org.apache.spark.LocalSparkContext
+import org.apache.spark.SparkContext
import org.apache.spark.SparkException
-
class KryoSerializerResizableOutputSuite extends SparkFunSuite {
// trial and error showed this will not serialize with 1mb buffer
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index 9fcc22b..8f9b453 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.serializer
-import java.io.{ByteArrayInputStream, ByteArrayOutputStream, FileOutputStream, FileInputStream}
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream, FileInputStream, FileOutputStream}
import scala.collection.JavaConverters._
import scala.collection.mutable
@@ -25,14 +25,13 @@ import scala.reflect.ClassTag
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input => KryoInput, Output => KryoOutput}
-
import org.roaringbitmap.RoaringBitmap
import org.apache.spark.{SharedSparkContext, SparkConf, SparkFunSuite}
import org.apache.spark.scheduler.HighlyCompressedMapStatus
import org.apache.spark.serializer.KryoTest._
-import org.apache.spark.util.Utils
import org.apache.spark.storage.BlockManagerId
+import org.apache.spark.util.Utils
class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
http://git-wip-us.apache.org/repos/asf/spark/blob/b3ba1be3/core/src/test/scala/org/apache/spark/serializer/TestSerializer.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/serializer/TestSerializer.scala b/core/src/test/scala/org/apache/spark/serializer/TestSerializer.scala
index c1e0a29..1703787 100644
--- a/core/src/test/scala/org/apache/spark/serializer/TestSerializer.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/TestSerializer.scala
@@ -17,12 +17,11 @@
package org.apache.spark.serializer
-import java.io.{EOFException, OutputStream, InputStream}
+import java.io.{EOFException, InputStream, OutputStream}
import java.nio.ByteBuffer
import scala.reflect.ClassTag
-
/**
* A serializer implementation that always returns two elements in a deserialization stream.
*/
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org