You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sr...@apache.org on 2016/03/03 11:12:44 UTC

[3/3] spark git commit: [SPARK-13583][CORE][STREAMING] Remove unused imports and add checkstyle rule

[SPARK-13583][CORE][STREAMING] Remove unused imports and add checkstyle rule

## What changes were proposed in this pull request?

After SPARK-6990, `dev/lint-java` keeps Java code healthy and helps PR review by saving much time.
This issue aims to remove unused imports from Java/Scala code and add `UnusedImports` checkstyle rule to help developers.

## How was this patch tested?
```
./dev/lint-java
./build/sbt compile
```

Author: Dongjoon Hyun <do...@apache.org>

Closes #11438 from dongjoon-hyun/SPARK-13583.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b5f02d67
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b5f02d67
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b5f02d67

Branch: refs/heads/master
Commit: b5f02d6743ecb1633b7b13382f76cb8bfc2aa95c
Parents: e97fc7f
Author: Dongjoon Hyun <do...@apache.org>
Authored: Thu Mar 3 10:12:32 2016 +0000
Committer: Sean Owen <so...@cloudera.com>
Committed: Thu Mar 3 10:12:32 2016 +0000

----------------------------------------------------------------------
 checkstyle.xml                                                | 1 +
 .../java/org/apache/spark/network/protocol/OneWayMessage.java | 1 -
 .../java/org/apache/spark/network/protocol/RpcRequest.java    | 1 -
 .../java/org/apache/spark/network/protocol/RpcResponse.java   | 1 -
 .../java/org/apache/spark/network/protocol/StreamFailure.java | 3 ---
 .../java/org/apache/spark/network/protocol/StreamRequest.java | 3 ---
 .../org/apache/spark/network/protocol/StreamResponse.java     | 1 -
 .../java/org/apache/spark/network/sasl/SaslEncryption.java    | 1 -
 .../apache/spark/network/server/TransportRequestHandler.java  | 1 -
 .../main/java/org/apache/spark/network/util/NettyUtils.java   | 2 --
 .../org/apache/spark/network/util/TransportFrameDecoder.java  | 1 -
 .../org/apache/spark/network/sasl/ShuffleSecretManager.java   | 1 -
 .../spark/network/shuffle/ExternalShuffleSecuritySuite.java   | 1 -
 .../org/apache/spark/shuffle/sort/PackedRecordPointer.java    | 4 +---
 .../org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java    | 1 -
 .../spark/util/collection/unsafe/sort/PrefixComparators.java  | 1 -
 .../collection/unsafe/sort/RecordPointerAndKeyPrefix.java     | 4 +---
 .../scala/org/apache/spark/ExecutorAllocationManager.scala    | 2 +-
 core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala  | 1 -
 core/src/main/scala/org/apache/spark/Partitioner.scala        | 4 ++--
 core/src/main/scala/org/apache/spark/SparkContext.scala       | 1 -
 .../scala/org/apache/spark/TaskNotSerializableException.scala | 2 --
 core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala   | 3 ---
 .../scala/org/apache/spark/broadcast/BroadcastFactory.scala   | 1 -
 .../main/scala/org/apache/spark/deploy/RPackageUtils.scala    | 2 +-
 .../scala/org/apache/spark/deploy/master/DriverInfo.scala     | 1 -
 .../main/scala/org/apache/spark/deploy/worker/Worker.scala    | 2 +-
 .../scala/org/apache/spark/deploy/worker/ui/LogPage.scala     | 1 -
 .../main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala  | 1 -
 core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala        | 2 +-
 .../main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala  | 2 +-
 .../org/apache/spark/rdd/ReliableRDDCheckpointData.scala      | 1 -
 .../main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala   | 1 -
 .../src/main/scala/org/apache/spark/scheduler/ActiveJob.scala | 1 -
 .../scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala  | 2 --
 .../main/scala/org/apache/spark/scheduler/TaskScheduler.scala | 1 -
 .../spark/scheduler/cluster/CoarseGrainedClusterMessage.scala | 2 +-
 .../main/scala/org/apache/spark/serializer/Serializer.scala   | 4 ++--
 .../scala/org/apache/spark/shuffle/BaseShuffleHandle.scala    | 3 +--
 .../scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala | 2 --
 .../org/apache/spark/shuffle/hash/HashShuffleWriter.scala     | 1 -
 .../org/apache/spark/shuffle/sort/SortShuffleWriter.scala     | 1 -
 .../scala/org/apache/spark/storage/BlockManagerMaster.scala   | 2 +-
 core/src/main/scala/org/apache/spark/storage/DiskStore.scala  | 1 -
 core/src/main/scala/org/apache/spark/ui/JettyUtils.scala      | 2 +-
 .../org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala     | 1 -
 core/src/main/scala/org/apache/spark/util/SizeEstimator.scala | 1 -
 .../org/apache/spark/util/collection/AppendOnlyMap.scala      | 2 +-
 .../java/org/apache/spark/launcher/SparkLauncherSuite.java    | 3 ---
 .../org/apache/spark/serializer/TestJavaSerializerImpl.java   | 1 -
 .../test/scala/org/apache/spark/MapOutputTrackerSuite.scala   | 2 +-
 core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala    | 3 ---
 core/src/test/scala/org/apache/spark/ThreadingSuite.scala     | 4 +---
 .../org/apache/spark/deploy/LogUrlsStandaloneSuite.scala      | 2 +-
 .../scala/org/apache/spark/deploy/client/AppClientSuite.scala | 1 -
 .../apache/spark/deploy/history/FsHistoryProviderSuite.scala  | 6 +-----
 .../apache/spark/input/WholeTextFileRecordReaderSuite.scala   | 2 +-
 .../org/apache/spark/launcher/LauncherBackendSuite.scala      | 1 -
 .../scala/org/apache/spark/memory/TestMemoryManager.scala     | 4 +---
 core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala    | 2 +-
 .../test/scala/org/apache/spark/rpc/netty/InboxSuite.scala    | 2 +-
 .../cluster/mesos/CoarseMesosSchedulerBackendSuite.scala      | 2 +-
 core/src/test/scala/org/apache/spark/ui/UISuite.scala         | 1 -
 .../scala/org/apache/spark/util/ResetSystemProperties.scala   | 2 --
 .../test/scala/org/apache/spark/util/SizeEstimatorSuite.scala | 2 +-
 .../org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala    | 4 +---
 .../src/main/java/org/apache/spark/examples/JavaLogQuery.java | 1 -
 .../mllib/JavaMultiLabelClassificationMetricsExample.java     | 2 --
 .../scala/org/apache/spark/examples/DFSReadWriteTest.scala    | 1 -
 .../main/scala/org/apache/spark/examples/GroupByTest.scala    | 1 -
 .../main/scala/org/apache/spark/examples/LocalKMeans.scala    | 2 --
 .../src/main/scala/org/apache/spark/examples/LocalPi.scala    | 3 ---
 .../src/main/scala/org/apache/spark/examples/LogQuery.scala   | 1 -
 .../org/apache/spark/examples/SimpleSkewedGroupByTest.scala   | 1 -
 .../scala/org/apache/spark/examples/SkewedGroupByTest.scala   | 1 -
 .../main/scala/org/apache/spark/examples/SparkKMeans.scala    | 1 -
 .../main/scala/org/apache/spark/examples/SparkPageRank.scala  | 1 -
 .../src/main/scala/org/apache/spark/examples/SparkTC.scala    | 1 -
 .../org/apache/spark/examples/graphx/SynthBenchmark.scala     | 1 -
 .../apache/spark/examples/ml/LinearRegressionExample.scala    | 4 +---
 .../org/apache/spark/examples/mllib/CosineSimilarity.scala    | 1 -
 .../scala/org/apache/spark/examples/mllib/MovieLensALS.scala  | 1 -
 .../scala/org/apache/spark/examples/mllib/SampledRDDs.scala   | 1 -
 .../org/apache/spark/examples/streaming/ActorWordCount.scala  | 1 -
 .../org/apache/spark/examples/streaming/CustomReceiver.scala  | 2 +-
 .../spark/examples/streaming/FlumePollingEventCount.scala     | 3 ---
 .../apache/spark/examples/streaming/SqlNetworkWordCount.scala | 1 -
 .../spark/examples/streaming/StatefulNetworkWordCount.scala   | 1 -
 .../apache/spark/examples/streaming/TwitterAlgebirdCMS.scala  | 1 -
 .../apache/spark/examples/streaming/TwitterPopularTags.scala  | 1 -
 .../org/apache/spark/examples/streaming/ZeroMQWordCount.scala | 3 +--
 .../scala/org/apache/spark/streaming/TestOutputStream.scala   | 1 -
 .../org/apache/spark/streaming/mqtt/MQTTStreamSuite.scala     | 2 +-
 .../org/apache/spark/streaming/twitter/TwitterUtils.scala     | 4 ++--
 graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala  | 5 -----
 graphx/src/main/scala/org/apache/spark/graphx/VertexRDD.scala | 1 -
 .../scala/org/apache/spark/graphx/impl/EdgePartition.scala    | 2 +-
 .../org/apache/spark/graphx/impl/ReplicatedVertexView.scala   | 3 +--
 .../org/apache/spark/graphx/impl/RoutingTablePartition.scala  | 6 ------
 .../scala/org/apache/spark/graphx/impl/VertexPartition.scala  | 1 -
 .../scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala    | 1 -
 .../scala/org/apache/spark/graphx/lib/TriangleCount.scala     | 1 -
 .../scala/org/apache/spark/graphx/util/GraphGenerators.scala  | 4 ----
 .../test/scala/org/apache/spark/graphx/GraphOpsSuite.scala    | 2 --
 .../src/test/scala/org/apache/spark/graphx/PregelSuite.scala  | 3 +--
 .../org/apache/spark/graphx/impl/EdgePartitionSuite.scala     | 1 -
 .../apache/spark/graphx/lib/ConnectedComponentsSuite.scala    | 3 +--
 .../org/apache/spark/graphx/lib/ShortestPathsSuite.scala      | 6 +-----
 .../spark/graphx/lib/StronglyConnectedComponentsSuite.scala   | 5 +----
 .../org/apache/spark/launcher/AbstractCommandBuilder.java     | 2 --
 .../main/java/org/apache/spark/launcher/LauncherProtocol.java | 6 ------
 .../org/apache/spark/ml/classification/GBTClassifier.scala    | 6 +++---
 .../org/apache/spark/ml/evaluation/RegressionEvaluator.scala  | 2 +-
 .../org/apache/spark/ml/feature/ElementwiseProduct.scala      | 2 +-
 .../scala/org/apache/spark/ml/feature/StopWordsRemover.scala  | 2 +-
 .../src/main/scala/org/apache/spark/ml/feature/Word2Vec.scala | 2 +-
 .../scala/org/apache/spark/ml/regression/GBTRegressor.scala   | 7 ++++---
 .../apache/spark/mllib/api/python/FPGrowthModelWrapper.scala  | 3 +--
 .../org/apache/spark/mllib/api/python/PythonMLLibAPI.scala    | 5 ++---
 .../apache/spark/mllib/api/python/Word2VecModelWrapper.scala  | 2 +-
 .../spark/mllib/classification/LogisticRegression.scala       | 1 -
 .../apache/spark/mllib/clustering/GaussianMixtureModel.scala  | 2 +-
 .../org/apache/spark/mllib/evaluation/MultilabelMetrics.scala | 1 -
 .../scala/org/apache/spark/mllib/optimization/LBFGS.scala     | 1 -
 .../spark/mllib/stat/correlation/SpearmanCorrelation.scala    | 1 -
 .../org/apache/spark/mllib/tree/loss/AbsoluteError.scala      | 2 --
 .../main/scala/org/apache/spark/mllib/tree/loss/LogLoss.scala | 2 --
 .../scala/org/apache/spark/mllib/tree/loss/SquaredError.scala | 2 --
 .../main/scala/org/apache/spark/mllib/tree/model/Split.scala  | 1 -
 .../apache/spark/ml/classification/JavaNaiveBayesSuite.java   | 1 -
 .../java/org/apache/spark/ml/clustering/JavaKMeansSuite.java  | 1 -
 .../java/org/apache/spark/ml/feature/JavaBucketizerSuite.java | 1 -
 .../test/java/org/apache/spark/ml/feature/JavaDCTSuite.java   | 1 -
 .../java/org/apache/spark/ml/feature/JavaHashingTFSuite.java  | 1 -
 .../apache/spark/ml/feature/JavaPolynomialExpansionSuite.java | 1 -
 .../apache/spark/ml/feature/JavaStopWordsRemoverSuite.java    | 1 -
 .../org/apache/spark/ml/feature/JavaStringIndexerSuite.java   | 1 -
 .../org/apache/spark/ml/feature/JavaVectorAssemblerSuite.java | 1 -
 .../org/apache/spark/ml/feature/JavaVectorSlicerSuite.java    | 1 -
 .../java/org/apache/spark/ml/feature/JavaWord2VecSuite.java   | 1 -
 .../src/test/scala/org/apache/spark/ml/feature/DCTSuite.scala | 2 +-
 .../org/apache/spark/ml/feature/StandardScalerSuite.scala     | 1 -
 .../scala/org/apache/spark/ml/feature/Word2VecSuite.scala     | 2 +-
 .../scala/org/apache/spark/mllib/feature/Word2VecSuite.scala  | 1 -
 .../scala/org/apache/spark/mllib/random/RandomRDDsSuite.scala | 1 -
 .../src/main/scala/org/apache/spark/repl/Main.scala           | 2 +-
 .../src/main/scala/org/apache/spark/repl/SparkILoop.scala     | 4 ++--
 .../src/test/scala/org/apache/spark/repl/ReplSuite.scala      | 1 -
 sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala    | 1 -
 .../org/apache/spark/sql/catalyst/encoders/package.scala      | 1 -
 .../spark/sql/catalyst/expressions/ExpectsInputTypes.scala    | 1 -
 .../expressions/codegen/GenerateMutableProjection.scala       | 1 -
 .../scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala | 1 -
 .../spark/sql/catalyst/plans/logical/LocalRelation.scala      | 2 +-
 .../spark/sql/catalyst/plans/physical/partitioning.scala      | 1 -
 .../scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala  | 5 +----
 .../org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala  | 1 -
 .../apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala  | 1 -
 .../spark/sql/catalyst/expressions/LiteralGenerator.scala     | 2 --
 .../expressions/aggregate/HyperLogLogPlusPlusSuite.scala      | 2 --
 .../spark/sql/catalyst/optimizer/AggregateOptimizeSuite.scala | 2 +-
 .../apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala | 2 --
 .../apache/spark/sql/catalyst/plans/LogicalPlanSuite.scala    | 2 --
 .../org/apache/spark/sql/catalyst/plans/SameResultSuite.scala | 1 -
 .../spark/sql/execution/UnsafeFixedWidthAggregationMap.java   | 1 -
 .../datasources/parquet/VectorizedPlainValuesReader.java      | 2 --
 sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala  | 4 ++--
 .../src/main/scala/org/apache/spark/sql/GroupedDataset.scala  | 1 -
 .../src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala  | 4 ++--
 .../main/scala/org/apache/spark/sql/execution/SparkQl.scala   | 1 -
 .../sql/execution/aggregate/TungstenAggregationIterator.scala | 2 +-
 .../spark/sql/execution/datasources/WriterContainer.scala     | 1 -
 .../spark/sql/execution/datasources/json/JSONRelation.scala   | 1 -
 .../sql/execution/datasources/json/JacksonGenerator.scala     | 3 ---
 .../execution/datasources/parquet/CatalystReadSupport.scala   | 1 -
 .../org/apache/spark/sql/execution/joins/HashSemiJoin.scala   | 1 -
 .../org/apache/spark/sql/execution/joins/HashedRelation.scala | 2 +-
 .../src/main/scala/org/apache/spark/sql/execution/limit.scala | 1 -
 .../apache/spark/sql/execution/local/BinaryHashJoinNode.scala | 1 -
 .../spark/sql/execution/local/BroadcastHashJoinNode.scala     | 1 -
 .../spark/sql/execution/streaming/CompositeOffset.scala       | 2 --
 .../scala/org/apache/spark/sql/expressions/WindowSpec.scala   | 1 -
 .../scala/org/apache/spark/sql/UserDefinedTypeSuite.scala     | 1 -
 .../scala/org/apache/spark/sql/execution/ReferenceSort.scala  | 2 +-
 .../org/apache/spark/sql/execution/SQLExecutionSuite.scala    | 2 --
 .../scala/org/apache/spark/sql/execution/SparkPlanTest.scala  | 3 +--
 .../sql/execution/UnsafeFixedWidthAggregationMapSuite.scala   | 2 +-
 .../apache/spark/sql/execution/UnsafeRowSerializerSuite.scala | 1 -
 .../sql/execution/columnar/PartitionBatchPruningSuite.scala   | 1 -
 .../execution/datasources/parquet/ParquetEncodingSuite.scala  | 1 -
 .../apache/spark/sql/execution/local/HashJoinNodeSuite.scala  | 2 +-
 .../spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala       | 5 +++--
 .../apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala   | 1 -
 .../spark/sql/hive/thriftserver/ui/ThriftServerTab.scala      | 2 +-
 .../scala/org/apache/spark/sql/hive/HiveSessionState.scala    | 1 -
 .../main/scala/org/apache/spark/sql/hive/HiveStrategies.scala | 3 ++-
 .../src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala | 3 ++-
 .../main/scala/org/apache/spark/sql/hive/TableReader.scala    | 6 ++++--
 .../apache/spark/sql/hive/execution/InsertIntoHiveTable.scala | 1 -
 .../src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala   | 3 ++-
 .../scala/org/apache/spark/sql/hive/CachedTableSuite.scala    | 1 -
 .../spark/sql/hive/execution/HiveOperatorQueryableSuite.scala | 2 +-
 .../spark/streaming/api/java/JavaStreamingContext.scala       | 1 -
 .../spark/streaming/api/java/JavaStreamingListener.scala      | 1 -
 .../org/apache/spark/streaming/api/python/PythonDStream.scala | 2 --
 .../apache/spark/streaming/dstream/FlatMapValuedDStream.scala | 1 -
 .../org/apache/spark/streaming/dstream/InputDStream.scala     | 1 -
 .../org/apache/spark/streaming/dstream/MapValuedDStream.scala | 1 -
 .../org/apache/spark/streaming/dstream/RawInputDStream.scala  | 4 ++--
 .../spark/streaming/dstream/ReducedWindowedDStream.scala      | 3 +--
 .../org/apache/spark/streaming/dstream/ShuffledDStream.scala  | 1 -
 .../org/apache/spark/streaming/dstream/StateDStream.scala     | 1 -
 .../org/apache/spark/streaming/rdd/MapWithStateRDD.scala      | 4 ++--
 .../scala/org/apache/spark/streaming/scheduler/JobSet.scala   | 2 --
 .../spark/streaming/scheduler/ReceivedBlockTracker.scala      | 2 +-
 .../org/apache/spark/streaming/scheduler/ReceiverInfo.scala   | 1 -
 .../main/scala/org/apache/spark/streaming/ui/BatchPage.scala  | 2 +-
 .../scala/org/apache/spark/streaming/ui/StreamingPage.scala   | 2 --
 .../org/apache/spark/streaming/StreamingListenerSuite.scala   | 2 +-
 .../spark/streaming/scheduler/RateControllerSuite.scala       | 2 --
 .../apache/spark/deploy/yarn/ApplicationMasterArguments.scala | 1 -
 yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala | 7 ++-----
 .../scala/org/apache/spark/deploy/yarn/YarnRMClient.scala     | 3 +--
 .../deploy/yarn/ClientDistributedCacheManagerSuite.scala      | 3 +--
 .../scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala | 2 +-
 .../apache/spark/network/shuffle/ShuffleTestAccessor.scala    | 2 +-
 226 files changed, 103 insertions(+), 337 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/checkstyle.xml
----------------------------------------------------------------------
diff --git a/checkstyle.xml b/checkstyle.xml
index b5d1617..a165fee 100644
--- a/checkstyle.xml
+++ b/checkstyle.xml
@@ -166,5 +166,6 @@
             <property name="exceptionVariableName" value="expected"/>
         </module>
         <module name="CommentsIndentation"/>
+        <module name="UnusedImports"/>
     </module>
 </module>

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
index efe0470..f7ffb1b 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
@@ -19,7 +19,6 @@ package org.apache.spark.network.protocol;
 
 import com.google.common.base.Objects;
 import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 
 import org.apache.spark.network.buffer.ManagedBuffer;
 import org.apache.spark.network.buffer.NettyManagedBuffer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
index 9621379..2b30920 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
@@ -19,7 +19,6 @@ package org.apache.spark.network.protocol;
 
 import com.google.common.base.Objects;
 import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 
 import org.apache.spark.network.buffer.ManagedBuffer;
 import org.apache.spark.network.buffer.NettyManagedBuffer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
index bae866e..d73014e 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
@@ -19,7 +19,6 @@ package org.apache.spark.network.protocol;
 
 import com.google.common.base.Objects;
 import io.netty.buffer.ByteBuf;
-import io.netty.buffer.Unpooled;
 
 import org.apache.spark.network.buffer.ManagedBuffer;
 import org.apache.spark.network.buffer.NettyManagedBuffer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
index 26747ee..258ef81 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
@@ -20,9 +20,6 @@ package org.apache.spark.network.protocol;
 import com.google.common.base.Objects;
 import io.netty.buffer.ByteBuf;
 
-import org.apache.spark.network.buffer.ManagedBuffer;
-import org.apache.spark.network.buffer.NettyManagedBuffer;
-
 /**
  * Message indicating an error when transferring a stream.
  */

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
index 35af5a8..dc183c0 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
@@ -20,9 +20,6 @@ package org.apache.spark.network.protocol;
 import com.google.common.base.Objects;
 import io.netty.buffer.ByteBuf;
 
-import org.apache.spark.network.buffer.ManagedBuffer;
-import org.apache.spark.network.buffer.NettyManagedBuffer;
-
 /**
  * Request to stream data from the remote end.
  * <p>

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
index 51b8999..87e212f 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
@@ -21,7 +21,6 @@ import com.google.common.base.Objects;
 import io.netty.buffer.ByteBuf;
 
 import org.apache.spark.network.buffer.ManagedBuffer;
-import org.apache.spark.network.buffer.NettyManagedBuffer;
 
 /**
  * Response to {@link StreamRequest} when the stream has been successfully opened.

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java
index 127335e..3d71eba 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslEncryption.java
@@ -33,7 +33,6 @@ import io.netty.channel.ChannelPromise;
 import io.netty.channel.FileRegion;
 import io.netty.handler.codec.MessageToMessageDecoder;
 import io.netty.util.AbstractReferenceCounted;
-import io.netty.util.ReferenceCountUtil;
 
 import org.apache.spark.network.util.ByteArrayWritableChannel;
 import org.apache.spark.network.util.NettyUtils;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
index 296ced3..bebe88e 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
@@ -19,7 +19,6 @@ package org.apache.spark.network.server;
 
 import java.nio.ByteBuffer;
 
-import com.google.common.base.Preconditions;
 import com.google.common.base.Throwables;
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelFuture;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/util/NettyUtils.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/NettyUtils.java b/common/network-common/src/main/java/org/apache/spark/network/util/NettyUtils.java
index caa7260..10de9d3 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/NettyUtils.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/NettyUtils.java
@@ -31,8 +31,6 @@ import io.netty.channel.epoll.EpollSocketChannel;
 import io.netty.channel.nio.NioEventLoopGroup;
 import io.netty.channel.socket.nio.NioServerSocketChannel;
 import io.netty.channel.socket.nio.NioSocketChannel;
-import io.netty.handler.codec.ByteToMessageDecoder;
-import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
 import io.netty.util.internal.PlatformDependent;
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java b/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
index a466c72..3f7024a 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
@@ -17,7 +17,6 @@
 
 package org.apache.spark.network.util;
 
-import java.util.Iterator;
 import java.util.LinkedList;
 
 import com.google.common.base.Preconditions;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
----------------------------------------------------------------------
diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java b/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
index 351c793..cdce297 100644
--- a/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
+++ b/common/network-shuffle/src/main/java/org/apache/spark/network/sasl/ShuffleSecretManager.java
@@ -24,7 +24,6 @@ import java.util.concurrent.ConcurrentHashMap;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.spark.network.sasl.SecretKeyHolder;
 import org.apache.spark.network.util.JavaUtils;
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleSecuritySuite.java
----------------------------------------------------------------------
diff --git a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleSecuritySuite.java b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleSecuritySuite.java
index 08ddb37..acc1168 100644
--- a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleSecuritySuite.java
+++ b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/ExternalShuffleSecuritySuite.java
@@ -30,7 +30,6 @@ import org.apache.spark.network.TestUtils;
 import org.apache.spark.network.TransportContext;
 import org.apache.spark.network.sasl.SaslServerBootstrap;
 import org.apache.spark.network.sasl.SecretKeyHolder;
-import org.apache.spark.network.server.RpcHandler;
 import org.apache.spark.network.server.TransportServer;
 import org.apache.spark.network.server.TransportServerBootstrap;
 import org.apache.spark.network.shuffle.protocol.ExecutorShuffleInfo;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/java/org/apache/spark/shuffle/sort/PackedRecordPointer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/PackedRecordPointer.java b/core/src/main/java/org/apache/spark/shuffle/sort/PackedRecordPointer.java
index f8f2b22..f7a6c68 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/PackedRecordPointer.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/PackedRecordPointer.java
@@ -17,8 +17,6 @@
 
 package org.apache.spark.shuffle.sort;
 
-import org.apache.spark.memory.TaskMemoryManager;
-
 /**
  * Wrapper around an 8-byte word that holds a 24-bit partition number and 40-bit record pointer.
  * <p>
@@ -28,7 +26,7 @@ import org.apache.spark.memory.TaskMemoryManager;
  * </pre>
  * This implies that the maximum addressable page size is 2^27 bits = 128 megabytes, assuming that
  * our offsets in pages are not 8-byte-word-aligned. Since we have 2^13 pages (based off the
- * 13-bit page numbers assigned by {@link TaskMemoryManager}), this
+ * 13-bit page numbers assigned by {@link org.apache.spark.memory.TaskMemoryManager}), this
  * implies that we can address 2^13 * 128 megabytes = 1 terabyte of RAM per task.
  * <p>
  * Assuming word-alignment would allow for a 1 gigabyte maximum page size, but we leave this

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
index 128a825..3f4402b 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
@@ -25,7 +25,6 @@ import java.util.Iterator;
 import scala.Option;
 import scala.Product2;
 import scala.collection.JavaConverters;
-import scala.collection.immutable.Map;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
index d2bf297..c2a8f42 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
@@ -20,7 +20,6 @@ package org.apache.spark.util.collection.unsafe.sort;
 import com.google.common.primitives.UnsignedLongs;
 
 import org.apache.spark.annotation.Private;
-import org.apache.spark.unsafe.Platform;
 import org.apache.spark.unsafe.types.ByteArray;
 import org.apache.spark.unsafe.types.UTF8String;
 import org.apache.spark.util.Utils;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java
index dbf6770..de92b8d 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RecordPointerAndKeyPrefix.java
@@ -17,11 +17,9 @@
 
 package org.apache.spark.util.collection.unsafe.sort;
 
-import org.apache.spark.memory.TaskMemoryManager;
-
 final class RecordPointerAndKeyPrefix {
   /**
-   * A pointer to a record; see {@link TaskMemoryManager} for a
+   * A pointer to a record; see {@link org.apache.spark.memory.TaskMemoryManager} for a
    * description of how these addresses are encoded.
    */
   public long recordPointer;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
index db143d7..9b8279f 100644
--- a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
+++ b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
@@ -26,7 +26,7 @@ import com.codahale.metrics.{Gauge, MetricRegistry}
 
 import org.apache.spark.metrics.source.Source
 import org.apache.spark.scheduler._
-import org.apache.spark.util.{Clock, SystemClock, ThreadUtils, Utils}
+import org.apache.spark.util.{Clock, SystemClock, ThreadUtils}
 
 /**
  * An agent that dynamically allocates and removes executors based on the workload.

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala b/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
index 45b20c0..7f474ed 100644
--- a/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
+++ b/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
@@ -22,7 +22,6 @@ import java.util.concurrent.{ScheduledFuture, TimeUnit}
 import scala.collection.mutable
 import scala.concurrent.Future
 
-import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.rpc.{RpcCallContext, RpcEnv, ThreadSafeRpcEndpoint}
 import org.apache.spark.scheduler._
 import org.apache.spark.storage.BlockManagerId

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/Partitioner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/Partitioner.scala b/core/src/main/scala/org/apache/spark/Partitioner.scala
index 976c19f..98c3abe 100644
--- a/core/src/main/scala/org/apache/spark/Partitioner.scala
+++ b/core/src/main/scala/org/apache/spark/Partitioner.scala
@@ -21,13 +21,13 @@ import java.io.{IOException, ObjectInputStream, ObjectOutputStream}
 
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
-import scala.reflect.{classTag, ClassTag}
+import scala.reflect.ClassTag
 import scala.util.hashing.byteswap32
 
 import org.apache.spark.rdd.{PartitionPruningRDD, RDD}
 import org.apache.spark.serializer.JavaSerializer
 import org.apache.spark.util.{CollectionsUtils, Utils}
-import org.apache.spark.util.random.{SamplingUtils, XORShiftRandom}
+import org.apache.spark.util.random.SamplingUtils
 
 /**
  * An object that defines how the elements in a key-value pair RDD are partitioned by key.

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index b503c61..9f5a72b 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -51,7 +51,6 @@ import org.apache.spark.deploy.{LocalSparkCluster, SparkHadoopUtil}
 import org.apache.spark.input.{FixedLengthBinaryInputFormat, PortableDataStream, StreamInputFormat,
   WholeTextFileInputFormat}
 import org.apache.spark.io.CompressionCodec
-import org.apache.spark.metrics.MetricsSystem
 import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
 import org.apache.spark.rdd._
 import org.apache.spark.rpc.RpcEndpointRef

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala b/core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala
index 9df6106..0cb93f1 100644
--- a/core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala
+++ b/core/src/main/scala/org/apache/spark/TaskNotSerializableException.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark
 
-import org.apache.spark.annotation.DeveloperApi
-
 /**
  * Exception thrown when a task cannot be serialized.
  */

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
index ed31277..20d6c93 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.api.java
 
-import java.util.Comparator
-
 import scala.language.implicitConversions
 import scala.reflect.ClassTag
 
@@ -191,7 +189,6 @@ class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])
    * Return this RDD sorted by the given key function.
    */
   def sortBy[S](f: JFunction[T, S], ascending: Boolean, numPartitions: Int): JavaRDD[T] = {
-    import scala.collection.JavaConverters._
     def fn: (T) => S = (x: T) => f.call(x)
     import com.google.common.collect.Ordering  // shadows scala.math.Ordering
     implicit val ordering = Ordering.natural().asInstanceOf[Ordering[S]]

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
index 7f35ac4..fd7b4fc 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
@@ -21,7 +21,6 @@ import scala.reflect.ClassTag
 
 import org.apache.spark.SecurityManager
 import org.apache.spark.SparkConf
-import org.apache.spark.annotation.DeveloperApi
 
 /**
  * An interface for all the broadcast implementations in Spark (to allow

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala b/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
index 4911c3b..81718e0 100644
--- a/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
@@ -26,7 +26,7 @@ import scala.collection.JavaConverters._
 
 import com.google.common.io.{ByteStreams, Files}
 
-import org.apache.spark.{Logging, SparkException}
+import org.apache.spark.Logging
 import org.apache.spark.api.r.RUtils
 import org.apache.spark.util.{RedirectThread, Utils}
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala
index b197dbc..8d5edae 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/DriverInfo.scala
@@ -19,7 +19,6 @@ package org.apache.spark.deploy.master
 
 import java.util.Date
 
-import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.deploy.DriverDescription
 import org.apache.spark.util.Utils
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 1c24c63..283db6c 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -37,7 +37,7 @@ import org.apache.spark.deploy.master.{DriverState, Master}
 import org.apache.spark.deploy.worker.ui.WorkerWebUI
 import org.apache.spark.metrics.MetricsSystem
 import org.apache.spark.rpc._
-import org.apache.spark.util.{SignalLogger, ThreadUtils, Utils}
+import org.apache.spark.util.{ThreadUtils, Utils}
 
 private[deploy] class Worker(
     override val rpcEnv: RpcEnv,

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
index 0ca9064..09ae64a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.deploy.worker.ui
 
 import java.io.File
-import java.net.URI
 import javax.servlet.http.HttpServletRequest
 
 import scala.xml.Node

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
index bd61d04..c960660 100644
--- a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.rdd
 
 import org.apache.spark.{Logging, TaskContext}
-import org.apache.spark.annotation.Experimental
 import org.apache.spark.partial.BoundedDouble
 import org.apache.spark.partial.MeanEvaluator
 import org.apache.spark.partial.PartialResult

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala b/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
index 469962d..8cbe80d 100644
--- a/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.rdd
 
-import java.sql.{Connection, PreparedStatement, ResultSet}
+import java.sql.{Connection, ResultSet}
 
 import scala.reflect.ClassTag
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala b/core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala
index a163bbd..503aa0d 100644
--- a/core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/LocalCheckpointRDD.scala
@@ -19,7 +19,7 @@ package org.apache.spark.rdd
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{Partition, SparkContext, SparkEnv, SparkException, TaskContext}
+import org.apache.spark.{Partition, SparkContext, SparkException, TaskContext}
 import org.apache.spark.storage.RDDBlockId
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala b/core/src/main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala
index cac6cbe..92f625f 100644
--- a/core/src/main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ReliableRDDCheckpointData.scala
@@ -22,7 +22,6 @@ import scala.reflect.ClassTag
 import org.apache.hadoop.fs.Path
 
 import org.apache.spark._
-import org.apache.spark.util.SerializableConfiguration
 
 /**
  * An implementation of checkpointing that writes the RDD data to reliable storage.

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala b/core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala
index 89eda85..c83a632 100644
--- a/core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala
+++ b/core/src/main/scala/org/apache/spark/rpc/netty/NettyRpcEnv.scala
@@ -17,7 +17,6 @@
 package org.apache.spark.rpc.netty
 
 import java.io._
-import java.lang.{Boolean => JBoolean}
 import java.net.{InetSocketAddress, URI}
 import java.nio.ByteBuffer
 import java.nio.channels.{Pipe, ReadableByteChannel, WritableByteChannel}

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
index a3d2db3..949e88f 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
@@ -19,7 +19,6 @@ package org.apache.spark.scheduler
 
 import java.util.Properties
 
-import org.apache.spark.TaskContext
 import org.apache.spark.util.CallSite
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
index d5cd2da..a3845c6 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
@@ -19,11 +19,9 @@ package org.apache.spark.scheduler
 
 import java.util.Properties
 
-import scala.collection.Map
 import scala.language.existentials
 
 import org.apache.spark._
-import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.rdd.RDD
 import org.apache.spark.util.CallSite
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
index fccd6e0..8477a66 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.scheduler
 
-import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 import org.apache.spark.storage.BlockManagerId
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
index 29e469c..8d5c11d 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala
@@ -22,7 +22,7 @@ import java.nio.ByteBuffer
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.rpc.RpcEndpointRef
 import org.apache.spark.scheduler.ExecutorLossReason
-import org.apache.spark.util.{SerializableBuffer, Utils}
+import org.apache.spark.util.SerializableBuffer
 
 private[spark] sealed trait CoarseGrainedClusterMessage extends Serializable
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
index 90c0728..95bdf0c 100644
--- a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
@@ -23,9 +23,9 @@ import javax.annotation.concurrent.NotThreadSafe
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{SparkConf, SparkEnv}
+import org.apache.spark.SparkEnv
 import org.apache.spark.annotation.{DeveloperApi, Private}
-import org.apache.spark.util.{ByteBufferInputStream, NextIterator, Utils}
+import org.apache.spark.util.NextIterator
 
 /**
  * :: DeveloperApi ::

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala b/core/src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala
index 0a65bbf..04e4cf8 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/BaseShuffleHandle.scala
@@ -17,8 +17,7 @@
 
 package org.apache.spark.shuffle
 
-import org.apache.spark.{Aggregator, Partitioner, ShuffleDependency}
-import org.apache.spark.serializer.Serializer
+import org.apache.spark.ShuffleDependency
 
 /**
  * A basic ShuffleHandle implementation that just captures registerShuffle's parameters.

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala b/core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala
index 81aea33..d1ecbc1 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockResolver.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.shuffle
 
-import java.nio.ByteBuffer
-
 import org.apache.spark.network.buffer.ManagedBuffer
 import org.apache.spark.storage.ShuffleBlockId
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala b/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala
index 28bcced..7694e95 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleWriter.scala
@@ -20,7 +20,6 @@ package org.apache.spark.shuffle.hash
 import java.io.IOException
 
 import org.apache.spark._
-import org.apache.spark.executor.ShuffleWriteMetrics
 import org.apache.spark.scheduler.MapStatus
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.shuffle._

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala b/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala
index 7eb3d96..4a7b1f0 100644
--- a/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala
+++ b/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.shuffle.sort
 
 import org.apache.spark._
-import org.apache.spark.executor.ShuffleWriteMetrics
 import org.apache.spark.scheduler.MapStatus
 import org.apache.spark.shuffle.{BaseShuffleHandle, IndexShuffleBlockResolver, ShuffleWriter}
 import org.apache.spark.storage.ShuffleBlockId

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
index 0b7aa59..1cb027a 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
@@ -19,7 +19,7 @@ package org.apache.spark.storage
 
 import scala.collection.Iterable
 import scala.collection.generic.CanBuildFrom
-import scala.concurrent.{Await, Future}
+import scala.concurrent.Future
 
 import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.rpc.RpcEndpointRef

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
index bfa6560..db12a4a 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
@@ -22,7 +22,6 @@ import java.nio.ByteBuffer
 import java.nio.channels.FileChannel.MapMode
 
 import org.apache.spark.Logging
-import org.apache.spark.serializer.Serializer
 import org.apache.spark.util.Utils
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index bc143b7..6b36012 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -21,7 +21,7 @@ import java.net.{URI, URL}
 import javax.servlet.DispatcherType
 import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
 
-import scala.collection.mutable.{ArrayBuffer, StringBuilder}
+import scala.collection.mutable.ArrayBuffer
 import scala.language.implicitConversions
 import scala.xml.Node
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
index edc6670..cc476d6 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorThreadDumpPage.scala
@@ -19,7 +19,6 @@ package org.apache.spark.ui.exec
 
 import javax.servlet.http.HttpServletRequest
 
-import scala.util.Try
 import scala.xml.{Node, Text}
 
 import org.apache.spark.ui.{UIUtils, WebUIPage}

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
index 52587d2..83ded92 100644
--- a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
+++ b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala
@@ -20,7 +20,6 @@ package org.apache.spark.util
 import java.lang.management.ManagementFactory
 import java.lang.reflect.{Field, Modifier}
 import java.util.{IdentityHashMap, Random}
-import java.util.concurrent.ConcurrentHashMap
 
 import scala.collection.mutable.ArrayBuffer
 import scala.runtime.ScalaRunTime

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala
index 4c1e161..6b74a29 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/AppendOnlyMap.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.util.collection
 
-import java.util.{Arrays, Comparator}
+import java.util.Comparator
 
 import com.google.common.hash.Hashing
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
index aa15e79..1692df7 100644
--- a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
+++ b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
@@ -17,9 +17,6 @@
 
 package org.apache.spark.launcher;
 
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/java/org/apache/spark/serializer/TestJavaSerializerImpl.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/serializer/TestJavaSerializerImpl.java b/core/src/test/java/org/apache/spark/serializer/TestJavaSerializerImpl.java
index 3d50ab4..8aa0636 100644
--- a/core/src/test/java/org/apache/spark/serializer/TestJavaSerializerImpl.java
+++ b/core/src/test/java/org/apache/spark/serializer/TestJavaSerializerImpl.java
@@ -21,7 +21,6 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 
-import scala.Option;
 import scala.reflect.ClassTag;
 
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index 6546def..ddf4876 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -22,7 +22,7 @@ import scala.collection.mutable.ArrayBuffer
 import org.mockito.Matchers.{any, isA}
 import org.mockito.Mockito._
 
-import org.apache.spark.rpc.{RpcAddress, RpcCallContext, RpcEndpointRef, RpcEnv}
+import org.apache.spark.rpc.{RpcAddress, RpcCallContext, RpcEnv}
 import org.apache.spark.scheduler.{CompressedMapStatus, MapStatus}
 import org.apache.spark.shuffle.FetchFailedException
 import org.apache.spark.storage.{BlockManagerId, ShuffleBlockId}

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
index fa35819..159b448 100644
--- a/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
@@ -20,11 +20,8 @@ package org.apache.spark
 import java.io.File
 import javax.net.ssl.SSLContext
 
-import com.google.common.io.Files
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.spark.util.Utils
-
 class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
   test("test resolving property file as spark conf ") {

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index fc31b78..b66aba9 100644
--- a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -17,11 +17,9 @@
 
 package org.apache.spark
 
-import java.util.concurrent.{Semaphore, TimeUnit}
+import java.util.concurrent.Semaphore
 import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
 
-import org.apache.spark.scheduler._
-
 /**
  * Holds state shared across task threads in some ThreadingSuite tests.
  */

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
index f416ace..cbdf175 100644
--- a/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/LogUrlsStandaloneSuite.scala
@@ -22,7 +22,7 @@ import java.net.URL
 import scala.collection.mutable
 import scala.io.Source
 
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.{LocalSparkContext, SparkContext, SparkFunSuite}
 import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded}
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.util.SparkConfWithEnv

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala b/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
index 6587793..379c038 100644
--- a/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.deploy.client
 
 import java.util.concurrent.ConcurrentLinkedQueue
 
-import scala.collection.JavaConverters._
 import scala.concurrent.duration._
 
 import org.scalatest.BeforeAndAfterAll

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 3baa2e2..8e8007f 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -24,16 +24,14 @@ import java.util.concurrent.TimeUnit
 import java.util.zip.{ZipInputStream, ZipOutputStream}
 
 import scala.concurrent.duration._
-import scala.io.Source
 import scala.language.postfixOps
 
 import com.google.common.base.Charsets
 import com.google.common.io.{ByteStreams, Files}
-import org.apache.hadoop.fs.Path
 import org.apache.hadoop.hdfs.DistributedFileSystem
 import org.json4s.jackson.JsonMethods._
 import org.mockito.Matchers.any
-import org.mockito.Mockito.{doReturn, mock, spy, verify, when}
+import org.mockito.Mockito.{mock, spy, verify}
 import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually._
@@ -45,8 +43,6 @@ import org.apache.spark.util.{Clock, JsonProtocol, ManualClock, Utils}
 
 class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
 
-  import FsHistoryProvider._
-
   private var testDir: File = null
 
   before {

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
index d852255..88b3a0e 100644
--- a/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/input/WholeTextFileRecordReaderSuite.scala
@@ -24,7 +24,7 @@ import java.io.FileOutputStream
 import scala.collection.immutable.IndexedSeq
 
 import org.apache.hadoop.io.Text
-import org.apache.hadoop.io.compress.{CompressionCodecFactory, DefaultCodec, GzipCodec}
+import org.apache.hadoop.io.compress.{CompressionCodecFactory, GzipCodec}
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.{Logging, SparkConf, SparkContext, SparkFunSuite}

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala b/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
index 639d1da..713560d 100644
--- a/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/launcher/LauncherBackendSuite.scala
@@ -26,7 +26,6 @@ import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark._
-import org.apache.spark.launcher._
 
 class LauncherBackendSuite extends SparkFunSuite with Matchers {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
index e5cb9d3..6dad3f4 100644
--- a/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
+++ b/core/src/test/scala/org/apache/spark/memory/TestMemoryManager.scala
@@ -17,10 +17,8 @@
 
 package org.apache.spark.memory
 
-import scala.collection.mutable
-
 import org.apache.spark.SparkConf
-import org.apache.spark.storage.{BlockId, BlockStatus}
+import org.apache.spark.storage.BlockId
 
 class TestMemoryManager(conf: SparkConf)
   extends MemoryManager(conf, numCores = 1, Long.MaxValue, Long.MaxValue) {

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
index 2204800..43e6124 100644
--- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.rpc
 import java.io.{File, NotSerializableException}
 import java.nio.charset.StandardCharsets.UTF_8
 import java.util.UUID
-import java.util.concurrent.{ConcurrentLinkedQueue, CountDownLatch, TimeoutException, TimeUnit}
+import java.util.concurrent.{ConcurrentLinkedQueue, CountDownLatch, TimeUnit}
 
 import scala.collection.mutable
 import scala.collection.JavaConverters._

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala b/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
index 12113be..e553956 100644
--- a/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala
@@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicInteger
 import org.mockito.Mockito._
 
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.rpc.{RpcAddress, RpcEndpoint, RpcEnv, TestRpcEndpoint}
+import org.apache.spark.rpc.{RpcAddress, TestRpcEndpoint}
 
 class InboxSuite extends SparkFunSuite {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
index 5db7535..2df0540 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackendSuite.scala
@@ -33,7 +33,7 @@ import org.scalatest.BeforeAndAfter
 
 import org.apache.spark.{LocalSparkContext, SecurityManager, SparkConf, SparkContext, SparkFunSuite}
 import org.apache.spark.network.shuffle.mesos.MesosExternalShuffleClient
-import org.apache.spark.rpc.{RpcEndpointRef}
+import org.apache.spark.rpc.RpcEndpointRef
 import org.apache.spark.scheduler.TaskSchedulerImpl
 
 class CoarseMesosSchedulerBackendSuite extends SparkFunSuite

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/ui/UISuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index 69c4605..2b59b48 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -21,7 +21,6 @@ import java.net.{BindException, ServerSocket}
 
 import scala.io.Source
 
-import org.eclipse.jetty.server.Server
 import org.eclipse.jetty.servlet.ServletContextHandler
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/util/ResetSystemProperties.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/ResetSystemProperties.scala b/core/src/test/scala/org/apache/spark/util/ResetSystemProperties.scala
index 60fb7ab..75e4504 100644
--- a/core/src/test/scala/org/apache/spark/util/ResetSystemProperties.scala
+++ b/core/src/test/scala/org/apache/spark/util/ResetSystemProperties.scala
@@ -22,8 +22,6 @@ import java.util.Properties
 import org.apache.commons.lang3.SerializationUtils
 import org.scalatest.{BeforeAndAfterEach, Suite}
 
-import org.apache.spark.SparkFunSuite
-
 /**
  * Mixin for automatically resetting system properties that are modified in ScalaTest tests.
  * This resets the properties after each individual test.

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index 49088aa..c342b68 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.util
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, PrivateMethodTester}
+import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
 
 import org.apache.spark.SparkFunSuite
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
----------------------------------------------------------------------
diff --git a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala b/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
index b5416d7..8a0f938 100644
--- a/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ b/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -17,12 +17,10 @@
 
 package org.apache.spark.sql.jdbc
 
-import java.math.BigDecimal
-import java.sql.{Connection, Date, Timestamp}
+import java.sql.Connection
 import java.util.Properties
 
 import org.apache.spark.sql.test.SharedSQLContext
-import org.apache.spark.sql.types._
 import org.apache.spark.tags.DockerTest
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
index 0448a1a..1a6caa8 100644
--- a/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
@@ -28,7 +28,6 @@ import org.apache.spark.api.java.function.Function2;
 import org.apache.spark.api.java.function.PairFunction;
 
 import java.io.Serializable;
-import java.util.Collections;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/java/org/apache/spark/examples/mllib/JavaMultiLabelClassificationMetricsExample.java
----------------------------------------------------------------------
diff --git a/examples/src/main/java/org/apache/spark/examples/mllib/JavaMultiLabelClassificationMetricsExample.java b/examples/src/main/java/org/apache/spark/examples/mllib/JavaMultiLabelClassificationMetricsExample.java
index 5ba01e0..4717155 100644
--- a/examples/src/main/java/org/apache/spark/examples/mllib/JavaMultiLabelClassificationMetricsExample.java
+++ b/examples/src/main/java/org/apache/spark/examples/mllib/JavaMultiLabelClassificationMetricsExample.java
@@ -25,10 +25,8 @@ import scala.Tuple2;
 
 import org.apache.spark.api.java.*;
 import org.apache.spark.mllib.evaluation.MultilabelMetrics;
-import org.apache.spark.rdd.RDD;
 import org.apache.spark.SparkConf;
 // $example off$
-import org.apache.spark.SparkContext;
 
 public class JavaMultiLabelClassificationMetricsExample {
   public static void main(String[] args) {

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala b/examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala
index e37a3fa..743fc13 100644
--- a/examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala
@@ -23,7 +23,6 @@ import java.io.File
 import scala.io.Source._
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 
 /**
   * Simple test for reading and writing to a distributed

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala b/examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
index fa4a3af..08b6c71 100644
--- a/examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
@@ -21,7 +21,6 @@ package org.apache.spark.examples
 import java.util.Random
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 
 /**
   * Usage: GroupByTest [numMappers] [numKVPairs] [KeySize] [numReducers]

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
index 407e3e0..19bebff 100644
--- a/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
@@ -25,8 +25,6 @@ import scala.collection.mutable.HashSet
 
 import breeze.linalg.{squaredDistance, DenseVector, Vector}
 
-import org.apache.spark.SparkContext._
-
 /**
  * K-means clustering.
  *

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/LocalPi.scala b/examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
index 3d92362..720d92f 100644
--- a/examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
@@ -20,9 +20,6 @@ package org.apache.spark.examples
 
 import scala.math.random
 
-import org.apache.spark._
-import org.apache.spark.SparkContext._
-
 object LocalPi {
   def main(args: Array[String]) {
     var count = 0

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala b/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
index a80de10..c55b68e 100644
--- a/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
@@ -19,7 +19,6 @@
 package org.apache.spark.examples
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 
 /**
  * Executes a roll up-style query against Apache logs.

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala b/examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
index 3b0b00f..7c09664 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
@@ -21,7 +21,6 @@ package org.apache.spark.examples
 import java.util.Random
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 
 /**
   * Usage: SimpleSkewedGroupByTest [numMappers] [numKVPairs] [valSize] [numReducers] [ratio]

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala b/examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala
index 719e217..7796f36 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala
@@ -21,7 +21,6 @@ package org.apache.spark.examples
 import java.util.Random
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 
 /**
   * Usage: GroupByTest [numMappers] [numKVPairs] [KeySize] [numReducers]

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
index 1ea9121..d9f94a4 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
@@ -21,7 +21,6 @@ package org.apache.spark.examples
 import breeze.linalg.{squaredDistance, DenseVector, Vector}
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 
 /**
  * K-means clustering.

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala b/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
index 018bdf6..2664ddb 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
@@ -19,7 +19,6 @@
 package org.apache.spark.examples
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 
 /**
  * Computes the PageRank of URLs from an input file. Input file should

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala b/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
index b92740f..fc7a1f8 100644
--- a/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
@@ -22,7 +22,6 @@ import scala.collection.mutable
 import scala.util.Random
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 
 /**
  * Transitive closure on a graph.

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/graphx/SynthBenchmark.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/graphx/SynthBenchmark.scala b/examples/src/main/scala/org/apache/spark/examples/graphx/SynthBenchmark.scala
index 41ca5cb..6d2228c 100644
--- a/examples/src/main/scala/org/apache/spark/examples/graphx/SynthBenchmark.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/graphx/SynthBenchmark.scala
@@ -21,7 +21,6 @@ package org.apache.spark.examples.graphx
 import java.io.{FileOutputStream, PrintWriter}
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 import org.apache.spark.graphx.{GraphXUtils, PartitionStrategy}
 import org.apache.spark.graphx.util.GraphGenerators
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala
index 50998c9..25be878 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/LinearRegressionExample.scala
@@ -18,15 +18,13 @@
 // scalastyle:off println
 package org.apache.spark.examples.ml
 
-import scala.collection.mutable
 import scala.language.reflectiveCalls
 
 import scopt.OptionParser
 
 import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.examples.mllib.AbstractParams
-import org.apache.spark.ml.{Pipeline, PipelineStage}
-import org.apache.spark.ml.regression.{LinearRegression, LinearRegressionModel}
+import org.apache.spark.ml.regression.LinearRegression
 import org.apache.spark.sql.DataFrame
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala
index eda211b..5ff3d36 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/CosineSimilarity.scala
@@ -21,7 +21,6 @@ package org.apache.spark.examples.mllib
 import scopt.OptionParser
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 import org.apache.spark.mllib.linalg.Vectors
 import org.apache.spark.mllib.linalg.distributed.{MatrixEntry, RowMatrix}
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
index 69691ae..09750e5 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
@@ -24,7 +24,6 @@ import org.apache.log4j.{Level, Logger}
 import scopt.OptionParser
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 import org.apache.spark.mllib.recommendation.{ALS, MatrixFactorizationModel, Rating}
 import org.apache.spark.rdd.RDD
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b5f02d67/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
index 011db4f..0da4005 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/SampledRDDs.scala
@@ -21,7 +21,6 @@ package org.apache.spark.examples.mllib
 import scopt.OptionParser
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 import org.apache.spark.mllib.util.MLUtils
 
 /**


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org