Posted to commits@spark.apache.org by ad...@apache.org on 2014/02/18 23:44:51 UTC

[3/3] git commit: Optimized imports

Optimized imports

Optimized imports and arranged them according to the Scala style guide at
https://cwiki.apache.org/confluence/display/SPARK/Spark+Code+Style+Guide#SparkCodeStyleGuide-Imports
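
For readers skimming the hunks below: the linked guide prescribes java and javax imports first, then scala.*, then third-party libraries, then org.apache.spark classes, with the groups separated by blank lines and alphabetized within each group. A minimal illustrative sketch of that ordering, assembled for this note from imports that actually appear in the hunks below (it is not a file from this commit):

    import java.io.File
    import java.util.concurrent.TimeUnit

    import scala.collection.JavaConverters._
    import scala.collection.mutable.ArrayBuffer

    import com.google.common.io.Files
    import org.slf4j.{Logger, LoggerFactory}

    import org.apache.spark.Logging
    import org.apache.spark.util.Utils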

Author: NirmalReddy <ni...@imaginea.com>
Author: NirmalReddy <ni...@yahoo.com>

Closes #613 from NirmalReddy/opt-imports and squashes the following commits:

578b4f5 [NirmalReddy] imported java.lang.Double as JDouble (see the sketch after this commit list)
a2cbcc5 [NirmalReddy] addressed the comments
776d664 [NirmalReddy] Optimized imports in core
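
The JDouble rename in 578b4f5 imports java.lang.Double under an alias so it no longer shadows scala.Double, letting one file refer to both types unambiguously; the actual change is in the JavaDoubleRDD.scala hunk below. A minimal sketch of the pattern (the object and its two helper defs are hypothetical, for illustration only):

    import java.lang.{Double => JDouble}

    object DoubleBoxing {
      // scala.Double is the unboxed value type; JDouble is boxed java.lang.Double.
      // With the rename in place, a bare "Double" always means scala.Double.
      def box(x: scala.Double): JDouble = JDouble.valueOf(x)
      def unbox(x: JDouble): scala.Double = x.doubleValue
    }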


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/ccb327a4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/ccb327a4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/ccb327a4

Branch: refs/heads/master
Commit: ccb327a49a7323efd98a33223c438a670bba7cec
Parents: f74ae0e
Author: NirmalReddy <ni...@imaginea.com>
Authored: Tue Feb 18 14:44:36 2014 -0800
Committer: Aaron Davidson <aa...@databricks.com>
Committed: Tue Feb 18 14:44:36 2014 -0800

----------------------------------------------------------------------
 .../apache/spark/network/netty/FileClient.java  |  4 +-
 .../spark/network/netty/FileServerHandler.java  |  4 +-
 .../mapreduce/SparkHadoopMapReduceUtil.scala    |  3 +-
 .../scala/org/apache/spark/Accumulators.scala   |  3 +-
 .../apache/spark/BlockStoreShuffleFetcher.scala |  3 +-
 .../scala/org/apache/spark/CacheManager.scala   |  4 +-
 .../scala/org/apache/spark/FutureAction.scala   |  4 +-
 .../scala/org/apache/spark/HttpFileServer.scala |  4 +-
 .../scala/org/apache/spark/HttpServer.scala     |  2 +-
 .../org/apache/spark/MapOutputTracker.scala     |  1 -
 .../org/apache/spark/SerializableWritable.scala |  2 +-
 .../scala/org/apache/spark/ShuffleFetcher.scala |  2 -
 .../main/scala/org/apache/spark/SparkConf.scala |  2 -
 .../scala/org/apache/spark/SparkContext.scala   | 15 +++----
 .../main/scala/org/apache/spark/SparkEnv.scala  |  9 ++--
 .../org/apache/spark/SparkHadoopWriter.scala    |  2 +-
 .../apache/spark/api/java/JavaDoubleRDD.scala   | 44 ++++++++++----------
 .../org/apache/spark/api/java/JavaRDD.scala     |  2 +-
 .../java/function/DoubleFlatMapFunction.java    |  1 -
 .../spark/api/java/function/DoubleFunction.java |  1 -
 .../spark/api/java/function/Function.java       |  5 +--
 .../spark/api/java/function/Function2.java      |  4 +-
 .../spark/api/java/function/Function3.java      |  5 +--
 .../api/java/function/PairFlatMapFunction.java  |  4 +-
 .../spark/api/java/function/PairFunction.java   |  4 +-
 .../spark/api/python/PythonPartitioner.scala    |  2 -
 .../org/apache/spark/api/python/PythonRDD.scala |  2 +-
 .../spark/api/python/PythonWorkerFactory.scala  |  4 +-
 .../apache/spark/broadcast/HttpBroadcast.scala  |  2 +-
 .../spark/broadcast/TorrentBroadcast.scala      |  1 -
 .../scala/org/apache/spark/deploy/Client.scala  |  2 +-
 .../spark/deploy/FaultToleranceTest.scala       |  4 +-
 .../org/apache/spark/deploy/JsonProtocol.scala  |  3 +-
 .../apache/spark/deploy/LocalSparkCluster.scala |  6 +--
 .../apache/spark/deploy/client/TestClient.scala |  6 +--
 .../spark/deploy/master/ApplicationInfo.scala   |  7 +++-
 .../master/FileSystemPersistenceEngine.scala    |  1 +
 .../org/apache/spark/deploy/master/Master.scala |  5 +--
 .../spark/deploy/master/MasterArguments.scala   |  2 +-
 .../deploy/master/SparkZooKeeperSession.scala   |  2 +-
 .../master/ZooKeeperLeaderElectionAgent.scala   |  2 +-
 .../master/ZooKeeperPersistenceEngine.scala     |  4 +-
 .../deploy/master/ui/ApplicationPage.scala      |  3 +-
 .../spark/deploy/master/ui/IndexPage.scala      |  4 +-
 .../spark/deploy/master/ui/MasterWebUI.scala    |  1 +
 .../spark/deploy/worker/CommandUtils.scala      |  2 +-
 .../spark/deploy/worker/ExecutorRunner.scala    |  3 +-
 .../org/apache/spark/deploy/worker/Worker.scala |  1 -
 .../spark/deploy/worker/WorkerArguments.scala   |  3 +-
 .../spark/deploy/worker/ui/WorkerWebUI.scala    |  4 +-
 .../executor/CoarseGrainedExecutorBackend.scala |  2 +-
 .../apache/spark/executor/ExecutorBackend.scala |  1 +
 .../apache/spark/executor/ExecutorSource.scala  |  5 +--
 .../spark/executor/MesosExecutorBackend.scala   |  4 +-
 .../org/apache/spark/io/CompressionCodec.scala  |  3 +-
 .../apache/spark/metrics/MetricsConfig.scala    |  2 +-
 .../apache/spark/metrics/MetricsSystem.scala    |  6 +--
 .../apache/spark/metrics/sink/ConsoleSink.scala |  4 +-
 .../org/apache/spark/metrics/sink/CsvSink.scala |  4 +-
 .../apache/spark/metrics/sink/GangliaSink.scala |  2 +-
 .../spark/metrics/sink/GraphiteSink.scala       |  4 +-
 .../org/apache/spark/metrics/sink/JmxSink.scala |  4 +-
 .../spark/metrics/sink/MetricsServlet.scala     |  8 ++--
 .../apache/spark/network/BufferMessage.scala    |  1 -
 .../org/apache/spark/network/Connection.scala   | 11 ++---
 .../spark/network/ConnectionManager.scala       | 13 +++---
 .../spark/network/ConnectionManagerId.scala     |  1 -
 .../spark/network/ConnectionManagerTest.scala   |  9 ++--
 .../org/apache/spark/network/Message.scala      |  3 +-
 .../org/apache/spark/network/MessageChunk.scala |  1 -
 .../spark/network/MessageChunkHeader.scala      |  1 -
 .../org/apache/spark/network/ReceiverTest.scala |  2 +-
 .../org/apache/spark/network/SenderTest.scala   |  2 +-
 .../apache/spark/network/netty/FileHeader.scala |  2 +-
 .../spark/network/netty/ShuffleCopier.scala     |  7 ++--
 .../spark/network/netty/ShuffleSender.scala     |  1 -
 .../partial/ApproximateActionListener.scala     |  2 +-
 .../spark/partial/GroupedCountEvaluator.scala   |  4 +-
 .../spark/partial/GroupedMeanEvaluator.scala    |  5 +--
 .../spark/partial/GroupedSumEvaluator.scala     |  5 +--
 .../scala/org/apache/spark/rdd/BlockRDD.scala   |  2 +-
 .../org/apache/spark/rdd/CartesianRDD.scala     |  5 ++-
 .../org/apache/spark/rdd/CheckpointRDD.scala    |  7 +++-
 .../org/apache/spark/rdd/CoGroupedRDD.scala     |  2 +-
 .../org/apache/spark/rdd/CoalescedRDD.scala     |  7 ++--
 .../apache/spark/rdd/DoubleRDDFunctions.scala   |  4 +-
 .../org/apache/spark/rdd/FilteredRDD.scala      |  3 +-
 .../org/apache/spark/rdd/FlatMappedRDD.scala    |  2 +-
 .../apache/spark/rdd/FlatMappedValuesRDD.scala  |  3 +-
 .../scala/org/apache/spark/rdd/GlommedRDD.scala |  3 +-
 .../scala/org/apache/spark/rdd/HadoopRDD.scala  |  3 +-
 .../org/apache/spark/rdd/MapPartitionsRDD.scala |  3 +-
 .../org/apache/spark/rdd/MappedValuesRDD.scala  |  3 +-
 .../org/apache/spark/rdd/PairRDDFunctions.scala | 10 ++---
 .../spark/rdd/ParallelCollectionRDD.scala       |  7 ++--
 .../apache/spark/rdd/PartitionPruningRDD.scala  |  3 +-
 .../spark/rdd/PartitionerAwareUnionRDD.scala    |  5 ++-
 .../spark/rdd/PartitionwiseSampledRDD.scala     |  2 +-
 .../scala/org/apache/spark/rdd/PipedRDD.scala   |  5 +--
 .../main/scala/org/apache/spark/rdd/RDD.scala   | 15 +++----
 .../apache/spark/rdd/RDDCheckpointData.scala    |  3 +-
 .../scala/org/apache/spark/rdd/SampledRDD.scala |  3 +-
 .../spark/rdd/SequenceFileRDDFunctions.scala    |  8 ++--
 .../org/apache/spark/rdd/ShuffledRDD.scala      |  3 +-
 .../org/apache/spark/rdd/SubtractedRDD.scala    |  9 ++--
 .../scala/org/apache/spark/rdd/UnionRDD.scala   |  6 +--
 .../apache/spark/rdd/ZippedPartitionsRDD.scala  |  6 ++-
 .../scala/org/apache/spark/rdd/ZippedRDD.scala  |  6 +--
 .../apache/spark/rdd/ZippedWithIndexRDD.scala   |  2 +-
 .../org/apache/spark/scheduler/ActiveJob.scala  |  4 +-
 .../apache/spark/scheduler/DAGScheduler.scala   |  2 +-
 .../spark/scheduler/DAGSchedulerEvent.scala     |  2 +-
 .../spark/scheduler/DAGSchedulerSource.scala    |  2 +-
 .../spark/scheduler/InputFormatInfo.scala       | 14 +++----
 .../org/apache/spark/scheduler/JobLogger.scala  |  4 +-
 .../org/apache/spark/scheduler/MapStatus.scala  |  3 +-
 .../org/apache/spark/scheduler/ResultTask.scala |  2 +-
 .../apache/spark/scheduler/Schedulable.scala    |  3 +-
 .../spark/scheduler/SchedulableBuilder.scala    |  4 +-
 .../spark/scheduler/SchedulerBackend.scala      |  2 -
 .../apache/spark/scheduler/ShuffleMapTask.scala |  5 +--
 .../apache/spark/scheduler/SparkListener.scala  |  3 +-
 .../scala/org/apache/spark/scheduler/Task.scala |  1 -
 .../spark/scheduler/TaskDescription.scala       |  1 +
 .../org/apache/spark/scheduler/TaskInfo.scala   |  2 -
 .../apache/spark/scheduler/TaskLocality.scala   |  1 -
 .../org/apache/spark/scheduler/TaskResult.scala |  7 ++--
 .../spark/scheduler/TaskResultGetter.scala      |  1 -
 .../spark/scheduler/TaskSchedulerImpl.scala     |  4 +-
 .../apache/spark/scheduler/TaskSetManager.scala |  4 +-
 .../cluster/CoarseGrainedClusterMessage.scala   |  3 +-
 .../cluster/CoarseGrainedSchedulerBackend.scala |  4 +-
 .../cluster/SparkDeploySchedulerBackend.scala   |  4 +-
 .../mesos/CoarseMesosSchedulerBackend.scala     |  7 ++--
 .../cluster/mesos/MesosSchedulerBackend.scala   |  7 ++--
 .../spark/scheduler/local/LocalBackend.scala    |  2 +-
 .../spark/serializer/JavaSerializer.scala       |  2 +-
 .../spark/serializer/KryoSerializer.scala       |  8 ++--
 .../apache/spark/serializer/Serializer.scala    |  3 +-
 .../spark/serializer/SerializerManager.scala    |  2 +-
 .../spark/storage/BlockFetcherIterator.scala    |  5 +--
 .../org/apache/spark/storage/BlockManager.scala | 13 +++---
 .../apache/spark/storage/BlockManagerId.scala   |  1 +
 .../spark/storage/BlockManagerMaster.scala      |  2 +-
 .../spark/storage/BlockManagerMasterActor.scala |  2 +-
 .../spark/storage/BlockManagerMessages.scala    |  1 -
 .../spark/storage/BlockManagerSlaveActor.scala  |  1 -
 .../spark/storage/BlockManagerSource.scala      |  3 +-
 .../spark/storage/BlockManagerWorker.scala      |  2 +-
 .../org/apache/spark/storage/BlockMessage.scala |  2 +-
 .../org/apache/spark/storage/BlockStore.scala   |  1 +
 .../org/apache/spark/storage/DiskStore.scala    |  1 -
 .../org/apache/spark/storage/MemoryStore.scala  |  7 ++--
 .../spark/storage/StoragePerfTester.scala       |  4 +-
 .../org/apache/spark/storage/StorageUtils.scala |  4 +-
 .../apache/spark/storage/ThreadingTest.scala    |  7 ++--
 .../scala/org/apache/spark/ui/JettyUtils.scala  | 10 ++---
 .../scala/org/apache/spark/ui/SparkUI.scala     |  6 +--
 .../apache/spark/ui/UIWorkloadGenerator.scala   |  1 -
 .../org/apache/spark/ui/env/EnvironmentUI.scala |  5 +--
 .../org/apache/spark/ui/exec/ExecutorsUI.scala  |  3 +-
 .../apache/spark/ui/jobs/ExecutorTable.scala    |  2 +-
 .../org/apache/spark/ui/jobs/IndexPage.scala    |  1 -
 .../spark/ui/jobs/JobProgressListener.scala     |  1 -
 .../apache/spark/ui/jobs/JobProgressUI.scala    | 14 ++-----
 .../org/apache/spark/ui/jobs/PoolPage.scala     |  6 +--
 .../org/apache/spark/ui/jobs/StagePage.scala    |  5 +--
 .../org/apache/spark/ui/jobs/StageTable.scala   |  3 +-
 .../spark/ui/storage/BlockManagerUI.scala       |  2 -
 .../org/apache/spark/ui/storage/IndexPage.scala |  2 +-
 .../org/apache/spark/ui/storage/RDDPage.scala   |  3 +-
 .../scala/org/apache/spark/util/AkkaUtils.scala |  2 +-
 .../spark/util/BoundedPriorityQueue.scala       |  3 +-
 .../spark/util/ByteBufferInputStream.scala      |  1 +
 .../org/apache/spark/util/ClosureCleaner.scala  |  4 +-
 .../org/apache/spark/util/MetadataCleaner.scala |  4 +-
 .../org/apache/spark/util/MutablePair.scala     |  1 -
 .../apache/spark/util/SerializableBuffer.scala  |  2 +-
 .../spark/util/SerializableHyperLogLog.scala    |  5 ++-
 .../org/apache/spark/util/SizeEstimator.scala   | 11 +++--
 .../apache/spark/util/TimeStampedHashMap.scala  |  5 ++-
 .../apache/spark/util/TimeStampedHashSet.scala  |  4 +-
 .../scala/org/apache/spark/util/Utils.scala     | 12 ++----
 .../scala/org/apache/spark/util/Vector.scala    |  1 +
 .../apache/spark/util/collection/BitSet.scala   |  1 -
 .../util/collection/ExternalAppendOnlyMap.scala |  2 +-
 .../spark/util/random/RandomSampler.scala       |  1 +
 .../spark/util/random/XORShiftRandom.scala      |  1 +
 .../org/apache/spark/AccumulatorSuite.scala     |  7 ++--
 .../org/apache/spark/CheckpointSuite.scala      |  7 +++-
 .../org/apache/spark/DistributedSuite.scala     |  9 ++--
 .../scala/org/apache/spark/DriverSuite.scala    |  1 +
 .../scala/org/apache/spark/FailureSuite.scala   |  2 +-
 .../org/apache/spark/FileServerSuite.scala      |  3 +-
 .../test/scala/org/apache/spark/FileSuite.scala |  9 ++--
 .../scala/org/apache/spark/JavaAPISuite.java    |  5 +--
 .../org/apache/spark/JobCancellationSuite.scala |  3 +-
 .../org/apache/spark/LocalSparkContext.scala    |  7 ++--
 .../apache/spark/MapOutputTrackerSuite.scala    |  5 ++-
 .../org/apache/spark/PartitioningSuite.scala    |  3 +-
 .../scala/org/apache/spark/PipedRDDSuite.scala  |  1 -
 .../org/apache/spark/SharedSparkContext.scala   |  2 +-
 .../org/apache/spark/ShuffleNettySuite.scala    |  1 -
 .../scala/org/apache/spark/ShuffleSuite.scala   |  5 +--
 .../apache/spark/SparkContextInfoSuite.scala    |  1 -
 .../scala/org/apache/spark/ThreadingSuite.scala |  3 --
 .../scala/org/apache/spark/UnpersistSuite.scala |  3 +-
 .../apache/spark/ZippedPartitionsSuite.scala    |  9 ----
 .../spark/api/python/PythonRDDSuite.scala       |  5 +--
 .../apache/spark/deploy/JsonProtocolSuite.scala |  2 +-
 .../spark/deploy/worker/DriverRunnerTest.scala  |  6 +--
 .../deploy/worker/ExecutorRunnerTest.scala      |  2 +-
 .../deploy/worker/WorkerWatcherSuite.scala      |  5 +--
 .../apache/spark/io/CompressionCodecSuite.scala |  2 +-
 .../spark/metrics/MetricsSystemSuite.scala      |  3 +-
 .../apache/spark/rdd/AsyncRDDActionsSuite.scala |  1 -
 .../org/apache/spark/rdd/DoubleRDDSuite.scala   |  6 +--
 .../org/apache/spark/rdd/JdbcRDDSuite.scala     |  7 ++--
 .../spark/rdd/PairRDDFunctionsSuite.scala       |  3 +-
 .../rdd/ParallelCollectionSplitSuite.scala      |  4 +-
 .../spark/rdd/PartitionPruningRDDSuite.scala    |  2 +-
 .../rdd/PartitionwiseSampledRDDSuite.scala      |  1 +
 .../scala/org/apache/spark/rdd/RDDSuite.scala   |  8 ++--
 .../org/apache/spark/rdd/SortingSuite.scala     |  1 -
 .../spark/scheduler/ClusterSchedulerSuite.scala |  8 ++--
 .../spark/scheduler/DAGSchedulerSuite.scala     |  3 +-
 .../apache/spark/scheduler/JobLoggerSuite.scala |  6 ---
 .../spark/scheduler/TaskContextSuite.scala      |  7 ++--
 .../spark/scheduler/TaskResultGetterSuite.scala |  2 +-
 .../spark/scheduler/TaskSetManagerSuite.scala   |  3 +-
 .../spark/serializer/KryoSerializerSuite.scala  |  4 +-
 .../spark/storage/BlockManagerSuite.scala       |  7 ++--
 .../spark/storage/DiskBlockManagerSuite.scala   |  3 +-
 .../scala/org/apache/spark/ui/UISuite.scala     |  6 ++-
 .../ui/jobs/JobProgressListenerSuite.scala      |  4 +-
 .../apache/spark/util/ClosureCleanerSuite.scala |  4 +-
 .../apache/spark/util/NextIteratorSuite.scala   |  6 ++-
 .../apache/spark/util/SizeEstimatorSuite.scala  |  3 +-
 .../org/apache/spark/util/UtilsSuite.scala      | 10 +++--
 .../util/collection/AppendOnlyMapSuite.scala    |  3 +-
 .../spark/util/collection/BitSetSuite.scala     |  1 -
 .../util/collection/OpenHashMapSuite.scala      |  2 +
 .../util/collection/OpenHashSetSuite.scala      |  1 -
 .../PrimitiveKeyOpenHashMapSuite.scala          |  2 +
 .../spark/util/random/RandomSamplerSuite.scala  |  6 +--
 .../spark/util/random/XORShiftRandomSuite.scala |  1 +
 246 files changed, 446 insertions(+), 552 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/java/org/apache/spark/network/netty/FileClient.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/network/netty/FileClient.java b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
index d2d778b..0d31894 100644
--- a/core/src/main/java/org/apache/spark/network/netty/FileClient.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
@@ -17,6 +17,8 @@
 
 package org.apache.spark.network.netty;
 
+import java.util.concurrent.TimeUnit;
+
 import io.netty.bootstrap.Bootstrap;
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelOption;
@@ -27,8 +29,6 @@ import io.netty.channel.socket.oio.OioSocketChannel;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.concurrent.TimeUnit;
-
 class FileClient {
 
   private static final Logger LOG = LoggerFactory.getLogger(FileClient.class.getName());

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
index 3ac045f..c0133e1 100644
--- a/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
@@ -23,11 +23,11 @@ import java.io.FileInputStream;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.SimpleChannelInboundHandler;
 import io.netty.channel.DefaultFileRegion;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.spark.storage.BlockId;
 import org.apache.spark.storage.FileSegment;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 class FileServerHandler extends SimpleChannelInboundHandler<String> {
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala b/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
index 32429f0..1fca572 100644
--- a/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
+++ b/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala
@@ -17,7 +17,8 @@
 
 package org.apache.hadoop.mapreduce
 
-import java.lang.{Integer => JInteger, Boolean => JBoolean}
+import java.lang.{Boolean => JBoolean, Integer => JInteger}
+
 import org.apache.hadoop.conf.Configuration
 
 private[apache]

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/Accumulators.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/Accumulators.scala b/core/src/main/scala/org/apache/spark/Accumulators.scala
index df01b2e..73dd471 100644
--- a/core/src/main/scala/org/apache/spark/Accumulators.scala
+++ b/core/src/main/scala/org/apache/spark/Accumulators.scala
@@ -19,8 +19,9 @@ package org.apache.spark
 
 import java.io.{ObjectInputStream, Serializable}
 
-import scala.collection.mutable.Map
 import scala.collection.generic.Growable
+import scala.collection.mutable.Map
+
 import org.apache.spark.serializer.JavaSerializer
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
index d9ed572..754b46a 100644
--- a/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
@@ -20,12 +20,11 @@ package org.apache.spark
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 
-import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
+import org.apache.spark.executor.ShuffleReadMetrics
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.storage.{BlockId, BlockManagerId, ShuffleBlockId}
 import org.apache.spark.util.CompletionIterator
 
-
 private[spark] class BlockStoreShuffleFetcher extends ShuffleFetcher with Logging {
 
   override def fetch[T](

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/CacheManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/CacheManager.scala b/core/src/main/scala/org/apache/spark/CacheManager.scala
index b38af24..1daabec 100644
--- a/core/src/main/scala/org/apache/spark/CacheManager.scala
+++ b/core/src/main/scala/org/apache/spark/CacheManager.scala
@@ -18,9 +18,9 @@
 package org.apache.spark
 
 import scala.collection.mutable.{ArrayBuffer, HashSet}
-import org.apache.spark.storage.{BlockId, BlockManager, StorageLevel, RDDBlockId}
-import org.apache.spark.rdd.RDD
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.{BlockManager, RDDBlockId, StorageLevel}
 
 /** Spark class responsible for passing RDDs split contents to the BlockManager and making
     sure a node doesn't load two copies of an RDD at once.

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/FutureAction.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/FutureAction.scala b/core/src/main/scala/org/apache/spark/FutureAction.scala
index d7d1028..f2decd1 100644
--- a/core/src/main/scala/org/apache/spark/FutureAction.scala
+++ b/core/src/main/scala/org/apache/spark/FutureAction.scala
@@ -21,10 +21,8 @@ import scala.concurrent._
 import scala.concurrent.duration.Duration
 import scala.util.Try
 
-import org.apache.spark.scheduler.{JobSucceeded, JobWaiter}
-import org.apache.spark.scheduler.JobFailed
 import org.apache.spark.rdd.RDD
-
+import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter}
 
 /**
  * A future for the result of an action to support cancellation. This is an extension of the

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/HttpFileServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/HttpFileServer.scala b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
index a885898..d3264a4 100644
--- a/core/src/main/scala/org/apache/spark/HttpFileServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
@@ -17,8 +17,10 @@
 
 package org.apache.spark
 
-import java.io.{File}
+import java.io.File
+
 import com.google.common.io.Files
+
 import org.apache.spark.util.Utils
 
 private[spark] class HttpFileServer extends Logging {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/HttpServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/HttpServer.scala b/core/src/main/scala/org/apache/spark/HttpServer.scala
index 69a738d..759e68e 100644
--- a/core/src/main/scala/org/apache/spark/HttpServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpServer.scala
@@ -18,7 +18,6 @@
 package org.apache.spark
 
 import java.io.File
-import java.net.InetAddress
 
 import org.eclipse.jetty.server.Server
 import org.eclipse.jetty.server.bio.SocketConnector
@@ -26,6 +25,7 @@ import org.eclipse.jetty.server.handler.DefaultHandler
 import org.eclipse.jetty.server.handler.HandlerList
 import org.eclipse.jetty.server.handler.ResourceHandler
 import org.eclipse.jetty.util.thread.QueuedThreadPool
+
 import org.apache.spark.util.Utils
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
index 8d6db0f..5968973 100644
--- a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
+++ b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -22,7 +22,6 @@ import java.util.zip.{GZIPInputStream, GZIPOutputStream}
 
 import scala.collection.mutable.HashSet
 import scala.concurrent.Await
-import scala.concurrent.duration._
 
 import akka.actor._
 import akka.pattern.ask

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/SerializableWritable.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SerializableWritable.scala b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
index fdd4c24..dff665c 100644
--- a/core/src/main/scala/org/apache/spark/SerializableWritable.scala
+++ b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
@@ -19,9 +19,9 @@ package org.apache.spark
 
 import java.io._
 
+import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.io.ObjectWritable
 import org.apache.hadoop.io.Writable
-import org.apache.hadoop.conf.Configuration
 
 class SerializableWritable[T <: Writable](@transient var t: T) extends Serializable {
   def value = t

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
index a85aa50..e8f756c 100644
--- a/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
@@ -17,10 +17,8 @@
 
 package org.apache.spark
 
-import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.serializer.Serializer
 
-
 private[spark] abstract class ShuffleFetcher {
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/SparkConf.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 45d19bc..b947feb 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -20,8 +20,6 @@ package org.apache.spark
 import scala.collection.JavaConverters._
 import scala.collection.mutable.HashMap
 
-import java.io.{ObjectInputStream, ObjectOutputStream, IOException}
-
 /**
  * Configuration for a Spark application. Used to set various Spark parameters as key-value pairs.
  *

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 5a6d06b..a24f07e 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -19,21 +19,18 @@ package org.apache.spark
 
 import java.io._
 import java.net.URI
-import java.util.{UUID, Properties}
+import java.util.{Properties, UUID}
 import java.util.concurrent.atomic.AtomicInteger
 
 import scala.collection.{Map, Set}
 import scala.collection.generic.Growable
-
 import scala.collection.mutable.{ArrayBuffer, HashMap}
 import scala.reflect.{ClassTag, classTag}
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
-import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable,
-  FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable}
-import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat,
-  TextInputFormat}
+import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable, FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable}
+import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat, TextInputFormat}
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, Job => NewHadoopJob}
 import org.apache.hadoop.mapreduce.lib.input.{FileInputFormat => NewFileInputFormat}
 import org.apache.mesos.MesosNativeLibrary
@@ -42,14 +39,12 @@ import org.apache.spark.deploy.{LocalSparkCluster, SparkHadoopUtil}
 import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
 import org.apache.spark.rdd._
 import org.apache.spark.scheduler._
-import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend,
-  SparkDeploySchedulerBackend, SimrSchedulerBackend}
+import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, SparkDeploySchedulerBackend, SimrSchedulerBackend}
 import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
 import org.apache.spark.scheduler.local.LocalBackend
 import org.apache.spark.storage.{BlockManagerSource, RDDInfo, StorageStatus, StorageUtils}
 import org.apache.spark.ui.SparkUI
-import org.apache.spark.util.{Utils, TimeStampedHashMap, MetadataCleaner, MetadataCleanerType,
-  ClosureCleaner}
+import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedHashMap, Utils}
 
 /**
  * Main entry point for Spark functionality. A SparkContext represents the connection to a Spark

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/SparkEnv.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 6ae020f..7ac6582 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -21,16 +21,15 @@ import scala.collection.mutable
 import scala.concurrent.Await
 
 import akka.actor._
+import com.google.common.collect.MapMaker
 
+import org.apache.spark.api.python.PythonWorkerFactory
 import org.apache.spark.broadcast.BroadcastManager
 import org.apache.spark.metrics.MetricsSystem
-import org.apache.spark.storage.{BlockManagerMasterActor, BlockManager, BlockManagerMaster}
+import org.apache.spark.storage.{BlockManager, BlockManagerMaster, BlockManagerMasterActor}
 import org.apache.spark.network.ConnectionManager
 import org.apache.spark.serializer.{Serializer, SerializerManager}
-import org.apache.spark.util.{Utils, AkkaUtils}
-import org.apache.spark.api.python.PythonWorkerFactory
-
-import com.google.common.collect.MapMaker
+import org.apache.spark.util.{AkkaUtils, Utils}
 
 /**
  * Holds all the runtime environment objects for a running Spark instance (either master or worker),

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
index 4e63117..d404459 100644
--- a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
+++ b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
@@ -18,8 +18,8 @@
 package org.apache.hadoop.mapred
 
 import java.io.IOException
-import java.text.SimpleDateFormat
 import java.text.NumberFormat
+import java.text.SimpleDateFormat
 import java.util.Date
 
 import org.apache.hadoop.fs.FileSystem

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
index 33737e1..0710444 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
@@ -17,27 +17,25 @@
 
 package org.apache.spark.api.java
 
+import java.lang.{Double => JDouble}
+
 import scala.reflect.ClassTag
 
-import org.apache.spark.rdd.RDD
+import org.apache.spark.Partitioner
 import org.apache.spark.SparkContext.doubleRDDToDoubleRDDFunctions
 import org.apache.spark.api.java.function.{Function => JFunction}
-import org.apache.spark.util.StatCounter
 import org.apache.spark.partial.{BoundedDouble, PartialResult}
+import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.StatCounter
 
-import java.lang.Double
-import org.apache.spark.Partitioner
-
-import scala.collection.JavaConverters._
-
-class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, JavaDoubleRDD] {
+class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[JDouble, JavaDoubleRDD] {
 
-  override val classTag: ClassTag[Double] = implicitly[ClassTag[Double]]
+  override val classTag: ClassTag[JDouble] = implicitly[ClassTag[JDouble]]
 
-  override val rdd: RDD[Double] = srdd.map(x => Double.valueOf(x))
+  override val rdd: RDD[JDouble] = srdd.map(x => JDouble.valueOf(x))
 
-  override def wrapRDD(rdd: RDD[Double]): JavaDoubleRDD =
+  override def wrapRDD(rdd: RDD[JDouble]): JavaDoubleRDD =
     new JavaDoubleRDD(rdd.map(_.doubleValue))
 
   // Common RDD functions
@@ -67,7 +65,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   def unpersist(blocking: Boolean): JavaDoubleRDD = fromRDD(srdd.unpersist(blocking))
 
   // first() has to be overriden here in order for its return type to be Double instead of Object.
-  override def first(): Double = srdd.first()
+  override def first(): JDouble = srdd.first()
 
   // Transformations (return a new RDD)
 
@@ -84,7 +82,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   /**
    * Return a new RDD containing only the elements that satisfy a predicate.
    */
-  def filter(f: JFunction[Double, java.lang.Boolean]): JavaDoubleRDD =
+  def filter(f: JFunction[JDouble, java.lang.Boolean]): JavaDoubleRDD =
     fromRDD(srdd.filter(x => f(x).booleanValue()))
 
   /**
@@ -133,7 +131,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   /**
    * Return a sampled subset of this RDD.
    */
-  def sample(withReplacement: Boolean, fraction: Double, seed: Int): JavaDoubleRDD =
+  def sample(withReplacement: Boolean, fraction: JDouble, seed: Int): JavaDoubleRDD =
     fromRDD(srdd.sample(withReplacement, fraction, seed))
 
   /**
@@ -145,7 +143,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   // Double RDD functions
 
   /** Add up the elements in this RDD. */
-  def sum(): Double = srdd.sum()
+  def sum(): JDouble = srdd.sum()
 
   /**
    * Return a [[org.apache.spark.util.StatCounter]] object that captures the mean, variance and
@@ -154,35 +152,35 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
   def stats(): StatCounter = srdd.stats()
 
   /** Compute the mean of this RDD's elements. */
-  def mean(): Double = srdd.mean()
+  def mean(): JDouble = srdd.mean()
 
   /** Compute the variance of this RDD's elements. */
-  def variance(): Double = srdd.variance()
+  def variance(): JDouble = srdd.variance()
 
   /** Compute the standard deviation of this RDD's elements. */
-  def stdev(): Double = srdd.stdev()
+  def stdev(): JDouble = srdd.stdev()
 
   /**
    * Compute the sample standard deviation of this RDD's elements (which corrects for bias in
    * estimating the standard deviation by dividing by N-1 instead of N).
    */
-  def sampleStdev(): Double = srdd.sampleStdev()
+  def sampleStdev(): JDouble = srdd.sampleStdev()
 
   /**
    * Compute the sample variance of this RDD's elements (which corrects for bias in
    * estimating the standard variance by dividing by N-1 instead of N).
    */
-  def sampleVariance(): Double = srdd.sampleVariance()
+  def sampleVariance(): JDouble = srdd.sampleVariance()
 
   /** Return the approximate mean of the elements in this RDD. */
-  def meanApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble] =
+  def meanApprox(timeout: Long, confidence: JDouble): PartialResult[BoundedDouble] =
     srdd.meanApprox(timeout, confidence)
 
   /** (Experimental) Approximate operation to return the mean within a timeout. */
   def meanApprox(timeout: Long): PartialResult[BoundedDouble] = srdd.meanApprox(timeout)
 
   /** (Experimental) Approximate operation to return the sum within a timeout. */
-  def sumApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble] =
+  def sumApprox(timeout: Long, confidence: JDouble): PartialResult[BoundedDouble] =
     srdd.sumApprox(timeout, confidence)
 
   /** (Experimental) Approximate operation to return the sum within a timeout. */
@@ -222,7 +220,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
     srdd.histogram(buckets, false)
   }
 
-  def histogram(buckets: Array[Double], evenBuckets: Boolean): Array[Long] = {
+  def histogram(buckets: Array[JDouble], evenBuckets: Boolean): Array[Long] = {
     srdd.histogram(buckets.map(_.toDouble), evenBuckets)
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
index 7b73057..0055c98 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
@@ -20,8 +20,8 @@ package org.apache.spark.api.java
 import scala.reflect.ClassTag
 
 import org.apache.spark._
-import org.apache.spark.rdd.RDD
 import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
 
 class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
index 3e85052..30e6a52 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
@@ -17,7 +17,6 @@
 
 package org.apache.spark.api.java.function;
 
-
 import java.io.Serializable;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
index 5e9b8c4..490da25 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
@@ -17,7 +17,6 @@
 
 package org.apache.spark.api.java.function;
 
-
 import java.io.Serializable;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/java/function/Function.java
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function.java b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
index 537439e..e0fcd46 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
@@ -17,11 +17,10 @@
 
 package org.apache.spark.api.java.function;
 
-import scala.reflect.ClassTag;
-import scala.reflect.ClassTag$;
-
 import java.io.Serializable;
 
+import scala.reflect.ClassTag;
+import scala.reflect.ClassTag$;
 
 /**
  * Base class for functions whose return types do not create special RDDs. PairFunction and

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
index a2d1214..16d7379 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
@@ -17,11 +17,11 @@
 
 package org.apache.spark.api.java.function;
 
+import java.io.Serializable;
+
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
 
-import java.io.Serializable;
-
 /**
  * A two-argument function that takes arguments of type T1 and T2 and returns an R.
  */

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java b/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
index fb1dece..096eb71 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
@@ -17,11 +17,10 @@
 
 package org.apache.spark.api.java.function;
 
+import java.io.Serializable;
+
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
-import scala.runtime.AbstractFunction2;
-
-import java.io.Serializable;
 
 /**
  * A three-argument function that takes arguments of type T1, T2 and T3 and returns an R.

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
index ca485b3..c72b98c 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
@@ -17,12 +17,12 @@
 
 package org.apache.spark.api.java.function;
 
+import java.io.Serializable;
+
 import scala.Tuple2;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
 
-import java.io.Serializable;
-
 /**
  * A function that returns zero or more key-value pair records from each input record. The
  * key-value pairs are represented as scala.Tuple2 objects.

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
index cbe2306..84b9136 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
@@ -17,12 +17,12 @@
 
 package org.apache.spark.api.java.function;
 
+import java.io.Serializable;
+
 import scala.Tuple2;
 import scala.reflect.ClassTag;
 import scala.reflect.ClassTag$;
 
-import java.io.Serializable;
-
 /**
  * A function that returns key-value pairs (Tuple2<K, V>), and can be used to construct PairRDDs.
  */

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
index 35eca62..95bec50 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.api.python
 
-import java.util.Arrays
-
 import org.apache.spark.Partitioner
 import org.apache.spark.util.Utils
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index 33667a9..e4d0285 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -24,9 +24,9 @@ import java.util.{List => JList, ArrayList => JArrayList, Map => JMap, Collectio
 import scala.collection.JavaConversions._
 import scala.reflect.ClassTag
 
+import org.apache.spark._
 import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
 import org.apache.spark.broadcast.Broadcast
-import org.apache.spark._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.util.Utils
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
index f291266..a5f0f3d 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.api.python
 
-import java.io.{OutputStreamWriter, File, DataInputStream, IOException}
-import java.net.{ServerSocket, Socket, SocketException, InetAddress}
+import java.io.{DataInputStream, File, IOException, OutputStreamWriter}
+import java.net.{InetAddress, ServerSocket, Socket, SocketException}
 
 import scala.collection.JavaConversions._
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
index 39ee0db..20207c2 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
@@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit
 import it.unimi.dsi.fastutil.io.FastBufferedInputStream
 import it.unimi.dsi.fastutil.io.FastBufferedOutputStream
 
-import org.apache.spark.{SparkConf, HttpServer, Logging, SparkEnv}
+import org.apache.spark.{HttpServer, Logging, SparkConf, SparkEnv}
 import org.apache.spark.io.CompressionCodec
 import org.apache.spark.storage.{BroadcastBlockId, StorageLevel}
 import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashSet, Utils}

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
index ec99725..22d783c 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
@@ -26,7 +26,6 @@ import org.apache.spark._
 import org.apache.spark.storage.{BroadcastBlockId, BroadcastHelperBlockId, StorageLevel}
 import org.apache.spark.util.Utils
 
-
 private[spark] class TorrentBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
 extends Broadcast[T](id) with Logging with Serializable {
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/Client.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala
index 9987e23..eb5676b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -23,13 +23,13 @@ import scala.concurrent._
 
 import akka.actor._
 import akka.pattern.ask
+import akka.remote.{AssociationErrorEvent, DisassociatedEvent, RemotingLifecycleEvent}
 import org.apache.log4j.{Level, Logger}
 
 import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.{DriverState, Master}
 import org.apache.spark.util.{AkkaUtils, Utils}
-import akka.remote.{AssociationErrorEvent, DisassociatedEvent, RemotingLifecycleEvent}
 
 /**
  * Proxy that relays messages to the driver.

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
index 7de7c48..190b331 100644
--- a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
@@ -21,10 +21,10 @@ import java.io._
 import java.net.URL
 import java.util.concurrent.TimeoutException
 
+import scala.collection.mutable.ListBuffer
 import scala.concurrent.{Await, future, promise}
-import scala.concurrent.duration._
 import scala.concurrent.ExecutionContext.Implicits.global
-import scala.collection.mutable.ListBuffer
+import scala.concurrent.duration._
 import scala.sys.process._
 
 import net.liftweb.json.JsonParser

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
index 33e6937..318beb5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
@@ -20,10 +20,9 @@ package org.apache.spark.deploy
 import net.liftweb.json.JsonDSL._
 
 import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
-import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo, DriverInfo}
+import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
 import org.apache.spark.deploy.worker.ExecutorRunner
 
-
 private[spark] object JsonProtocol {
  def writeWorkerInfo(obj: WorkerInfo) = {
    ("id" -> obj.id) ~

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
index 488843a..a73b459 100644
--- a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
@@ -17,14 +17,14 @@
 
 package org.apache.spark.deploy
 
+import scala.collection.mutable.ArrayBuffer
+
 import akka.actor.ActorSystem
 
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.deploy.worker.Worker
 import org.apache.spark.deploy.master.Master
 import org.apache.spark.util.Utils
-import org.apache.spark.{SparkConf, Logging}
-
-import scala.collection.mutable.ArrayBuffer
 
 /**
  * Testing class that creates a Spark standalone process in-cluster (that is, running the

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
index 8017932..1550c3e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.deploy.client
 
-import org.apache.spark.util.{Utils, AkkaUtils}
-import org.apache.spark.{SparkConf, SparkContext, Logging}
-import org.apache.spark.deploy.{Command, ApplicationDescription}
+import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.deploy.{ApplicationDescription, Command}
+import org.apache.spark.util.{AkkaUtils, Utils}
 
 private[spark] object TestClient {
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
index 3e26379..e8867bc 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
@@ -17,11 +17,14 @@
 
 package org.apache.spark.deploy.master
 
-import org.apache.spark.deploy.ApplicationDescription
 import java.util.Date
-import akka.actor.ActorRef
+
 import scala.collection.mutable
 
+import akka.actor.ActorRef
+
+import org.apache.spark.deploy.ApplicationDescription
+
 private[spark] class ApplicationInfo(
     val startTime: Long,
     val id: String,

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
index 74bb9eb..aa85aa0 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
@@ -20,6 +20,7 @@ package org.apache.spark.deploy.master
 import java.io._
 
 import akka.serialization.Serialization
+
 import org.apache.spark.Logging
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index e44f90c..51794ce 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -30,15 +30,14 @@ import akka.pattern.ask
 import akka.remote.{DisassociatedEvent, RemotingLifecycleEvent}
 import akka.serialization.SerializationExtension
 
-
-import org.apache.spark.{SparkConf, Logging, SparkException}
+import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState}
 import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.DriverState.DriverState
 import org.apache.spark.deploy.master.MasterMessages._
 import org.apache.spark.deploy.master.ui.MasterWebUI
 import org.apache.spark.metrics.MetricsSystem
 import org.apache.spark.util.{AkkaUtils, Utils}
-import org.apache.spark.deploy.master.DriverState.DriverState
 
 private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Actor with Logging {
   import context.dispatcher   // to use Akka's scheduler.schedule()

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
index e7f3224..a87781f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.deploy.master
 
-import org.apache.spark.util.{Utils, IntParam}
 import org.apache.spark.SparkConf
+import org.apache.spark.util.{IntParam, Utils}
 
 /**
  * Command-line parser for the master.

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala b/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
index 999090a..5775805 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala
@@ -23,7 +23,7 @@ import org.apache.zookeeper._
 import org.apache.zookeeper.Watcher.Event.KeeperState
 import org.apache.zookeeper.data.Stat
 
-import org.apache.spark.{SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf}
 
 /**
  * Provides a Scala-side interface to the standard ZooKeeper client, with the addition of retry

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
index 77c23fb..47b8f67 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
@@ -21,7 +21,7 @@ import akka.actor.ActorRef
 import org.apache.zookeeper._
 import org.apache.zookeeper.Watcher.Event.EventType
 
-import org.apache.spark.{SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.deploy.master.MasterMessages._
 
 private[spark] class ZooKeeperLeaderElectionAgent(val masterActor: ActorRef,

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
index 10816a1..48b2fc0 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
@@ -17,10 +17,10 @@
 
 package org.apache.spark.deploy.master
 
-import org.apache.spark.{SparkConf, Logging}
+import akka.serialization.Serialization
 import org.apache.zookeeper._
 
-import akka.serialization.Serialization
+import org.apache.spark.{Logging, SparkConf}
 
 class ZooKeeperPersistenceEngine(serialization: Serialization, conf: SparkConf)
   extends PersistenceEngine

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
index f29a6ad..5cc4adb 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
@@ -17,11 +17,12 @@
 
 package org.apache.spark.deploy.master.ui
 
+import javax.servlet.http.HttpServletRequest
+
 import scala.concurrent.Await
 import scala.xml.Node
 
 import akka.pattern.ask
-import javax.servlet.http.HttpServletRequest
 import net.liftweb.json.JsonAST.JValue
 
 import org.apache.spark.deploy.JsonProtocol

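One detail worth calling out in the ApplicationPage hunk: javax.servlet moves up out of the third-party block, because java and javax are treated as a single leading group that precedes the scala imports. In sketch form (hypothetical file):

  import java.io.File
  import javax.servlet.http.HttpServletRequest  // javax rides with java, ahead of scala

  import scala.xml.Node
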
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
index 04f9a22..01c8f90 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.deploy.master.ui
 
+import javax.servlet.http.HttpServletRequest
+
 import scala.concurrent.Await
-import scala.concurrent.duration._
 import scala.xml.Node
 
 import akka.pattern.ask
-import javax.servlet.http.HttpServletRequest
 import net.liftweb.json.JsonAST.JValue
 
 import org.apache.spark.deploy.{DeployWebUI, JsonProtocol}

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index 05c4df8..5ab13e7 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.deploy.master.ui
 
 import javax.servlet.http.HttpServletRequest
+
 import org.eclipse.jetty.server.{Handler, Server}
 
 import org.apache.spark.Logging

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
index 2ceccc7..0c761df 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.deploy.worker
 
-import java.io.{File, FileOutputStream, IOException, InputStream}
+import java.io.{File, FileOutputStream, InputStream, IOException}
 import java.lang.System._
 
 import org.apache.spark.Logging

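The CommandUtils hunk shows how names are ordered inside a selector clause: the sort appears to be case-insensitive, which is why InputStream ("in...") now precedes IOException ("io...") even though a strict ASCII sort would put IOException first. Assuming that reading of the guide:

  // case-insensitive sort: file < fileoutputstream < inputstream < ioexception
  import java.io.{File, FileOutputStream, InputStream, IOException}
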
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index 18885d7..2edd921 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -20,12 +20,11 @@ package org.apache.spark.deploy.worker
 import java.io._
 
 import akka.actor.ActorRef
-
 import com.google.common.base.Charsets
 import com.google.common.io.Files
 
 import org.apache.spark.Logging
-import org.apache.spark.deploy.{ExecutorState, ApplicationDescription, Command}
+import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
 import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index f4ee0e2..7b0b786 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -31,7 +31,6 @@ import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.deploy.{ExecutorDescription, ExecutorState}
 import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.{DriverState, Master}
-import org.apache.spark.deploy.master.DriverState.DriverState
 import org.apache.spark.deploy.worker.ui.WorkerWebUI
 import org.apache.spark.metrics.MetricsSystem
 import org.apache.spark.util.{AkkaUtils, Utils}

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index 3ed528e..d35d5be 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -17,9 +17,10 @@
 
 package org.apache.spark.deploy.worker
 
-import org.apache.spark.util.{Utils, IntParam, MemoryParam}
 import java.lang.management.ManagementFactory
 
+import org.apache.spark.util.{IntParam, MemoryParam, Utils}
+
 /**
  * Command-line parser for the worker.
  */

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
index 86688e4..bdf126f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
@@ -18,11 +18,11 @@
 package org.apache.spark.deploy.worker.ui
 
 import java.io.File
-
 import javax.servlet.http.HttpServletRequest
+
 import org.eclipse.jetty.server.{Handler, Server}
 
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.Logging
 import org.apache.spark.deploy.worker.Worker
 import org.apache.spark.ui.{JettyUtils, UIUtils}
 import org.apache.spark.ui.JettyUtils._

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index 45b43b4..0aae569 100644
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -22,7 +22,7 @@ import java.nio.ByteBuffer
 import akka.actor._
 import akka.remote._
 
-import org.apache.spark.{SparkConf, SparkContext, Logging}
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.deploy.worker.WorkerWatcher
 import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
index ad7dd34..3d34960 100644
--- a/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.executor
 
 import java.nio.ByteBuffer
+
 import org.apache.spark.TaskState.TaskState
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
index c2e973e..127f5e9 100644
--- a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
@@ -17,12 +17,11 @@
 
 package org.apache.spark.executor
 
-import com.codahale.metrics.{Gauge, MetricRegistry}
+import scala.collection.JavaConversions._
 
+import com.codahale.metrics.{Gauge, MetricRegistry}
 import org.apache.hadoop.fs.FileSystem
 
-import scala.collection.JavaConversions._
-
 import org.apache.spark.metrics.source.Source
 
 class ExecutorSource(val executor: Executor, executorId: String) extends Source {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
index b56d8c9..6fc702f 100644
--- a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
@@ -20,8 +20,7 @@ package org.apache.spark.executor
 import java.nio.ByteBuffer
 
 import com.google.protobuf.ByteString
-
-import org.apache.mesos.{Executor => MesosExecutor, MesosExecutorDriver, MesosNativeLibrary, ExecutorDriver}
+import org.apache.mesos.{Executor => MesosExecutor, ExecutorDriver, MesosExecutorDriver, MesosNativeLibrary}
 import org.apache.mesos.Protos.{TaskStatus => MesosTaskStatus, _}
 
 import org.apache.spark.Logging
@@ -29,7 +28,6 @@ import org.apache.spark.TaskState
 import org.apache.spark.TaskState.TaskState
 import org.apache.spark.util.Utils
 
-
 private[spark] class MesosExecutorBackend
   extends MesosExecutor
   with ExecutorBackend

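The MesosExecutorBackend hunk answers how renamed imports sort: the original name stays the sort key, so Executor => MesosExecutor still files under "Executor" and keeps its place ahead of ExecutorDriver. The same pattern in isolation:

  // the rename target (MesosExecutor) does not affect ordering
  import org.apache.mesos.{Executor => MesosExecutor, ExecutorDriver, MesosExecutorDriver}
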
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
index 5980177..848b5c4 100644
--- a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
+++ b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -20,10 +20,9 @@ package org.apache.spark.io
 import java.io.{InputStream, OutputStream}
 
 import com.ning.compress.lzf.{LZFInputStream, LZFOutputStream}
-
 import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}
-import org.apache.spark.{SparkEnv, SparkConf}
 
+import org.apache.spark.SparkConf
 
 /**
  * CompressionCodec allows users to choose among different compression implementations

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
index e54ac0b..6883a54 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.metrics
 
+import java.io.{FileInputStream, InputStream}
 import java.util.Properties
-import java.io.{File, FileInputStream, InputStream, IOException}
 
 import scala.collection.mutable
 import scala.util.matching.Regex

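Beyond reordering, several hunks also drop imports that are no longer referenced; here File and IOException leave MetricsConfig entirely. The net effect, sketched:

  // only the classes the file actually uses survive the cleanup
  import java.io.{FileInputStream, InputStream}
  import java.util.Properties
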
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index de233e4..966c092 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -17,14 +17,14 @@
 
 package org.apache.spark.metrics
 
-import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
-
 import java.util.Properties
 import java.util.concurrent.TimeUnit
 
 import scala.collection.mutable
 
-import org.apache.spark.{SparkConf, Logging}
+import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
+
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.metrics.sink.{MetricsServlet, Sink}
 import org.apache.spark.metrics.source.Source
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
index bce257d..98fa1db 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
@@ -17,11 +17,11 @@
 
 package org.apache.spark.metrics.sink
 
-import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
-
 import java.util.Properties
 import java.util.concurrent.TimeUnit
 
+import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
+
 import org.apache.spark.metrics.MetricsSystem
 
 class ConsoleSink(val property: Properties, val registry: MetricRegistry) extends Sink {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
index 3d1a06a..40f6476 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.metrics.sink
 
-import com.codahale.metrics.{CsvReporter, MetricRegistry}
-
 import java.io.File
 import java.util.{Locale, Properties}
 import java.util.concurrent.TimeUnit
 
+import com.codahale.metrics.{CsvReporter, MetricRegistry}
+
 import org.apache.spark.metrics.MetricsSystem
 
 class CsvSink(val property: Properties, val registry: MetricRegistry) extends Sink {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
index b924907..410ca07 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala
@@ -20,8 +20,8 @@ package org.apache.spark.metrics.sink
 import java.util.Properties
 import java.util.concurrent.TimeUnit
 
-import com.codahale.metrics.ganglia.GangliaReporter
 import com.codahale.metrics.MetricRegistry
+import com.codahale.metrics.ganglia.GangliaReporter
 import info.ganglia.gmetric4j.gmetric.GMetric
 
 import org.apache.spark.metrics.MetricsSystem

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
index cdcfec8..e09be00 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.metrics.sink
 
+import java.net.InetSocketAddress
 import java.util.Properties
 import java.util.concurrent.TimeUnit
-import java.net.InetSocketAddress
 
 import com.codahale.metrics.MetricRegistry
-import com.codahale.metrics.graphite.{GraphiteReporter, Graphite}
+import com.codahale.metrics.graphite.{Graphite, GraphiteReporter}
 
 import org.apache.spark.metrics.MetricsSystem
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
index 621d086..b5cf210 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
@@ -17,10 +17,10 @@
 
 package org.apache.spark.metrics.sink
 
-import com.codahale.metrics.{JmxReporter, MetricRegistry}
-
 import java.util.Properties
 
+import com.codahale.metrics.{JmxReporter, MetricRegistry}
+
 class JmxSink(val property: Properties, val registry: MetricRegistry) extends Sink {
   val reporter: JmxReporter = JmxReporter.forRegistry(registry).build()
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
index 99357fe..3cdfe26 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
@@ -17,15 +17,13 @@
 
 package org.apache.spark.metrics.sink
 
-import com.codahale.metrics.MetricRegistry
-import com.codahale.metrics.json.MetricsModule
-
-import com.fasterxml.jackson.databind.ObjectMapper
-
 import java.util.Properties
 import java.util.concurrent.TimeUnit
 import javax.servlet.http.HttpServletRequest
 
+import com.codahale.metrics.MetricRegistry
+import com.codahale.metrics.json.MetricsModule
+import com.fasterxml.jackson.databind.ObjectMapper
 import org.eclipse.jetty.server.Handler
 
 import org.apache.spark.ui.JettyUtils

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/BufferMessage.scala b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
index fb4c659..d3c09b1 100644
--- a/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
+++ b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
@@ -23,7 +23,6 @@ import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.storage.BlockManager
 
-
 private[spark]
 class BufferMessage(id_ : Int, val buffers: ArrayBuffer[ByteBuffer], var ackId: Int)
   extends Message(Message.BUFFER_MESSAGE, id_) {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/Connection.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/Connection.scala b/core/src/main/scala/org/apache/spark/network/Connection.scala
index ae2007e..f2e3c1a 100644
--- a/core/src/main/scala/org/apache/spark/network/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/Connection.scala
@@ -17,16 +17,13 @@
 
 package org.apache.spark.network
 
-import org.apache.spark._
-
-import scala.collection.mutable.{HashMap, Queue, ArrayBuffer}
-
-import java.io._
+import java.net._
 import java.nio._
 import java.nio.channels._
-import java.nio.channels.spi._
-import java.net._
 
+import scala.collection.mutable.{ArrayBuffer, HashMap, Queue}
+
+import org.apache.spark._
 
 private[spark]
 abstract class Connection(val channel: SocketChannel, val selector: Selector,

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
index a78d6ac..3dd82be 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
@@ -17,24 +17,21 @@
 
 package org.apache.spark.network
 
-import org.apache.spark._
-
+import java.net._
 import java.nio._
 import java.nio.channels._
 import java.nio.channels.spi._
-import java.net._
 import java.util.concurrent.{LinkedBlockingDeque, TimeUnit, ThreadPoolExecutor}
 
-import scala.collection.mutable.HashSet
+import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
+import scala.collection.mutable.HashSet
 import scala.collection.mutable.SynchronizedMap
 import scala.collection.mutable.SynchronizedQueue
-import scala.collection.mutable.ArrayBuffer
-
-import scala.concurrent.{Await, Promise, ExecutionContext, Future}
-import scala.concurrent.duration.Duration
+import scala.concurrent.{Await, ExecutionContext, Future, Promise}
 import scala.concurrent.duration._
 
+import org.apache.spark._
 import org.apache.spark.util.Utils
 
 private[spark] class ConnectionManager(port: Int, conf: SparkConf) extends Logging {

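The ConnectionManager hunk combines several of the rules at once: the two scala.concurrent lines collapse into one sorted selector clause, the explicit Duration import disappears because the scala.concurrent.duration._ wildcard already covers it, and the mutable collection imports are re-sorted line by line. The wildcard case, in isolation (hypothetical snippet):

  import scala.concurrent.{Await, ExecutionContext, Future, Promise}
  import scala.concurrent.duration._  // subsumes Duration, so no separate import
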
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
index 50dd9bc..b82edb6 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
@@ -21,7 +21,6 @@ import java.net.InetSocketAddress
 
 import org.apache.spark.util.Utils
 
-
 private[spark] case class ConnectionManagerId(host: String, port: Int) {
   // DEBUG code
   Utils.checkHost(host)

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
index 8e5c529..35f6413 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
@@ -17,16 +17,13 @@
 
 package org.apache.spark.network
 
-import org.apache.spark._
-import org.apache.spark.SparkContext._
-
-import scala.io.Source
-
 import java.nio.ByteBuffer
-import java.net.InetAddress
 
 import scala.concurrent.Await
 import scala.concurrent.duration._
+import scala.io.Source
+
+import org.apache.spark._
 
 private[spark] object ConnectionManagerTest extends Logging {
   def main(args: Array[String]) {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/Message.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/Message.scala b/core/src/main/scala/org/apache/spark/network/Message.scala
index 2612884..20fe676 100644
--- a/core/src/main/scala/org/apache/spark/network/Message.scala
+++ b/core/src/main/scala/org/apache/spark/network/Message.scala
@@ -17,12 +17,11 @@
 
 package org.apache.spark.network
 
-import java.nio.ByteBuffer
 import java.net.InetSocketAddress
+import java.nio.ByteBuffer
 
 import scala.collection.mutable.ArrayBuffer
 
-
 private[spark] abstract class Message(val typ: Long, val id: Int) {
   var senderAddress: InetSocketAddress = null
   var started = false

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/MessageChunk.scala b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
index e0fe57b..d0f986a 100644
--- a/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
@@ -21,7 +21,6 @@ import java.nio.ByteBuffer
 
 import scala.collection.mutable.ArrayBuffer
 
-
 private[network]
 class MessageChunk(val header: MessageChunkHeader, val buffer: ByteBuffer) {
 

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
index 235fbc3..9bcbc61 100644
--- a/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
@@ -21,7 +21,6 @@ import java.net.InetAddress
 import java.net.InetSocketAddress
 import java.nio.ByteBuffer
 
-
 private[spark] class MessageChunkHeader(
     val typ: Long,
     val id: Int,

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
index 1c9d603..9976255 100644
--- a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.network
 
 import java.nio.ByteBuffer
-import java.net.InetAddress
+
 import org.apache.spark.SparkConf
 
 private[spark] object ReceiverTest {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/SenderTest.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/SenderTest.scala b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
index 162d49b..646f842 100644
--- a/core/src/main/scala/org/apache/spark/network/SenderTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.network
 
 import java.nio.ByteBuffer
-import java.net.InetAddress
+
 import org.apache.spark.SparkConf
 
 private[spark] object SenderTest {

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/ccb327a4/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
index 1b9fa1e..f9082ff 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
@@ -20,7 +20,7 @@ package org.apache.spark.network.netty
 import io.netty.buffer._
 
 import org.apache.spark.Logging
-import org.apache.spark.storage.{TestBlockId, BlockId}
+import org.apache.spark.storage.{BlockId, TestBlockId}
 
 private[spark] class FileHeader (
   val fileLen: Int,