Posted to commits@spark.apache.org by rx...@apache.org on 2015/12/31 10:34:16 UTC

spark git commit: [SPARK-3873][STREAMING] Import order fixes for streaming.

Repository: spark
Updated Branches:
  refs/heads/master 5cdecb184 -> efb10cc9a


[SPARK-3873][STREAMING] Import order fixes for streaming.

Also included a few miscellaneous other modules that had very few violations.

Author: Marcelo Vanzin <va...@cloudera.com>

Closes #10532 from vanzin/SPARK-3873-streaming.
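
The ordering these changes converge on, inferred from the diffs below rather
than quoted from any style document: java/javax imports first, then scala.*,
then third-party libraries, then org.apache.spark, with a blank line between
groups and names sorted within each group (and within braces). A minimal
Scala sketch, reusing import lines that appear verbatim in the diffs:

    import java.io.{ObjectInput, ObjectOutput}        // 1. java / javax
    import java.net.InetSocketAddress

    import scala.collection.JavaConverters._          // 2. scala.*
    import scala.reflect.ClassTag

    import org.apache.flume.{Channel, Transaction}    // 3. third-party

    import org.apache.spark.Logging                   // 4. org.apache.spark
    import org.apache.spark.util.Utils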


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/efb10cc9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/efb10cc9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/efb10cc9

Branch: refs/heads/master
Commit: efb10cc9ad370955cec64e8f63a3b646058a9840
Parents: 5cdecb1
Author: Marcelo Vanzin <va...@cloudera.com>
Authored: Thu Dec 31 01:34:13 2015 -0800
Committer: Reynold Xin <rx...@databricks.com>
Committed: Thu Dec 31 01:34:13 2015 -0800

----------------------------------------------------------------------
 .../flume/sink/TransactionProcessor.scala       |  2 +-
 .../streaming/flume/EventTransformer.scala      |  4 ++--
 .../streaming/flume/FlumeInputDStream.scala     | 20 +++++++++-----------
 .../flume/FlumePollingInputDStream.scala        |  2 +-
 .../spark/streaming/flume/FlumeTestUtils.scala  |  2 +-
 .../spark/streaming/flume/FlumeUtils.scala      |  3 +--
 .../streaming/flume/PollingFlumeTestUtils.scala |  4 ++--
 .../spark/streaming/kafka/KafkaCluster.scala    | 11 +++++++----
 .../streaming/kafka/KafkaInputDStream.scala     |  2 +-
 .../apache/spark/streaming/kafka/KafkaRDD.scala | 10 +++++-----
 .../spark/streaming/kafka/KafkaTestUtils.scala  |  4 ++--
 .../spark/streaming/kafka/KafkaUtils.scala      | 10 +++++-----
 .../streaming/kafka/ReliableKafkaReceiver.scala |  6 +++---
 .../streaming/twitter/TwitterInputDStream.scala |  6 +++---
 .../spark/streaming/twitter/TwitterUtils.scala  |  5 +++--
 .../spark/streaming/zeromq/ZeroMQUtils.scala    |  2 +-
 .../apache/spark/repl/ExecutorClassLoader.scala |  7 +++----
 .../org/apache/spark/streaming/Checkpoint.scala |  7 +++----
 .../apache/spark/streaming/DStreamGraph.scala   |  6 ++++--
 .../org/apache/spark/streaming/StateSpec.scala  |  3 ++-
 .../spark/streaming/StreamingContext.scala      |  2 +-
 .../spark/streaming/api/java/JavaDStream.scala  | 12 ++++++------
 .../streaming/api/java/JavaPairDStream.scala    |  2 +-
 .../api/java/JavaPairInputDStream.scala         |  4 ++--
 .../api/java/JavaStreamingContext.scala         |  6 +++---
 .../streaming/api/python/PythonDStream.scala    |  5 ++---
 .../dstream/ConstantInputDStream.scala          |  2 +-
 .../dstream/DStreamCheckpointData.scala         |  8 +++++---
 .../streaming/dstream/FilteredDStream.scala     |  5 +++--
 .../dstream/FlatMapValuedDStream.scala          |  7 ++++---
 .../streaming/dstream/FlatMappedDStream.scala   |  5 +++--
 .../streaming/dstream/ForEachDStream.scala      |  3 ++-
 .../streaming/dstream/GlommedDStream.scala      |  5 +++--
 .../dstream/MapPartitionedDStream.scala         |  5 +++--
 .../streaming/dstream/MapValuedDStream.scala    |  7 ++++---
 .../streaming/dstream/MapWithStateDStream.scala |  2 +-
 .../spark/streaming/dstream/MappedDStream.scala |  5 +++--
 .../dstream/PairDStreamFunctions.scala          |  4 ++--
 .../dstream/PluggableInputDStream.scala         |  3 ++-
 .../streaming/dstream/QueueInputDStream.scala   |  2 +-
 .../streaming/dstream/RawInputDStream.scala     | 15 +++++++--------
 .../dstream/ReceiverInputDStream.scala          |  4 ++--
 .../dstream/ReducedWindowedDStream.scala        | 11 ++++-------
 .../streaming/dstream/ShuffledDStream.scala     |  5 +++--
 .../streaming/dstream/SocketInputDStream.scala  | 13 ++++++-------
 .../spark/streaming/dstream/StateDStream.scala  |  6 +++---
 .../spark/streaming/dstream/UnionDStream.scala  |  3 +--
 .../streaming/dstream/WindowedDStream.scala     |  4 ++--
 .../spark/streaming/rdd/MapWithStateRDD.scala   |  4 ++--
 .../streaming/receiver/BlockGenerator.scala     |  2 +-
 .../receiver/ReceivedBlockHandler.scala         |  4 ++--
 .../spark/streaming/receiver/Receiver.scala     |  2 +-
 .../streaming/receiver/ReceiverSupervisor.scala |  4 ++--
 .../receiver/ReceiverSupervisorImpl.scala       |  2 +-
 .../streaming/scheduler/InputInfoTracker.scala  |  2 +-
 .../apache/spark/streaming/scheduler/Job.scala  |  2 +-
 .../streaming/scheduler/JobGenerator.scala      |  4 ++--
 .../scheduler/ReceivedBlockTracker.scala        |  2 +-
 .../streaming/scheduler/ReceiverTracker.scala   |  4 ++--
 .../streaming/scheduler/StreamingListener.scala |  2 +-
 .../apache/spark/streaming/ui/BatchPage.scala   |  2 +-
 .../ui/StreamingJobProgressListener.scala       | 12 +++---------
 .../spark/streaming/ui/StreamingTab.scala       |  6 +++---
 .../org/apache/spark/streaming/ui/UIUtils.scala |  8 ++++----
 .../streaming/util/BatchedWriteAheadLog.scala   |  2 +-
 .../streaming/util/FileBasedWriteAheadLog.scala |  4 ++--
 .../util/FileBasedWriteAheadLogReader.scala     |  3 ++-
 .../util/RateLimitedOutputStream.scala          |  5 ++---
 .../spark/streaming/util/RawTextSender.scala    |  2 +-
 .../streaming/util/WriteAheadLogUtils.scala     |  2 +-
 .../apache/spark/tools/GenerateMIMAIgnore.scala |  2 +-
 .../tools/JavaAPICompletenessChecker.scala      |  6 +++---
 .../apache/spark/tools/StoragePerfTester.scala  |  2 +-
 73 files changed, 181 insertions(+), 180 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala
----------------------------------------------------------------------
diff --git a/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala b/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala
index 7ad43b1..b15c209 100644
--- a/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala
+++ b/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/TransactionProcessor.scala
@@ -22,7 +22,7 @@ import java.util.concurrent.{Callable, CountDownLatch, TimeUnit}
 
 import scala.util.control.Breaks
 
-import org.apache.flume.{Transaction, Channel}
+import org.apache.flume.{Channel, Transaction}
 
 // Flume forces transactions to be thread-local (horrible, I know!)
 // So the sink basically spawns a new thread to pull the events out within a transaction.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala
----------------------------------------------------------------------
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala
index 48df27b..5c773d4 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/EventTransformer.scala
@@ -17,12 +17,12 @@
 
 package org.apache.spark.streaming.flume
 
-import java.io.{ObjectOutput, ObjectInput}
+import java.io.{ObjectInput, ObjectOutput}
 
 import scala.collection.JavaConverters._
 
-import org.apache.spark.util.Utils
 import org.apache.spark.Logging
+import org.apache.spark.util.Utils
 
 /**
  * A simple object that provides the implementation of readExternal and writeExternal for both

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
----------------------------------------------------------------------
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
index 2b9116e..1bfa35a 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
@@ -17,29 +17,27 @@
 
 package org.apache.spark.streaming.flume
 
+import java.io.{Externalizable, ObjectInput, ObjectOutput}
 import java.net.InetSocketAddress
-import java.io.{ObjectInput, ObjectOutput, Externalizable}
 import java.nio.ByteBuffer
 import java.util.concurrent.Executors
 
 import scala.collection.JavaConverters._
 import scala.reflect.ClassTag
 
-import org.apache.flume.source.avro.AvroSourceProtocol
-import org.apache.flume.source.avro.AvroFlumeEvent
-import org.apache.flume.source.avro.Status
-import org.apache.avro.ipc.specific.SpecificResponder
 import org.apache.avro.ipc.NettyServer
+import org.apache.avro.ipc.specific.SpecificResponder
+import org.apache.flume.source.avro.{AvroFlumeEvent, AvroSourceProtocol, Status}
+import org.jboss.netty.channel.{ChannelPipeline, ChannelPipelineFactory, Channels}
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory
+import org.jboss.netty.handler.codec.compression._
+
 import org.apache.spark.Logging
-import org.apache.spark.util.Utils
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.streaming.dstream._
 import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.streaming.dstream._
 import org.apache.spark.streaming.receiver.Receiver
-
-import org.jboss.netty.channel.{ChannelPipeline, ChannelPipelineFactory, Channels}
-import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory
-import org.jboss.netty.handler.codec.compression._
+import org.apache.spark.util.Utils
 
 private[streaming]
 class FlumeInputDStream[T: ClassTag](

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumePollingInputDStream.scala
----------------------------------------------------------------------
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumePollingInputDStream.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumePollingInputDStream.scala
index 6737750..d9c25e8 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumePollingInputDStream.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumePollingInputDStream.scala
@@ -32,8 +32,8 @@ import org.apache.spark.Logging
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming.StreamingContext
 import org.apache.spark.streaming.dstream.ReceiverInputDStream
-import org.apache.spark.streaming.receiver.Receiver
 import org.apache.spark.streaming.flume.sink._
+import org.apache.spark.streaming.receiver.Receiver
 
 /**
  * A [[ReceiverInputDStream]] that can be used to read data from several Flume agents running

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala
----------------------------------------------------------------------
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala
index fe5dcc8..3f87ce4 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala
@@ -29,7 +29,7 @@ import org.apache.avro.ipc.NettyTransceiver
 import org.apache.avro.ipc.specific.SpecificRequestor
 import org.apache.commons.lang3.RandomUtils
 import org.apache.flume.source.avro
-import org.apache.flume.source.avro.{AvroSourceProtocol, AvroFlumeEvent}
+import org.apache.flume.source.avro.{AvroFlumeEvent, AvroSourceProtocol}
 import org.jboss.netty.channel.ChannelPipeline
 import org.jboss.netty.channel.socket.SocketChannel
 import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala
----------------------------------------------------------------------
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala
index c719b80..3e3ed71 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeUtils.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.streaming.flume
 
+import java.io.{ByteArrayOutputStream, DataOutputStream}
 import java.net.InetSocketAddress
-import java.io.{DataOutputStream, ByteArrayOutputStream}
 import java.util.{List => JList, Map => JMap}
 
 import scala.collection.JavaConverters._
@@ -30,7 +30,6 @@ import org.apache.spark.streaming.StreamingContext
 import org.apache.spark.streaming.api.java.{JavaPairDStream, JavaReceiverInputDStream, JavaStreamingContext}
 import org.apache.spark.streaming.dstream.ReceiverInputDStream
 
-
 object FlumeUtils {
   private val DEFAULT_POLLING_PARALLELISM = 5
   private val DEFAULT_POLLING_BATCH_SIZE = 1000

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala
----------------------------------------------------------------------
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala
index bfe7548..9515d07 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.streaming.flume
 
-import java.util.concurrent._
 import java.util.{Collections, List => JList, Map => JMap}
+import java.util.concurrent._
 
 import scala.collection.mutable.ArrayBuffer
 
@@ -28,7 +28,7 @@ import org.apache.flume.Context
 import org.apache.flume.channel.MemoryChannel
 import org.apache.flume.conf.Configurables
 
-import org.apache.spark.streaming.flume.sink.{SparkSinkConfig, SparkSink}
+import org.apache.spark.streaming.flume.sink.{SparkSink, SparkSinkConfig}
 
 /**
  * Share codes for Scala and Python unit tests

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaCluster.scala
----------------------------------------------------------------------
diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaCluster.scala b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaCluster.scala
index 8465432..c4e18d9 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaCluster.scala
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaCluster.scala
@@ -17,14 +17,17 @@
 
 package org.apache.spark.streaming.kafka
 
-import scala.util.control.NonFatal
-import scala.util.Random
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.JavaConverters._
 import java.util.Properties
+
+import scala.collection.JavaConverters._
+import scala.collection.mutable.ArrayBuffer
+import scala.util.Random
+import scala.util.control.NonFatal
+
 import kafka.api._
 import kafka.common.{ErrorMapping, OffsetAndMetadata, OffsetMetadataAndError, TopicAndPartition}
 import kafka.consumer.{ConsumerConfig, SimpleConsumer}
+
 import org.apache.spark.SparkException
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala
----------------------------------------------------------------------
diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala
index 38730fe..67f2360 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaInputDStream.scala
@@ -22,7 +22,7 @@ import java.util.Properties
 import scala.collection.Map
 import scala.reflect.{classTag, ClassTag}
 
-import kafka.consumer.{KafkaStream, Consumer, ConsumerConfig, ConsumerConnector}
+import kafka.consumer.{Consumer, ConsumerConfig, ConsumerConnector, KafkaStream}
 import kafka.serializer.Decoder
 import kafka.utils.VerifiableProperties
 

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaRDD.scala
----------------------------------------------------------------------
diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaRDD.scala b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaRDD.scala
index ea5f842..603be22 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaRDD.scala
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaRDD.scala
@@ -20,11 +20,6 @@ package org.apache.spark.streaming.kafka
 import scala.collection.mutable.ArrayBuffer
 import scala.reflect.{classTag, ClassTag}
 
-import org.apache.spark.{Logging, Partition, SparkContext, SparkException, TaskContext}
-import org.apache.spark.partial.{PartialResult, BoundedDouble}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.util.NextIterator
-
 import kafka.api.{FetchRequestBuilder, FetchResponse}
 import kafka.common.{ErrorMapping, TopicAndPartition}
 import kafka.consumer.SimpleConsumer
@@ -32,6 +27,11 @@ import kafka.message.{MessageAndMetadata, MessageAndOffset}
 import kafka.serializer.Decoder
 import kafka.utils.VerifiableProperties
 
+import org.apache.spark.{Logging, Partition, SparkContext, SparkException, TaskContext}
+import org.apache.spark.partial.{BoundedDouble, PartialResult}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.util.NextIterator
+
 /**
  * A batch-oriented interface for consuming from Kafka.
  * Starting and ending offsets are specified in advance,

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala
----------------------------------------------------------------------
diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala
index 45a6982..a76fa66 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaTestUtils.scala
@@ -20,8 +20,8 @@ package org.apache.spark.streaming.kafka
 import java.io.File
 import java.lang.{Integer => JInt}
 import java.net.InetSocketAddress
-import java.util.concurrent.TimeoutException
 import java.util.{Map => JMap, Properties}
+import java.util.concurrent.TimeoutException
 
 import scala.annotation.tailrec
 import scala.collection.JavaConverters._
@@ -37,9 +37,9 @@ import kafka.utils.{ZKStringSerializer, ZkUtils}
 import org.I0Itec.zkclient.ZkClient
 import org.apache.zookeeper.server.{NIOServerCnxnFactory, ZooKeeperServer}
 
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.streaming.Time
 import org.apache.spark.util.Utils
-import org.apache.spark.{Logging, SparkConf}
 
 /**
  * This is a helper class for Kafka test suites. This has the functionality to set up

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaUtils.scala
----------------------------------------------------------------------
diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaUtils.scala b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaUtils.scala
index fe57222..0cb875c 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaUtils.scala
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/KafkaUtils.scala
@@ -27,19 +27,19 @@ import scala.reflect.ClassTag
 import com.google.common.base.Charsets.UTF_8
 import kafka.common.TopicAndPartition
 import kafka.message.MessageAndMetadata
-import kafka.serializer.{DefaultDecoder, Decoder, StringDecoder}
-import net.razorvine.pickle.{Opcodes, Pickler, IObjectPickler}
+import kafka.serializer.{Decoder, DefaultDecoder, StringDecoder}
+import net.razorvine.pickle.{IObjectPickler, Opcodes, Pickler}
 
-import org.apache.spark.api.java.function.{Function => JFunction}
-import org.apache.spark.streaming.util.WriteAheadLogUtils
 import org.apache.spark.{SparkContext, SparkException}
-import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
+import org.apache.spark.api.java.{JavaPairRDD, JavaRDD, JavaSparkContext}
+import org.apache.spark.api.java.function.{Function => JFunction}
 import org.apache.spark.api.python.SerDeUtil
 import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming.StreamingContext
 import org.apache.spark.streaming.api.java._
 import org.apache.spark.streaming.dstream.{DStream, InputDStream, ReceiverInputDStream}
+import org.apache.spark.streaming.util.WriteAheadLogUtils
 
 object KafkaUtils {
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/ReliableKafkaReceiver.scala
----------------------------------------------------------------------
diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/ReliableKafkaReceiver.scala b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/ReliableKafkaReceiver.scala
index 764d170..a872781 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/ReliableKafkaReceiver.scala
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/ReliableKafkaReceiver.scala
@@ -18,10 +18,10 @@
 package org.apache.spark.streaming.kafka
 
 import java.util.Properties
-import java.util.concurrent.{ThreadPoolExecutor, ConcurrentHashMap}
+import java.util.concurrent.{ConcurrentHashMap, ThreadPoolExecutor}
 
-import scala.collection.{Map, mutable}
-import scala.reflect.{ClassTag, classTag}
+import scala.collection.{mutable, Map}
+import scala.reflect.{classTag, ClassTag}
 
 import kafka.common.TopicAndPartition
 import kafka.consumer.{Consumer, ConsumerConfig, ConsumerConnector, KafkaStream}

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala
----------------------------------------------------------------------
diff --git a/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala b/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala
index 9a85a65..a48eec7 100644
--- a/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala
+++ b/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterInputDStream.scala
@@ -19,13 +19,13 @@ package org.apache.spark.streaming.twitter
 
 import twitter4j._
 import twitter4j.auth.Authorization
-import twitter4j.conf.ConfigurationBuilder
 import twitter4j.auth.OAuthAuthorization
+import twitter4j.conf.ConfigurationBuilder
 
+import org.apache.spark.Logging
+import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming._
 import org.apache.spark.streaming.dstream._
-import org.apache.spark.storage.StorageLevel
-import org.apache.spark.Logging
 import org.apache.spark.streaming.receiver.Receiver
 
 /* A stream of Twitter statuses, potentially filtered by one or more keywords.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterUtils.scala
----------------------------------------------------------------------
diff --git a/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterUtils.scala b/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterUtils.scala
index c6a9a2b..3e843e9 100644
--- a/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterUtils.scala
+++ b/external/twitter/src/main/scala/org/apache/spark/streaming/twitter/TwitterUtils.scala
@@ -19,10 +19,11 @@ package org.apache.spark.streaming.twitter
 
 import twitter4j.Status
 import twitter4j.auth.Authorization
+
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming.StreamingContext
-import org.apache.spark.streaming.api.java.{JavaReceiverInputDStream, JavaDStream, JavaStreamingContext}
-import org.apache.spark.streaming.dstream.{ReceiverInputDStream, DStream}
+import org.apache.spark.streaming.api.java.{JavaDStream, JavaReceiverInputDStream, JavaStreamingContext}
+import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
 
 object TwitterUtils {
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/external/zeromq/src/main/scala/org/apache/spark/streaming/zeromq/ZeroMQUtils.scala
----------------------------------------------------------------------
diff --git a/external/zeromq/src/main/scala/org/apache/spark/streaming/zeromq/ZeroMQUtils.scala b/external/zeromq/src/main/scala/org/apache/spark/streaming/zeromq/ZeroMQUtils.scala
index 4ea218e..63cd8a2 100644
--- a/external/zeromq/src/main/scala/org/apache/spark/streaming/zeromq/ZeroMQUtils.scala
+++ b/external/zeromq/src/main/scala/org/apache/spark/streaming/zeromq/ZeroMQUtils.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.streaming.zeromq
 
-import scala.reflect.ClassTag
 import scala.collection.JavaConverters._
+import scala.reflect.ClassTag
 
 import akka.actor.{Props, SupervisorStrategy}
 import akka.util.ByteString

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
index de7b831..2bf1be1 100644
--- a/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.repl
 
-import java.io.{FilterInputStream, ByteArrayOutputStream, InputStream, IOException}
+import java.io.{ByteArrayOutputStream, FilterInputStream, InputStream, IOException}
 import java.net.{HttpURLConnection, URI, URL, URLEncoder}
 import java.nio.channels.Channels
 
@@ -27,10 +27,9 @@ import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.xbean.asm5._
 import org.apache.xbean.asm5.Opcodes._
 
-import org.apache.spark.{SparkConf, SparkEnv, Logging}
+import org.apache.spark.{Logging, SparkConf, SparkEnv}
 import org.apache.spark.deploy.SparkHadoopUtil
-import org.apache.spark.util.Utils
-import org.apache.spark.util.ParentClassLoader
+import org.apache.spark.util.{ParentClassLoader, Utils}
 
 /**
  * A ClassLoader that reads classes from a Hadoop FileSystem or HTTP URI,

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala b/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
index d0046af..61b230a 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
@@ -21,15 +21,14 @@ import java.io._
 import java.util.concurrent.Executors
 import java.util.concurrent.RejectedExecutionException
 
-import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.fs.{FileSystem, Path}
 
-import org.apache.spark.{SparkException, SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.io.CompressionCodec
-import org.apache.spark.util.{MetadataCleaner, Utils}
 import org.apache.spark.streaming.scheduler.JobGenerator
-
+import org.apache.spark.util.{MetadataCleaner, Utils}
 
 private[streaming]
 class Checkpoint(ssc: StreamingContext, val checkpointTime: Time)

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala b/streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala
index 7829f5e..eedb42c 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala
@@ -17,11 +17,13 @@
 
 package org.apache.spark.streaming
 
+import java.io.{IOException, ObjectInputStream, ObjectOutputStream}
+
 import scala.collection.mutable.ArrayBuffer
-import java.io.{ObjectInputStream, IOException, ObjectOutputStream}
+
 import org.apache.spark.Logging
+import org.apache.spark.streaming.dstream.{DStream, InputDStream, ReceiverInputDStream}
 import org.apache.spark.streaming.scheduler.Job
-import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream, InputDStream}
 import org.apache.spark.util.Utils
 
 final private[streaming] class DStreamGraph extends Serializable with Logging {

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/StateSpec.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/StateSpec.scala b/streaming/src/main/scala/org/apache/spark/streaming/StateSpec.scala
index 9f6f952..0b09455 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/StateSpec.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/StateSpec.scala
@@ -18,12 +18,13 @@
 package org.apache.spark.streaming
 
 import com.google.common.base.Optional
+
+import org.apache.spark.{HashPartitioner, Partitioner}
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.api.java.{JavaPairRDD, JavaUtils}
 import org.apache.spark.api.java.function.{Function3 => JFunction3, Function4 => JFunction4}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.util.ClosureCleaner
-import org.apache.spark.{HashPartitioner, Partitioner}
 
 /**
  * :: Experimental ::

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
index b24c0d0..c4a10aa 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
@@ -29,8 +29,8 @@ import akka.actor.{Props, SupervisorStrategy}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
-import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
 
 import org.apache.spark._
 import org.apache.spark.annotation.{DeveloperApi, Experimental}

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala
index 01cdcb0..a59f4ef 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala
@@ -17,14 +17,14 @@
 
 package org.apache.spark.streaming.api.java
 
-import org.apache.spark.streaming.{Duration, Time}
-import org.apache.spark.api.java.function.{Function => JFunction}
-import org.apache.spark.api.java.JavaRDD
-import org.apache.spark.storage.StorageLevel
-import org.apache.spark.rdd.RDD
-
 import scala.language.implicitConversions
 import scala.reflect.ClassTag
+
+import org.apache.spark.api.java.JavaRDD
+import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.{Duration, Time}
 import org.apache.spark.streaming.dstream.DStream
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
index 42ddd63..2bf3cce 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.streaming.api.java
 
-import java.lang.{Long => JLong, Iterable => JIterable}
+import java.lang.{Iterable => JIterable, Long => JLong}
 import java.util.{List => JList}
 
 import scala.collection.JavaConverters._

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairInputDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairInputDStream.scala
index e6ff8a0..da0db02 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairInputDStream.scala
@@ -17,11 +17,11 @@
 
 package org.apache.spark.streaming.api.java
 
-import org.apache.spark.streaming.dstream.InputDStream
-
 import scala.language.implicitConversions
 import scala.reflect.ClassTag
 
+import org.apache.spark.streaming.dstream.InputDStream
+
 /**
  * A Java-friendly interface to [[org.apache.spark.streaming.dstream.InputDStream]] of
  * key-value pairs.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
index 7a50135..00f9d8a 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
@@ -17,14 +17,15 @@
 
 package org.apache.spark.streaming.api.java
 
-import java.lang.{Boolean => JBoolean}
 import java.io.{Closeable, InputStream}
+import java.lang.{Boolean => JBoolean}
 import java.util.{List => JList, Map => JMap}
 
 import scala.collection.JavaConverters._
 import scala.reflect.ClassTag
 
 import akka.actor.{Props, SupervisorStrategy}
+import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
 
@@ -37,10 +38,9 @@ import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming._
-import org.apache.spark.streaming.scheduler.StreamingListener
 import org.apache.spark.streaming.dstream.DStream
 import org.apache.spark.streaming.receiver.Receiver
-import org.apache.hadoop.conf.Configuration
+import org.apache.spark.streaming.scheduler.StreamingListener
 
 /**
  * A Java-friendly version of [[org.apache.spark.streaming.StreamingContext]] which is the main

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
index 056248c..953fe95 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
@@ -30,12 +30,11 @@ import org.apache.spark.SparkException
 import org.apache.spark.api.java._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.streaming.{Interval, Duration, Time}
-import org.apache.spark.streaming.dstream._
+import org.apache.spark.streaming.{Duration, Interval, Time}
 import org.apache.spark.streaming.api.java._
+import org.apache.spark.streaming.dstream._
 import org.apache.spark.util.Utils
 
-
 /**
  * Interface for Python callback function which is used to transform RDDs
  */

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala
index 4eb92dd..695384d 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala
@@ -20,7 +20,7 @@ package org.apache.spark.streaming.dstream
 import scala.reflect.ClassTag
 
 import org.apache.spark.rdd.RDD
-import org.apache.spark.streaming.{Time, StreamingContext}
+import org.apache.spark.streaming.{StreamingContext, Time}
 
 /**
  * An input stream that always returns the same RDD on each timestep. Useful for testing.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStreamCheckpointData.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStreamCheckpointData.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStreamCheckpointData.scala
index 39fd213..3eff174 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStreamCheckpointData.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStreamCheckpointData.scala
@@ -17,11 +17,13 @@
 
 package org.apache.spark.streaming.dstream
 
+import java.io.{IOException, ObjectInputStream, ObjectOutputStream}
+
 import scala.collection.mutable.HashMap
 import scala.reflect.ClassTag
-import java.io.{ObjectOutputStream, ObjectInputStream, IOException}
-import org.apache.hadoop.fs.Path
-import org.apache.hadoop.fs.FileSystem
+
+import org.apache.hadoop.fs.{FileSystem, Path}
+
 import org.apache.spark.Logging
 import org.apache.spark.streaming.Time
 import org.apache.spark.util.Utils

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala
index fcd5216..4307988 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.streaming.{Duration, Time}
-import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Duration, Time}
+
 private[streaming]
 class FilteredDStream[T: ClassTag](
     parent: DStream[T],

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala
index 9d09a3b..778d556 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala
@@ -17,11 +17,12 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.streaming.{Duration, Time}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext._
 import scala.reflect.ClassTag
 
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Duration, Time}
+
 private[streaming]
 class FlatMapValuedDStream[K: ClassTag, V: ClassTag, U: ClassTag](
     parent: DStream[(K, V)],

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala
index 475ea2d..96a444a 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.streaming.{Duration, Time}
-import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Duration, Time}
+
 private[streaming]
 class FlatMappedDStream[T: ClassTag, U: ClassTag](
     parent: DStream[T],

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala
index 4410a99..a0fadee 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.streaming.dstream
 
+import scala.reflect.ClassTag
+
 import org.apache.spark.rdd.RDD
 import org.apache.spark.streaming.{Duration, Time}
 import org.apache.spark.streaming.scheduler.Job
-import scala.reflect.ClassTag
 
 /**
  * An internal DStream used to represent output operations like DStream.foreachRDD.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala
index dbb295f..9f1252f 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.streaming.{Duration, Time}
-import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Duration, Time}
+
 private[streaming]
 class GlommedDStream[T: ClassTag](parent: DStream[T])
   extends DStream[Array[T]](parent.ssc) {

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala
index 5994bc1..bcdf175 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.streaming.{Duration, Time}
-import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Duration, Time}
+
 private[streaming]
 class MapPartitionedDStream[T: ClassTag, U: ClassTag](
     parent: DStream[T],

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala
index 954d2eb..855c3dd 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala
@@ -17,11 +17,12 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.streaming.{Duration, Time}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext._
 import scala.reflect.ClassTag
 
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Duration, Time}
+
 private[streaming]
 class MapValuedDStream[K: ClassTag, V: ClassTag, U: ClassTag](
     parent: DStream[(K, V)],

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapWithStateDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapWithStateDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapWithStateDStream.scala
index 706465d..36ff9c7 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapWithStateDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapWithStateDStream.scala
@@ -24,8 +24,8 @@ import org.apache.spark.annotation.Experimental
 import org.apache.spark.rdd.{EmptyRDD, RDD}
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming._
-import org.apache.spark.streaming.rdd.{MapWithStateRDD, MapWithStateRDDRecord}
 import org.apache.spark.streaming.dstream.InternalMapWithStateDStream._
+import org.apache.spark.streaming.rdd.{MapWithStateRDD, MapWithStateRDDRecord}
 
 /**
  * :: Experimental ::

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala
index fa14b2e..e11d826 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala
@@ -17,10 +17,11 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.streaming.{Duration, Time}
-import org.apache.spark.rdd.RDD
 import scala.reflect.ClassTag
 
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.{Duration, Time}
+
 private[streaming]
 class MappedDStream[T: ClassTag, U: ClassTag] (
     parent: DStream[T],

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/PairDStreamFunctions.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/PairDStreamFunctions.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/PairDStreamFunctions.scala
index a64a1fe..babc722 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/PairDStreamFunctions.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/PairDStreamFunctions.scala
@@ -24,12 +24,12 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.mapred.{JobConf, OutputFormat}
 import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat}
 
+import org.apache.spark.{HashPartitioner, Partitioner}
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.rdd.RDD
-import org.apache.spark.streaming.StreamingContext.rddToFileName
 import org.apache.spark.streaming._
+import org.apache.spark.streaming.StreamingContext.rddToFileName
 import org.apache.spark.util.{SerializableConfiguration, SerializableJobConf}
-import org.apache.spark.{HashPartitioner, Partitioner}
 
 /**
  * Extra functions available on DStream of (key, value) pairs through an implicit conversion.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala
index 002aac9..2442e4c 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala
@@ -17,8 +17,9 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.streaming.StreamingContext
 import scala.reflect.ClassTag
+
+import org.apache.spark.streaming.StreamingContext
 import org.apache.spark.streaming.receiver.Receiver
 
 private[streaming]

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala
index cd07364..a8d108d 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala
@@ -23,7 +23,7 @@ import scala.collection.mutable.{ArrayBuffer, Queue}
 import scala.reflect.ClassTag
 
 import org.apache.spark.rdd.{RDD, UnionRDD}
-import org.apache.spark.streaming.{Time, StreamingContext}
+import org.apache.spark.streaming.{StreamingContext, Time}
 
 private[streaming]
 class QueueInputDStream[T: ClassTag](

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala
index 5a9eda7..ac73dca 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala
@@ -17,19 +17,18 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.{Logging, SparkEnv}
-import org.apache.spark.storage.{StorageLevel, StreamBlockId}
-import org.apache.spark.streaming.StreamingContext
-
-import scala.reflect.ClassTag
-
+import java.io.EOFException
 import java.net.InetSocketAddress
 import java.nio.ByteBuffer
 import java.nio.channels.{ReadableByteChannel, SocketChannel}
-import java.io.EOFException
 import java.util.concurrent.ArrayBlockingQueue
-import org.apache.spark.streaming.receiver.Receiver
 
+import scala.reflect.ClassTag
+
+import org.apache.spark.{Logging, SparkEnv}
+import org.apache.spark.storage.{StorageLevel, StreamBlockId}
+import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.streaming.receiver.Receiver
 
 /**
  * An input stream that reads blocks of serialized objects from a given network address.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReceiverInputDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReceiverInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReceiverInputDStream.scala
index 87c20af..a18551f 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReceiverInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReceiverInputDStream.scala
@@ -21,12 +21,12 @@ import scala.reflect.ClassTag
 
 import org.apache.spark.rdd.{BlockRDD, RDD}
 import org.apache.spark.storage.BlockId
+import org.apache.spark.streaming.{StreamingContext, Time}
 import org.apache.spark.streaming.rdd.WriteAheadLogBackedBlockRDD
 import org.apache.spark.streaming.receiver.Receiver
+import org.apache.spark.streaming.scheduler.{RateController, ReceivedBlockInfo, StreamInputInfo}
 import org.apache.spark.streaming.scheduler.rate.RateEstimator
-import org.apache.spark.streaming.scheduler.{ReceivedBlockInfo, RateController, StreamInputInfo}
 import org.apache.spark.streaming.util.WriteAheadLogUtils
-import org.apache.spark.streaming.{StreamingContext, Time}
 
 /**
  * Abstract class for defining any [[org.apache.spark.streaming.dstream.InputDStream]]

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala
index 6a583bf..5359549 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala
@@ -17,18 +17,15 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.rdd.RDD
-import org.apache.spark.rdd.{CoGroupedRDD, MapPartitionsRDD}
+import scala.collection.mutable.ArrayBuffer
+import scala.reflect.ClassTag
+
 import org.apache.spark.Partitioner
 import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.{CoGroupedRDD, MapPartitionsRDD, RDD}
 import org.apache.spark.storage.StorageLevel
-
-import scala.collection.mutable.ArrayBuffer
 import org.apache.spark.streaming.{Duration, Interval, Time}
 
-import scala.collection.mutable.ArrayBuffer
-import scala.reflect.ClassTag
-
 private[streaming]
 class ReducedWindowedDStream[K: ClassTag, V: ClassTag](
     parent: DStream[(K, V)],

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala
index e0ffd5d..0fe1544 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala
@@ -17,11 +17,12 @@
 
 package org.apache.spark.streaming.dstream
 
+import scala.reflect.ClassTag
+
 import org.apache.spark.Partitioner
-import org.apache.spark.rdd.RDD
 import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
 import org.apache.spark.streaming.{Duration, Time}
-import scala.reflect.ClassTag
 
 private[streaming]
 class ShuffledDStream[K: ClassTag, V: ClassTag, C: ClassTag](

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala
index de84e0c..10644b9 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala
@@ -17,18 +17,17 @@
 
 package org.apache.spark.streaming.dstream
 
-import scala.util.control.NonFatal
-
-import org.apache.spark.streaming.StreamingContext
-import org.apache.spark.storage.StorageLevel
-import org.apache.spark.util.NextIterator
+import java.io._
+import java.net.{Socket, UnknownHostException}
 
 import scala.reflect.ClassTag
+import scala.util.control.NonFatal
 
-import java.io._
-import java.net.{UnknownHostException, Socket}
 import org.apache.spark.Logging
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.StreamingContext
 import org.apache.spark.streaming.receiver.Receiver
+import org.apache.spark.util.NextIterator
 
 private[streaming]
 class SocketInputDStream[T: ClassTag](

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala
index 621d6df..ebbe139 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala
@@ -17,14 +17,14 @@
 
 package org.apache.spark.streaming.dstream
 
-import org.apache.spark.rdd.RDD
+import scala.reflect.ClassTag
+
 import org.apache.spark.Partitioner
 import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming.{Duration, Time}
 
-import scala.reflect.ClassTag
-
 private[streaming]
 class StateDStream[K: ClassTag, V: ClassTag, S: ClassTag](
     parent: DStream[(K, V)],

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala
index d73ffdf..2b07dd6 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala
@@ -21,9 +21,8 @@ import scala.collection.mutable.ArrayBuffer
 import scala.reflect.ClassTag
 
 import org.apache.spark.SparkException
+import org.apache.spark.rdd.{RDD, UnionRDD}
 import org.apache.spark.streaming.{Duration, Time}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.rdd.UnionRDD
 
 private[streaming]
 class UnionDStream[T: ClassTag](parents: Array[DStream[T]])

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala
index 4efba03..ee50a8d 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala
@@ -17,13 +17,13 @@
 
 package org.apache.spark.streaming.dstream
 
+import scala.reflect.ClassTag
+
 import org.apache.spark.rdd.{PartitionerAwareUnionRDD, RDD, UnionRDD}
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming._
 import org.apache.spark.streaming.Duration
 
-import scala.reflect.ClassTag
-
 private[streaming]
 class WindowedDStream[T: ClassTag](
     parent: DStream[T],

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/rdd/MapWithStateRDD.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/rdd/MapWithStateRDD.scala b/streaming/src/main/scala/org/apache/spark/streaming/rdd/MapWithStateRDD.scala
index fdf6167..1d2244e 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/rdd/MapWithStateRDD.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/rdd/MapWithStateRDD.scala
@@ -22,11 +22,11 @@ import java.io.{IOException, ObjectInputStream, ObjectOutputStream}
 import scala.collection.mutable.ArrayBuffer
 import scala.reflect.ClassTag
 
+import org.apache.spark._
 import org.apache.spark.rdd.{MapPartitionsRDD, RDD}
-import org.apache.spark.streaming.{Time, StateImpl, State}
+import org.apache.spark.streaming.{State, StateImpl, Time}
 import org.apache.spark.streaming.util.{EmptyStateMap, StateMap}
 import org.apache.spark.util.Utils
-import org.apache.spark._
 
 /**
  * Record storing the keyed-state [[MapWithStateRDD]]. Each record contains a [[StateMap]] and a

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/receiver/BlockGenerator.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/receiver/BlockGenerator.scala b/streaming/src/main/scala/org/apache/spark/streaming/receiver/BlockGenerator.scala
index cc7c04b..109af32 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/receiver/BlockGenerator.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/receiver/BlockGenerator.scala
@@ -21,7 +21,7 @@ import java.util.concurrent.{ArrayBlockingQueue, TimeUnit}
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.apache.spark.{SparkException, Logging, SparkConf}
+import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.storage.StreamBlockId
 import org.apache.spark.streaming.util.RecurringTimer
 import org.apache.spark.util.{Clock, SystemClock}

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
index 5f6c5b0..43c605a 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceivedBlockHandler.scala
@@ -17,18 +17,18 @@
 
 package org.apache.spark.streaming.receiver
 
-import scala.concurrent.duration._
 import scala.concurrent.{Await, ExecutionContext, Future}
+import scala.concurrent.duration._
 import scala.language.{existentials, postfixOps}
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 
+import org.apache.spark.{Logging, SparkConf, SparkException}
 import org.apache.spark.storage._
 import org.apache.spark.streaming.receiver.WriteAheadLogBasedBlockHandler._
 import org.apache.spark.streaming.util.{WriteAheadLogRecordHandle, WriteAheadLogUtils}
 import org.apache.spark.util.{Clock, SystemClock, ThreadUtils}
-import org.apache.spark.{Logging, SparkConf, SparkException}
 
 /** Trait that represents the metadata related to storage of blocks */
 private[streaming] trait ReceivedBlockStoreResult {

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/receiver/Receiver.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/receiver/Receiver.scala b/streaming/src/main/scala/org/apache/spark/streaming/receiver/Receiver.scala
index 2252e28..b081524 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/receiver/Receiver.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/receiver/Receiver.scala
@@ -22,8 +22,8 @@ import java.nio.ByteBuffer
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.JavaConverters._
 
-import org.apache.spark.storage.StorageLevel
 import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.storage.StorageLevel
 
 /**
  * :: DeveloperApi ::

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisor.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisor.scala b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisor.scala
index 158d1ba..c42a9ac 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisor.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisor.scala
@@ -24,9 +24,9 @@ import scala.collection.mutable.ArrayBuffer
 import scala.concurrent._
 import scala.util.control.NonFatal
 
-import org.apache.spark.{SparkEnv, Logging, SparkConf}
+import org.apache.spark.{Logging, SparkConf, SparkEnv}
 import org.apache.spark.storage.StreamBlockId
-import org.apache.spark.util.{Utils, ThreadUtils}
+import org.apache.spark.util.{ThreadUtils, Utils}
 
 /**
  * Abstract class that is responsible for supervising a Receiver in the worker.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala
index 167f56a..b774b6b 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala
@@ -26,13 +26,13 @@ import scala.collection.mutable.ArrayBuffer
 import com.google.common.base.Throwables
 import org.apache.hadoop.conf.Configuration
 
+import org.apache.spark.{Logging, SparkEnv, SparkException}
 import org.apache.spark.rpc.{RpcEnv, ThreadSafeRpcEndpoint}
 import org.apache.spark.storage.StreamBlockId
 import org.apache.spark.streaming.Time
 import org.apache.spark.streaming.scheduler._
 import org.apache.spark.streaming.util.WriteAheadLogUtils
 import org.apache.spark.util.RpcUtils
-import org.apache.spark.{Logging, SparkEnv, SparkException}
 
 /**
  * Concrete implementation of [[org.apache.spark.streaming.receiver.ReceiverSupervisor]]

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/scheduler/InputInfoTracker.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/InputInfoTracker.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/InputInfoTracker.scala
index deb15d0..92da0ce 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/InputInfoTracker.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/InputInfoTracker.scala
@@ -21,7 +21,7 @@ import scala.collection.mutable
 
 import org.apache.spark.Logging
 import org.apache.spark.annotation.DeveloperApi
-import org.apache.spark.streaming.{Time, StreamingContext}
+import org.apache.spark.streaming.{StreamingContext, Time}
 
 /**
  * :: DeveloperApi ::

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/scheduler/Job.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/Job.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/Job.scala
index ab1b356..7050d7e 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/Job.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/Job.scala
@@ -20,7 +20,7 @@ package org.apache.spark.streaming.scheduler
 import scala.util.{Failure, Try}
 
 import org.apache.spark.streaming.Time
-import org.apache.spark.util.{Utils, CallSite}
+import org.apache.spark.util.{CallSite, Utils}
 
 /**
  * Class representing a Spark computation. It may contain multiple Spark jobs.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala
index 8dfdc1f..a5a01e7 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobGenerator.scala
@@ -19,10 +19,10 @@ package org.apache.spark.streaming.scheduler
 
 import scala.util.{Failure, Success, Try}
 
-import org.apache.spark.{SparkEnv, Logging}
+import org.apache.spark.{Logging, SparkEnv}
 import org.apache.spark.streaming.{Checkpoint, CheckpointWriter, Time}
 import org.apache.spark.streaming.util.RecurringTimer
-import org.apache.spark.util.{Utils, Clock, EventLoop, ManualClock}
+import org.apache.spark.util.{Clock, EventLoop, ManualClock, Utils}
 
 /** Event classes for JobGenerator */
 private[scheduler] sealed trait JobGeneratorEvent

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala
index 4dab64d..60b5c83 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala
@@ -27,11 +27,11 @@ import scala.util.control.NonFatal
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.network.util.JavaUtils
 import org.apache.spark.streaming.Time
 import org.apache.spark.streaming.util.{BatchedWriteAheadLog, WriteAheadLog, WriteAheadLogUtils}
 import org.apache.spark.util.{Clock, Utils}
-import org.apache.spark.{Logging, SparkConf}
 
 /** Trait representing any event in the ReceivedBlockTracker that updates its state. */
 private[streaming] sealed trait ReceivedBlockTrackerLogEvent

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala
index ea5d12b..9ddf176 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceiverTracker.scala
@@ -20,14 +20,14 @@ package org.apache.spark.streaming.scheduler
 import java.util.concurrent.{CountDownLatch, TimeUnit}
 
 import scala.collection.mutable.HashMap
-import scala.concurrent.{Future, ExecutionContext}
+import scala.concurrent.{ExecutionContext, Future}
 import scala.language.existentials
 import scala.util.{Failure, Success}
 
 import org.apache.spark._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.rpc._
-import org.apache.spark.scheduler.{TaskLocation, ExecutorCacheTaskLocation}
+import org.apache.spark.scheduler.{ExecutorCacheTaskLocation, TaskLocation}
 import org.apache.spark.streaming.{StreamingContext, Time}
 import org.apache.spark.streaming.receiver._
 import org.apache.spark.streaming.util.WriteAheadLogUtils

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/scheduler/StreamingListener.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/StreamingListener.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/StreamingListener.scala
index d19bdbb..58fc78d 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/StreamingListener.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/StreamingListener.scala
@@ -19,8 +19,8 @@ package org.apache.spark.streaming.scheduler
 
 import scala.collection.mutable.Queue
 
-import org.apache.spark.util.Distribution
 import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.util.Distribution
 
 /**
  * :: DeveloperApi ::

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/ui/BatchPage.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/ui/BatchPage.scala b/streaming/src/main/scala/org/apache/spark/streaming/ui/BatchPage.scala
index bc17119..7635f79 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/ui/BatchPage.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/ui/BatchPage.scala
@@ -25,8 +25,8 @@ import org.apache.commons.lang3.StringEscapeUtils
 
 import org.apache.spark.streaming.Time
 import org.apache.spark.streaming.ui.StreamingJobProgressListener.{OutputOpId, SparkJobId}
-import org.apache.spark.ui.jobs.UIData.JobUIData
 import org.apache.spark.ui.{UIUtils => SparkUIUtils, WebUIPage}
+import org.apache.spark.ui.jobs.UIData.JobUIData
 
 private[ui] case class SparkJobIdWithUIData(sparkJobId: SparkJobId, jobUIData: Option[JobUIData])
 

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala b/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala
index f6cc6ed..4908be0 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala
@@ -17,19 +17,13 @@
 
 package org.apache.spark.streaming.ui
 
-import java.util.LinkedHashMap
-import java.util.{Map => JMap}
-import java.util.Properties
+import java.util.{LinkedHashMap, Map => JMap, Properties}
 
-import scala.collection.mutable.{ArrayBuffer, Queue, HashMap, SynchronizedBuffer}
+import scala.collection.mutable.{ArrayBuffer, HashMap, Queue, SynchronizedBuffer}
 
 import org.apache.spark.scheduler._
-import org.apache.spark.streaming.{Time, StreamingContext}
+import org.apache.spark.streaming.{StreamingContext, Time}
 import org.apache.spark.streaming.scheduler._
-import org.apache.spark.streaming.scheduler.StreamingListenerReceiverStarted
-import org.apache.spark.streaming.scheduler.StreamingListenerBatchStarted
-import org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
-
 
 private[streaming] class StreamingJobProgressListener(ssc: StreamingContext)
   extends StreamingListener with SparkListener {

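The hunk above also collapses three separate java.util import lines into one selector clause; note that a renamed member such as Map => JMap sorts by its original name, landing between LinkedHashMap and Properties. A minimal sketch of the rename idiom, with hypothetical object and value names:

    import java.util.{LinkedHashMap, Map => JMap, Properties}

    // Hypothetical names; the rename keeps the bare name "Map" free for
    // Scala's own collections elsewhere in the same file.
    object RenameSketch {
      val defaults = new Properties()
      val cache: JMap[String, String] = new LinkedHashMap[String, String]()
    }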
http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingTab.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingTab.scala b/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingTab.scala
index bc53f2a..0662c64 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingTab.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingTab.scala
@@ -21,14 +21,14 @@ import org.apache.spark.{Logging, SparkException}
 import org.apache.spark.streaming.StreamingContext
 import org.apache.spark.ui.{SparkUI, SparkUITab}
 
-import StreamingTab._
-
 /**
  * Spark Web UI tab that shows statistics of a streaming job.
  * This assumes the given SparkContext has enabled its SparkUI.
  */
 private[spark] class StreamingTab(val ssc: StreamingContext)
-  extends SparkUITab(getSparkUI(ssc), "streaming") with Logging {
+  extends SparkUITab(StreamingTab.getSparkUI(ssc), "streaming") with Logging {
+
+  import StreamingTab._
 
   private val STATIC_RESOURCE_DIR = "org/apache/spark/streaming/ui/static"
 
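The StreamingTab hunk is the one change in this patch that goes beyond reordering: the file-level "import StreamingTab._", a relative wildcard import of the companion object that the sorted absolute groups have no place for, is dropped in favor of an explicit StreamingTab. prefix in the superclass call plus an import scoped to the class body. A minimal sketch of the same pattern -- Tab and MyTab are illustrative stand-ins for SparkUITab and StreamingTab, not Spark classes:

    // Stand-in for the superclass; illustrative only.
    class Tab(val name: String)

    object MyTab {
      // Companion helper needed while constructing MyTab below.
      def resolveName(raw: String): String = raw.trim.toLowerCase
    }

    class MyTab(raw: String)
      // Qualified explicitly, so no file-level wildcard import is needed.
      extends Tab(MyTab.resolveName(raw)) {

      // A scoped import restores unqualified access inside the body.
      import MyTab._

      def describe(): String = "tab: " + resolveName(raw)
    }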

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/ui/UIUtils.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/ui/UIUtils.scala b/streaming/src/main/scala/org/apache/spark/streaming/ui/UIUtils.scala
index d89f7ad..a485a46 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/ui/UIUtils.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/ui/UIUtils.scala
@@ -17,14 +17,14 @@
 
 package org.apache.spark.streaming.ui
 
-import scala.xml.Node
-
-import org.apache.commons.lang3.StringEscapeUtils
-
 import java.text.SimpleDateFormat
 import java.util.TimeZone
 import java.util.concurrent.TimeUnit
 
+import scala.xml.Node
+
+import org.apache.commons.lang3.StringEscapeUtils
+
 private[streaming] object UIUtils {
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/util/BatchedWriteAheadLog.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/BatchedWriteAheadLog.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/BatchedWriteAheadLog.scala
index b2cd524..8cb45cd 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/BatchedWriteAheadLog.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/BatchedWriteAheadLog.scala
@@ -18,8 +18,8 @@
 package org.apache.spark.streaming.util
 
 import java.nio.ByteBuffer
-import java.util.concurrent.LinkedBlockingQueue
 import java.util.{Iterator => JIterator}
+import java.util.concurrent.LinkedBlockingQueue
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala
index b946e0d..9418bee 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala
@@ -17,8 +17,8 @@
 package org.apache.spark.streaming.util
 
 import java.nio.ByteBuffer
-import java.util.concurrent.{RejectedExecutionException, ThreadPoolExecutor}
 import java.util.{Iterator => JIterator}
+import java.util.concurrent.{RejectedExecutionException, ThreadPoolExecutor}
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
@@ -29,8 +29,8 @@ import scala.language.postfixOps
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 
-import org.apache.spark.util.{CompletionIterator, ThreadUtils}
 import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.util.{CompletionIterator, ThreadUtils}
 
 /**
  * This class manages write ahead log files.

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLogReader.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLogReader.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLogReader.scala
index a375c07..e79b139 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLogReader.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLogReader.scala
@@ -16,10 +16,11 @@
  */
 package org.apache.spark.streaming.util
 
-import java.io.{IOException, Closeable, EOFException}
+import java.io.{Closeable, EOFException, IOException}
 import java.nio.ByteBuffer
 
 import org.apache.hadoop.conf.Configuration
+
 import org.apache.spark.Logging
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala
index a96e292..5c3c7a6 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala
@@ -17,13 +17,12 @@
 
 package org.apache.spark.streaming.util
 
-import scala.annotation.tailrec
-
 import java.io.OutputStream
 import java.util.concurrent.TimeUnit._
 
-import org.apache.spark.Logging
+import scala.annotation.tailrec
 
+import org.apache.spark.Logging
 
 private[streaming]
 class RateLimitedOutputStream(out: OutputStream, desiredBytesPerSec: Int)

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
index 6addb96..e48eaf7 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
@@ -23,7 +23,7 @@ import java.nio.ByteBuffer
 
 import scala.io.Source
 
-import org.apache.spark.{SparkConf, Logging}
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.util.IntParam
 

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/streaming/src/main/scala/org/apache/spark/streaming/util/WriteAheadLogUtils.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/WriteAheadLogUtils.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/WriteAheadLogUtils.scala
index 7f9e2c9..ed616d8 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/WriteAheadLogUtils.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/WriteAheadLogUtils.scala
@@ -21,8 +21,8 @@ import scala.util.control.NonFatal
 
 import org.apache.hadoop.conf.Configuration
 
-import org.apache.spark.util.Utils
 import org.apache.spark.{Logging, SparkConf, SparkException}
+import org.apache.spark.util.Utils
 
 /** A helper class with utility functions related to the WriteAheadLog interface */
 private[streaming] object WriteAheadLogUtils extends Logging {

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
index 5155daa..a947fac 100644
--- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
@@ -23,8 +23,8 @@ import java.util.jar.JarFile
 
 import scala.collection.mutable
 import scala.collection.JavaConverters._
-import scala.reflect.runtime.universe.runtimeMirror
 import scala.reflect.runtime.{universe => unv}
+import scala.reflect.runtime.universe.runtimeMirror
 import scala.util.Try
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala b/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
index 856ea17..6fb7184 100644
--- a/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
@@ -17,16 +17,16 @@
 
 package org.apache.spark.tools
 
-import java.lang.reflect.{Type, Method}
+import java.lang.reflect.{Method, Type}
 
 import scala.collection.mutable.ArrayBuffer
 import scala.language.existentials
 
 import org.apache.spark._
 import org.apache.spark.api.java._
-import org.apache.spark.rdd.{RDD, DoubleRDDFunctions, PairRDDFunctions, OrderedRDDFunctions}
+import org.apache.spark.rdd.{DoubleRDDFunctions, OrderedRDDFunctions, PairRDDFunctions, RDD}
 import org.apache.spark.streaming.StreamingContext
-import org.apache.spark.streaming.api.java.{JavaPairDStream, JavaDStream, JavaStreamingContext}
+import org.apache.spark.streaming.api.java.{JavaDStream, JavaPairDStream, JavaStreamingContext}
 import org.apache.spark.streaming.dstream.{DStream, PairDStreamFunctions}
 
 

http://git-wip-us.apache.org/repos/asf/spark/blob/efb10cc9/tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala b/tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala
index 0dc2861..8a5c7c0 100644
--- a/tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/StoragePerfTester.scala
@@ -20,8 +20,8 @@ package org.apache.spark.tools
 import java.util.concurrent.{CountDownLatch, Executors}
 import java.util.concurrent.atomic.AtomicLong
 
-import org.apache.spark.executor.ShuffleWriteMetrics
 import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.executor.ShuffleWriteMetrics
 import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.shuffle.hash.HashShuffleManager
 import org.apache.spark.util.Utils

