Posted to commits@spark.apache.org by va...@apache.org on 2017/08/15 20:59:10 UTC

[3/3] spark git commit: [SPARK-21731][BUILD] Upgrade scalastyle to 0.9.

[SPARK-21731][BUILD] Upgrade scalastyle to 0.9.

This version fixes a few issues in the import order checker: it provides
better error messages and detects more cases of improper ordering (hence
the need to change so many files in this patch). The main fix is that it
now correctly complains about the relative order of packages vs. classes.
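
To make the new rule concrete, here is a minimal sketch, assuming
Spark's convention of ASCII-sorting imports within each group, which
places capitalized class names before lowercase package names (the
first hunk below, in JavaDoubleRDD.scala, is exactly this fix):

    // Flagged by scalastyle 0.9: a package import precedes a class
    // import under the same parent package.
    import org.apache.spark.annotation.Since
    import org.apache.spark.Partitioner

    // Accepted: ASCII order puts "Partitioner" before "annotation".
    import org.apache.spark.Partitioner
    import org.apache.spark.annotation.Since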

As part of the above, I moved some "SparkSession" imports in ML examples
inside the "$example on$" blocks; their placement wasn't consistent
across source files to begin with, and this change avoids having to add
more on/off blocks around specific imports.
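
For illustration, a sketch of the resulting layout, taken from the
MinHashLSHExample hunk below (the "$example on$" / "$example off$"
markers delimit the code that gets extracted into the documentation, so
any import the snippet uses has to sit inside them):

    // $example on$
    import org.apache.spark.ml.feature.MinHashLSH
    import org.apache.spark.ml.linalg.Vectors
    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.col
    // $example off$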

The new scalastyle also seems to have a better header detector, so a few
license headers had to be updated to match the expected indentation.
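
Concretely, the header checker expects the standard one-space asterisk
indentation inside the license comment, so each affected line is a
one-character change (this sample is verbatim from the hunks below):

    -* See the License for the specific language governing permissions and
    + * See the License for the specific language governing permissions and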

Author: Marcelo Vanzin <va...@cloudera.com>

Closes #18943 from vanzin/SPARK-21731.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3f958a99
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3f958a99
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3f958a99

Branch: refs/heads/master
Commit: 3f958a99921d149fb9fdf7ba7e78957afdad1405
Parents: cba826d
Author: Marcelo Vanzin <va...@cloudera.com>
Authored: Tue Aug 15 13:59:00 2017 -0700
Committer: Marcelo Vanzin <va...@cloudera.com>
Committed: Tue Aug 15 13:59:00 2017 -0700

----------------------------------------------------------------------
 .../apache/spark/api/java/JavaDoubleRDD.scala   |  2 +-
 .../spark/api/python/PythonWorkerFactory.scala  |  2 +-
 .../apache/spark/api/r/JVMObjectTracker.scala   |  2 +-
 .../apache/spark/api/r/RBackendHandler.scala    |  2 +-
 .../org/apache/spark/deploy/PythonRunner.scala  |  2 +-
 .../apache/spark/deploy/SparkHadoopUtil.scala   |  2 +-
 .../deploy/rest/RestSubmissionServer.scala      |  2 +-
 .../deploy/rest/StandaloneRestServer.scala      |  2 +-
 .../spark/deploy/worker/WorkerArguments.scala   |  2 +-
 .../apache/spark/input/PortableDataStream.scala |  2 +-
 .../spark/input/WholeTextFileInputFormat.scala  |  5 +--
 .../apache/spark/metrics/MetricsSystem.scala    |  2 +-
 .../org/apache/spark/rdd/CoGroupedRDD.scala     |  2 +-
 .../apache/spark/rdd/DoubleRDDFunctions.scala   |  2 +-
 .../org/apache/spark/rdd/PairRDDFunctions.scala |  2 +-
 .../spark/rdd/PartitionwiseSampledRDD.scala     |  2 +-
 .../org/apache/spark/rdd/coalesce-public.scala  |  2 +-
 .../apache/spark/scheduler/DAGScheduler.scala   |  2 +-
 .../apache/spark/scheduler/TaskSetManager.scala |  2 +-
 .../status/api/v1/AllExecutorListResource.scala | 30 ++++++++---------
 .../status/api/v1/ExecutorListResource.scala    | 30 ++++++++---------
 .../storage/BlockManagerMasterEndpoint.scala    |  2 +-
 .../scala/org/apache/spark/ui/JettyUtils.scala  |  2 +-
 .../org/apache/spark/ui/exec/ExecutorsTab.scala |  2 +-
 .../org/apache/spark/DistributedSuite.scala     |  2 +-
 .../org/apache/spark/JobCancellationSuite.scala |  2 +-
 .../apache/spark/MapOutputTrackerSuite.scala    |  2 +-
 .../org/apache/spark/SparkContextSuite.scala    |  2 +-
 .../apache/spark/deploy/SparkSubmitSuite.scala  |  4 +--
 .../deploy/rest/StandaloneRestSubmitSuite.scala |  2 +-
 .../netty/NettyBlockTransferSecuritySuite.scala |  2 +-
 .../apache/spark/rdd/AsyncRDDActionsSuite.scala |  2 +-
 .../org/apache/spark/rpc/RpcEnvSuite.scala      |  2 +-
 .../spark/scheduler/BlacklistTrackerSuite.scala |  2 +-
 .../apache/spark/scheduler/MapStatusSuite.scala |  2 +-
 .../spark/scheduler/SparkListenerSuite.scala    |  2 +-
 .../spark/scheduler/TaskResultGetterSuite.scala |  2 +-
 .../scheduler/TaskSchedulerImplSuite.scala      |  2 +-
 .../spark/scheduler/TaskSetManagerSuite.scala   |  2 +-
 .../spark/storage/BlockManagerSuite.scala       |  2 +-
 .../apache/spark/storage/DiskStoreSuite.scala   |  2 +-
 .../apache/spark/util/JsonProtocolSuite.scala   |  2 +-
 .../ml/BucketedRandomProjectionLSHExample.scala |  2 +-
 .../spark/examples/ml/MinHashLSHExample.scala   |  2 +-
 .../spark/examples/ml/TokenizerExample.scala    |  2 +-
 .../examples/ml/UnaryTransformerExample.scala   |  4 +--
 .../spark/examples/ml/VectorSlicerExample.scala |  3 +-
 .../spark/examples/mllib/TallSkinnyPCA.scala    |  2 +-
 .../spark/examples/mllib/TallSkinnySVD.scala    |  2 +-
 .../pythonconverters/AvroConverters.scala       |  6 ++--
 .../sql/UserDefinedTypedAggregation.scala       |  4 +--
 .../sql/UserDefinedUntypedAggregation.scala     |  3 +-
 .../streaming/DirectKafkaWordCount.scala        |  2 +-
 .../spark/streaming/flume/FlumeTestUtils.scala  |  2 +-
 .../streaming/flume/PollingFlumeTestUtils.scala |  2 +-
 .../spark/sql/kafka010/KafkaTestUtils.scala     |  2 +-
 .../kafka010/DirectKafkaStreamSuite.scala       |  2 +-
 .../kafka/DirectKafkaStreamSuite.scala          |  2 +-
 .../streaming/kinesis/KinesisInputDStream.scala |  3 +-
 .../org/apache/spark/graphx/GraphOps.scala      |  2 +-
 .../org/apache/spark/ml/impl/UtilsSuite.scala   |  2 +-
 .../apache/spark/ml/feature/Interaction.scala   |  4 +--
 .../spark/ml/feature/SQLTransformer.scala       |  2 +-
 .../org/apache/spark/ml/param/params.scala      |  2 +-
 .../spark/ml/regression/LinearRegression.scala  |  2 +-
 .../spark/ml/source/libsvm/LibSVMRelation.scala |  2 +-
 .../mllib/clustering/BisectingKMeansModel.scala |  2 +-
 .../spark/mllib/feature/ChiSqSelector.scala     |  2 +-
 .../mllib/regression/IsotonicRegression.scala   |  3 +-
 .../spark/mllib/regression/LabeledPoint.scala   |  2 +-
 .../spark/ml/recommendation/ALSSuite.scala      |  2 +-
 pom.xml                                         |  2 +-
 project/SparkBuild.scala                        |  4 +--
 project/plugins.sbt                             |  2 +-
 .../mesos/MesosClusterDispatcherArguments.scala |  2 +-
 .../deploy/mesos/ui/MesosClusterPage.scala      |  2 +-
 .../cluster/mesos/MesosClusterScheduler.scala   |  2 +-
 ...esosCoarseGrainedSchedulerBackendSuite.scala |  4 +--
 ...ityPreferredContainerPlacementStrategy.scala |  2 +-
 .../spark/deploy/yarn/YarnAllocator.scala       |  2 +-
 .../spark/deploy/yarn/YarnAllocatorSuite.scala  |  2 +-
 .../yarn/YarnShuffleIntegrationSuite.scala      | 30 ++++++++---------
 .../spark/sql/catalyst/analysis/Analyzer.scala  |  2 +-
 .../sql/catalyst/analysis/CheckAnalysis.scala   |  2 +-
 .../sql/catalyst/encoders/RowEncoder.scala      |  4 +--
 .../spark/sql/catalyst/errors/package.scala     |  2 +-
 .../expressions/aggregate/Percentile.scala      |  2 +-
 .../expressions/aggregate/collect.scala         |  2 +-
 .../spark/sql/catalyst/expressions/hash.scala   |  2 +-
 .../catalyst/expressions/jsonExpressions.scala  |  4 +--
 .../catalyst/expressions/mathExpressions.scala  |  2 +-
 .../sql/catalyst/optimizer/expressions.scala    |  2 +-
 .../spark/sql/catalyst/optimizer/subquery.scala |  2 +-
 .../sql/catalyst/plans/logical/Statistics.scala |  2 +-
 .../spark/sql/catalyst/trees/TreeNode.scala     |  2 +-
 .../org/apache/spark/sql/types/StructType.scala |  2 +-
 .../analysis/DecimalPrecisionSuite.scala        |  2 +-
 .../encoders/EncoderResolutionSuite.scala       |  2 +-
 .../optimizer/BooleanSimplificationSuite.scala  |  2 +-
 .../optimizer/CombiningLimitsSuite.scala        |  2 +-
 .../optimizer/EliminateSerializationSuite.scala |  2 +-
 .../optimizer/LikeSimplificationSuite.scala     |  2 +-
 .../catalyst/optimizer/OptimizeInSuite.scala    |  2 +-
 .../SimplifyStringCaseConversionSuite.scala     |  2 +-
 .../catalyst/parser/DataTypeParserSuite.scala   | 30 ++++++++---------
 .../parser/TableSchemaParserSuite.scala         | 30 ++++++++---------
 .../sql/catalyst/trees/TreeNodeSuite.scala      |  8 ++---
 .../sql/catalyst/util/UnsafeArraySuite.scala    |  2 +-
 .../apache/spark/sql/types/DataTypeSuite.scala  | 30 ++++++++---------
 .../scala/org/apache/spark/sql/Column.scala     | 30 ++++++++---------
 .../apache/spark/sql/DataFrameNaFunctions.scala | 30 ++++++++---------
 .../org/apache/spark/sql/DataFrameReader.scala  | 34 ++++++++++----------
 .../spark/sql/DataFrameStatFunctions.scala      | 30 ++++++++---------
 .../org/apache/spark/sql/DatasetHolder.scala    | 30 ++++++++---------
 .../org/apache/spark/sql/api/package.scala      | 30 ++++++++---------
 .../org/apache/spark/sql/api/r/SQLUtils.scala   |  2 +-
 .../spark/sql/execution/CacheManager.scala      |  3 +-
 .../execution/aggregate/HashAggregateExec.scala |  2 +-
 .../execution/aggregate/SortAggregateExec.scala |  2 +-
 .../sql/execution/arrow/ArrowConverters.scala   | 30 ++++++++---------
 .../sql/execution/command/CommandUtils.scala    | 30 ++++++++---------
 .../spark/sql/execution/command/tables.scala    |  2 +-
 .../datasources/FileFormatWriter.scala          |  2 +-
 .../datasources/InMemoryFileIndex.scala         |  2 +-
 .../datasources/jdbc/JDBCRelation.scala         |  2 +-
 .../spark/sql/execution/metric/SQLMetrics.scala | 30 ++++++++---------
 .../apache/spark/sql/execution/objects.scala    |  5 ++-
 .../execution/python/BatchEvalPythonExec.scala  | 30 ++++++++---------
 .../sql/execution/python/EvaluatePython.scala   | 30 ++++++++---------
 .../execution/python/ExtractPythonUDFs.scala    | 30 ++++++++---------
 .../spark/sql/execution/python/RowQueue.scala   | 30 ++++++++---------
 .../python/UserDefinedPythonFunction.scala      |  2 +-
 .../sql/execution/r/MapPartitionsRWrapper.scala |  2 +-
 .../sql/execution/stat/FrequentItems.scala      | 30 ++++++++---------
 .../execution/streaming/HDFSMetadataLog.scala   | 30 ++++++++---------
 .../streaming/IncrementalExecution.scala        | 30 ++++++++---------
 .../sql/execution/streaming/MetadataLog.scala   | 30 ++++++++---------
 .../sql/execution/streaming/OffsetSeqLog.scala  | 30 ++++++++---------
 .../execution/streaming/ProgressReporter.scala  |  2 +-
 .../spark/sql/execution/streaming/console.scala |  5 ++-
 .../sql/expressions/UserDefinedFunction.scala   | 32 +++++++++---------
 .../spark/sql/DataFrameImplicitsSuite.scala     | 30 ++++++++---------
 .../scala/org/apache/spark/sql/JoinSuite.scala  |  4 +--
 .../scala/org/apache/spark/sql/RowSuite.scala   | 30 ++++++++---------
 .../org/apache/spark/sql/SQLContextSuite.scala  | 32 +++++++++---------
 .../apache/spark/sql/api/r/SQLUtilsSuite.scala  | 30 ++++++++---------
 .../sql/execution/SQLWindowFunctionSuite.scala  |  2 +-
 .../execution/UnsafeRowSerializerSuite.scala    |  2 +-
 .../SortBasedAggregationStoreSuite.scala        |  2 +-
 .../execution/datasources/csv/CSVSuite.scala    |  2 +-
 .../execution/datasources/json/JsonSuite.scala  |  2 +-
 .../parquet/ParquetReadBenchmark.scala          |  2 +-
 .../execution/joins/BroadcastJoinSuite.scala    |  2 +-
 .../CompactibleFileStreamLogSuite.scala         |  2 +-
 .../sql/execution/ui/SQLListenerSuite.scala     |  2 +-
 .../vectorized/ColumnarBatchSuite.scala         |  2 +-
 .../spark/sql/internal/SQLConfSuite.scala       | 30 ++++++++---------
 .../spark/sql/sources/DDLSourceLoadSuite.scala  | 30 ++++++++---------
 .../spark/sql/sources/DataSourceTest.scala      | 30 ++++++++---------
 .../spark/sql/sources/FilteredScanSuite.scala   | 30 ++++++++---------
 .../spark/sql/sources/PathOptionSuite.scala     | 30 ++++++++---------
 .../spark/sql/sources/PrunedScanSuite.scala     | 30 ++++++++---------
 .../sql/sources/ResolvedDataSourceSuite.scala   | 30 ++++++++---------
 .../spark/sql/sources/TableScanSuite.scala      | 30 ++++++++---------
 .../spark/sql/sources/fakeExternalSources.scala | 30 ++++++++---------
 .../streaming/StreamingQueryListenerSuite.scala |  4 +--
 .../sql/streaming/StreamingQuerySuite.scala     |  6 ++--
 .../apache/spark/sql/test/SQLTestUtils.scala    |  2 +-
 .../thriftserver/HiveThriftServer2Suites.scala  |  4 +--
 .../org/apache/spark/sql/hive/HiveUtils.scala   |  2 +-
 .../apache/spark/sql/hive/client/HiveShim.scala |  2 +-
 .../hive/execution/InsertIntoHiveTable.scala    |  4 +--
 .../hive/HiveContextCompatibilitySuite.scala    | 30 ++++++++---------
 .../spark/sql/hive/HiveInspectorSuite.scala     |  2 +-
 .../sql/hive/HiveSchemaInferenceSuite.scala     |  2 +-
 .../apache/spark/sql/hive/HiveUtilsSuite.scala  |  2 +-
 .../apache/spark/sql/hive/ListTablesSuite.scala | 30 ++++++++---------
 .../hive/PartitionedTablePerfStatsSuite.scala   |  2 +-
 .../spark/sql/hive/execution/HiveDDLSuite.scala |  2 +-
 .../spark/sql/hive/execution/HivePlanTest.scala |  2 +-
 .../spark/streaming/receiver/Receiver.scala     |  2 +-
 .../receiver/ReceiverSupervisorImpl.scala       |  2 +-
 .../streaming/util/BatchedWriteAheadLog.scala   |  2 +-
 .../spark/streaming/BasicOperationsSuite.scala  |  3 +-
 184 files changed, 766 insertions(+), 783 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
index b71af0d..b6df566 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
@@ -22,8 +22,8 @@ import java.lang.{Double => JDouble}
 import scala.language.implicitConversions
 import scala.reflect.ClassTag
 
-import org.apache.spark.annotation.Since
 import org.apache.spark.Partitioner
+import org.apache.spark.annotation.Since
 import org.apache.spark.api.java.function.{Function => JFunction}
 import org.apache.spark.partial.{BoundedDouble, PartialResult}
 import org.apache.spark.rdd.RDD

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
index 6a5e6f7..fc595ae 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
@@ -22,8 +22,8 @@ import java.net.{InetAddress, ServerSocket, Socket, SocketException}
 import java.nio.charset.StandardCharsets
 import java.util.Arrays
 
-import scala.collection.mutable
 import scala.collection.JavaConverters._
+import scala.collection.mutable
 
 import org.apache.spark._
 import org.apache.spark.internal.Logging

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/api/r/JVMObjectTracker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/r/JVMObjectTracker.scala b/core/src/main/scala/org/apache/spark/api/r/JVMObjectTracker.scala
index fe7438a..b8c4ff9 100644
--- a/core/src/main/scala/org/apache/spark/api/r/JVMObjectTracker.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/JVMObjectTracker.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.api.r
 
-import java.util.concurrent.atomic.AtomicInteger
 import java.util.concurrent.ConcurrentHashMap
+import java.util.concurrent.atomic.AtomicInteger
 
 /** JVM object ID wrapper */
 private[r] case class JVMObjectId(id: String) {

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala b/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
index cfd37ac..18fc595 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
@@ -26,9 +26,9 @@ import io.netty.channel.{ChannelHandlerContext, SimpleChannelInboundHandler}
 import io.netty.channel.ChannelHandler.Sharable
 import io.netty.handler.timeout.ReadTimeoutException
 
+import org.apache.spark.SparkConf
 import org.apache.spark.api.r.SerDe._
 import org.apache.spark.internal.Logging
-import org.apache.spark.SparkConf
 import org.apache.spark.util.{ThreadUtils, Utils}
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala b/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala
index a8f732b..7aca305 100644
--- a/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala
@@ -20,8 +20,8 @@ package org.apache.spark.deploy
 import java.io.File
 import java.net.URI
 
-import scala.collection.mutable.ArrayBuffer
 import scala.collection.JavaConverters._
+import scala.collection.mutable.ArrayBuffer
 import scala.util.Try
 
 import org.apache.spark.{SparkConf, SparkUserAppException}

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index eeb6d10..e26f61d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -22,8 +22,8 @@ import java.security.PrivilegedExceptionAction
 import java.text.DateFormat
 import java.util.{Arrays, Comparator, Date, Locale}
 
-import scala.collection.immutable.Map
 import scala.collection.JavaConverters._
+import scala.collection.immutable.Map
 import scala.collection.mutable
 import scala.collection.mutable.HashMap
 import scala.util.control.NonFatal

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
index b30c980..e88195d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
@@ -11,7 +11,7 @@
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
+ * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
index 5662006..0164084 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
@@ -11,7 +11,7 @@
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
+ * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index bd07d34..5802812 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -21,8 +21,8 @@ import java.lang.management.ManagementFactory
 
 import scala.annotation.tailrec
 
-import org.apache.spark.util.{IntParam, MemoryParam, Utils}
 import org.apache.spark.SparkConf
+import org.apache.spark.util.{IntParam, MemoryParam, Utils}
 
 /**
  * Command-line parser for the worker.

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/input/PortableDataStream.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/input/PortableDataStream.scala b/core/src/main/scala/org/apache/spark/input/PortableDataStream.scala
index 9606c47..17cdba4 100644
--- a/core/src/main/scala/org/apache/spark/input/PortableDataStream.scala
+++ b/core/src/main/scala/org/apache/spark/input/PortableDataStream.scala
@@ -27,9 +27,9 @@ import org.apache.hadoop.fs.Path
 import org.apache.hadoop.mapreduce.{InputSplit, JobContext, RecordReader, TaskAttemptContext}
 import org.apache.hadoop.mapreduce.lib.input.{CombineFileInputFormat, CombineFileRecordReader, CombineFileSplit}
 
-import org.apache.spark.internal.config
 import org.apache.spark.SparkContext
 import org.apache.spark.annotation.Since
+import org.apache.spark.internal.config
 
 /**
  * A general format for reading whole files in as streams, byte arrays,

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/input/WholeTextFileInputFormat.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/input/WholeTextFileInputFormat.scala b/core/src/main/scala/org/apache/spark/input/WholeTextFileInputFormat.scala
index fa34f1e..f47cd38 100644
--- a/core/src/main/scala/org/apache/spark/input/WholeTextFileInputFormat.scala
+++ b/core/src/main/scala/org/apache/spark/input/WholeTextFileInputFormat.scala
@@ -21,11 +21,8 @@ import scala.collection.JavaConverters._
 
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.io.Text
-import org.apache.hadoop.mapreduce.InputSplit
-import org.apache.hadoop.mapreduce.JobContext
+import org.apache.hadoop.mapreduce.{InputSplit, JobContext, RecordReader, TaskAttemptContext}
 import org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat
-import org.apache.hadoop.mapreduce.RecordReader
-import org.apache.hadoop.mapreduce.TaskAttemptContext
 
 /**
  * A [[org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat CombineFileInputFormat]] for

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index 1d49450..3457a26 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -26,8 +26,8 @@ import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
 import org.eclipse.jetty.servlet.ServletContextHandler
 
 import org.apache.spark.{SecurityManager, SparkConf}
-import org.apache.spark.internal.config._
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config._
 import org.apache.spark.metrics.sink.{MetricsServlet, Sink}
 import org.apache.spark.metrics.source.{Source, StaticSources}
 import org.apache.spark.util.Utils

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
index a091f06..4574c37 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
@@ -26,8 +26,8 @@ import scala.reflect.ClassTag
 import org.apache.spark._
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.serializer.Serializer
-import org.apache.spark.util.collection.{CompactBuffer, ExternalAppendOnlyMap}
 import org.apache.spark.util.Utils
+import org.apache.spark.util.collection.{CompactBuffer, ExternalAppendOnlyMap}
 
 /**
  * The references to rdd and splitIndex are transient because redundant information is stored

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
index 14331df..57782c0 100644
--- a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.rdd
 
-import org.apache.spark.annotation.Since
 import org.apache.spark.TaskContext
+import org.apache.spark.annotation.Since
 import org.apache.spark.internal.Logging
 import org.apache.spark.partial.BoundedDouble
 import org.apache.spark.partial.MeanEvaluator

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
index 4628fa8..e68c6b1 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
@@ -35,8 +35,8 @@ import org.apache.hadoop.mapreduce.{Job => NewAPIHadoopJob, OutputFormat => NewO
 import org.apache.spark._
 import org.apache.spark.Partitioner.defaultPartitioner
 import org.apache.spark.annotation.Experimental
-import org.apache.spark.internal.io._
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.io._
 import org.apache.spark.partial.{BoundedDouble, PartialResult}
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.util.{SerializableConfiguration, SerializableJobConf, Utils}

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
index 6a89ea8..15691a8 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala
@@ -22,8 +22,8 @@ import java.util.Random
 import scala.reflect.ClassTag
 
 import org.apache.spark.{Partition, TaskContext}
-import org.apache.spark.util.random.RandomSampler
 import org.apache.spark.util.Utils
+import org.apache.spark.util.random.RandomSampler
 
 private[spark]
 class PartitionwiseSampledRDDPartition(val prev: Partition, val seed: Long)

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/rdd/coalesce-public.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/coalesce-public.scala b/core/src/main/scala/org/apache/spark/rdd/coalesce-public.scala
index e00bc22..1f8ab78 100644
--- a/core/src/main/scala/org/apache/spark/rdd/coalesce-public.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/coalesce-public.scala
@@ -19,8 +19,8 @@ package org.apache.spark.rdd
 
 import scala.collection.mutable
 
-import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.Partition
+import org.apache.spark.annotation.DeveloperApi
 
 /**
  * ::DeveloperApi::

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
index 89b4cab..21bf9d0 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -35,8 +35,8 @@ import org.apache.commons.lang3.SerializationUtils
 import org.apache.spark._
 import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.executor.TaskMetrics
-import org.apache.spark.internal.config
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config
 import org.apache.spark.network.util.JavaUtils
 import org.apache.spark.partial.{ApproximateActionListener, ApproximateEvaluator, PartialResult}
 import org.apache.spark.rdd.RDD

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
index c251071..c2f8178 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
@@ -26,9 +26,9 @@ import scala.math.max
 import scala.util.control.NonFatal
 
 import org.apache.spark._
+import org.apache.spark.TaskState.TaskState
 import org.apache.spark.internal.Logging
 import org.apache.spark.scheduler.SchedulingMode._
-import org.apache.spark.TaskState.TaskState
 import org.apache.spark.util.{AccumulatorV2, Clock, SystemClock, Utils}
 import org.apache.spark.util.collection.MedianHeap
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/status/api/v1/AllExecutorListResource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/AllExecutorListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/AllExecutorListResource.scala
index 01f2a18..eb5cc1b 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/AllExecutorListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/AllExecutorListResource.scala
@@ -1,19 +1,19 @@
 /*
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements.  See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License.  You may obtain a copy of the License at
-*
-*    http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.spark.status.api.v1
 
 import javax.ws.rs.{GET, Produces}

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
index ab53881..2f3b5e9 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
@@ -1,19 +1,19 @@
 /*
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements.  See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License.  You may obtain a copy of the License at
-*
-*    http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.spark.status.api.v1
 
 import javax.ws.rs.{GET, Produces}

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala
index 6f85b9e..df0a5f5 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala
@@ -19,8 +19,8 @@ package org.apache.spark.storage
 
 import java.util.{HashMap => JHashMap}
 
-import scala.collection.mutable
 import scala.collection.JavaConverters._
+import scala.collection.mutable
 import scala.concurrent.{ExecutionContext, Future}
 import scala.util.Random
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index 52b7ab6..3ddaac7 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -25,8 +25,8 @@ import scala.collection.mutable.ArrayBuffer
 import scala.language.implicitConversions
 import scala.xml.Node
 
-import org.eclipse.jetty.client.api.Response
 import org.eclipse.jetty.client.HttpClient
+import org.eclipse.jetty.client.api.Response
 import org.eclipse.jetty.client.http.HttpClientTransportOverHTTP
 import org.eclipse.jetty.proxy.ProxyServlet
 import org.eclipse.jetty.server._

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsTab.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsTab.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsTab.scala
index 770da22..64a1a29 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsTab.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsTab.scala
@@ -11,7 +11,7 @@
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
+ * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/DistributedSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index 84f7f1f..2337238 100644
--- a/core/src/test/scala/org/apache/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark
 
-import org.scalatest.concurrent.Timeouts._
 import org.scalatest.Matchers
+import org.scalatest.concurrent.Timeouts._
 import org.scalatest.time.{Millis, Span}
 
 import org.apache.spark.security.EncryptionFunSuite

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
index 99150a1..8a77aea 100644
--- a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark
 import java.util.concurrent.Semaphore
 
 import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration._
 import scala.concurrent.Future
+import scala.concurrent.duration._
 
 import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index 493ae51..ebd826b 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -22,8 +22,8 @@ import scala.collection.mutable.ArrayBuffer
 import org.mockito.Matchers.any
 import org.mockito.Mockito._
 
-import org.apache.spark.broadcast.BroadcastManager
 import org.apache.spark.LocalSparkContext._
+import org.apache.spark.broadcast.BroadcastManager
 import org.apache.spark.rpc.{RpcAddress, RpcCallContext, RpcEnv}
 import org.apache.spark.scheduler.{CompressedMapStatus, MapStatus}
 import org.apache.spark.shuffle.FetchFailedException

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 979270a..890e93d 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -30,8 +30,8 @@ import org.apache.hadoop.fs.{FileSystem, Path}
 import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
-import org.scalatest.concurrent.Eventually
 import org.scalatest.Matchers._
+import org.scalatest.concurrent.Eventually
 
 import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart, SparkListenerTaskEnd, SparkListenerTaskStart}
 import org.apache.spark.util.{ThreadUtils, Utils}

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 97357cd..08ba41f 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -35,12 +35,12 @@ import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
 
 import org.apache.spark._
+import org.apache.spark.TestUtils.JavaSourceFromString
 import org.apache.spark.api.r.RUtils
 import org.apache.spark.deploy.SparkSubmit._
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
-import org.apache.spark.internal.config._
 import org.apache.spark.internal.Logging
-import org.apache.spark.TestUtils.JavaSourceFromString
+import org.apache.spark.internal.config._
 import org.apache.spark.scheduler.EventLoggingListener
 import org.apache.spark.util.{CommandLineUtils, ResetSystemProperties, Utils}
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
index dd50e33..70887dc 100644
--- a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
@@ -11,7 +11,7 @@
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
+ * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
index 474e301..e4c133c 100644
--- a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferSecuritySuite.scala
@@ -28,8 +28,8 @@ import scala.util.{Failure, Success, Try}
 
 import com.google.common.io.CharStreams
 import org.mockito.Mockito._
-import org.scalatest.mock.MockitoSugar
 import org.scalatest.ShouldMatchers
+import org.scalatest.mock.MockitoSugar
 
 import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.network.{BlockDataManager, BlockTransferService}

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
index b29a53c..8f639ee 100644
--- a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark.rdd
 import java.util.concurrent.Semaphore
 
 import scala.concurrent._
-import scala.concurrent.duration.Duration
 import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.duration.Duration
 
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.concurrent.Timeouts

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
index 59d8c14..a799b1c 100644
--- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -22,8 +22,8 @@ import java.nio.charset.StandardCharsets.UTF_8
 import java.util.UUID
 import java.util.concurrent.{ConcurrentLinkedQueue, CountDownLatch, TimeUnit}
 
-import scala.collection.mutable
 import scala.collection.JavaConverters._
+import scala.collection.mutable
 import scala.concurrent.Await
 import scala.concurrent.duration._
 import scala.language.postfixOps

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala
index 7ff03c4..520d85a 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/BlacklistTrackerSuite.scala
@@ -17,9 +17,9 @@
 
 package org.apache.spark.scheduler
 
-import org.mockito.invocation.InvocationOnMock
 import org.mockito.Matchers.any
 import org.mockito.Mockito.{never, verify, when}
+import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.mock.MockitoSugar

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
index 276169e..144e5af 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/MapStatusSuite.scala
@@ -25,8 +25,8 @@ import org.mockito.Mockito._
 import org.roaringbitmap.RoaringBitmap
 
 import org.apache.spark.{SparkConf, SparkContext, SparkEnv, SparkFunSuite}
-import org.apache.spark.internal.config
 import org.apache.spark.LocalSparkContext._
+import org.apache.spark.internal.config
 import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
 import org.apache.spark.storage.BlockManagerId
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index 481603b..995df1d 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -19,8 +19,8 @@ package org.apache.spark.scheduler
 
 import java.util.concurrent.Semaphore
 
-import scala.collection.mutable
 import scala.collection.JavaConverters._
+import scala.collection.mutable
 
 import org.mockito.Mockito
 import org.scalatest.Matchers

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
index 3e55d39..1bddba8 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
@@ -34,8 +34,8 @@ import org.scalatest.BeforeAndAfter
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark._
-import org.apache.spark.storage.TaskResultBlockId
 import org.apache.spark.TestUtils.JavaSourceFromString
+import org.apache.spark.storage.TaskResultBlockId
 import org.apache.spark.util.{MutableURLClassLoader, RpcUtils, Utils}
 
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala
index a003377..ab67a39 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskSchedulerImplSuite.scala
@@ -27,8 +27,8 @@ import org.scalatest.BeforeAndAfterEach
 import org.scalatest.mock.MockitoSugar
 
 import org.apache.spark._
-import org.apache.spark.internal.config
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config
 import org.apache.spark.util.ManualClock
 
 class FakeSchedulerBackend extends SchedulerBackend {

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
index 3696df0..6f1663b 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
@@ -28,8 +28,8 @@ import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
 
 import org.apache.spark._
-import org.apache.spark.internal.config
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config
 import org.apache.spark.serializer.SerializerInstance
 import org.apache.spark.storage.BlockManagerId
 import org.apache.spark.util.{AccumulatorV2, ManualClock, Utils}

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index 755a61a..8dd70fc 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -23,8 +23,8 @@ import java.nio.ByteBuffer
 import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
-import scala.concurrent.duration._
 import scala.concurrent.Future
+import scala.concurrent.duration._
 import scala.language.{implicitConversions, postfixOps}
 import scala.reflect.ClassTag
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
index 67fc084..a81ccd7 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
@@ -26,8 +26,8 @@ import io.netty.channel.FileRegion
 import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.network.util.{ByteArrayWritableChannel, JavaUtils}
 import org.apache.spark.security.CryptoStreamUtils
-import org.apache.spark.util.io.ChunkedByteBuffer
 import org.apache.spark.util.Utils
+import org.apache.spark.util.io.ChunkedByteBuffer
 
 class DiskStoreSuite extends SparkFunSuite {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
index 57452d4..a1a8587 100644
--- a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
@@ -22,9 +22,9 @@ import java.util.Properties
 import scala.collection.JavaConverters._
 import scala.collection.Map
 
-import org.json4s.jackson.JsonMethods._
 import org.json4s.JsonAST.{JArray, JInt, JString, JValue}
 import org.json4s.JsonDSL._
+import org.json4s.jackson.JsonMethods._
 import org.scalatest.Assertions
 import org.scalatest.exceptions.TestFailedException
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/ml/BucketedRandomProjectionLSHExample.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/BucketedRandomProjectionLSHExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/BucketedRandomProjectionLSHExample.scala
index 16da4fa..58f9fb3 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/BucketedRandomProjectionLSHExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/BucketedRandomProjectionLSHExample.scala
@@ -21,9 +21,9 @@ package org.apache.spark.examples.ml
 // $example on$
 import org.apache.spark.ml.feature.BucketedRandomProjectionLSH
 import org.apache.spark.ml.linalg.Vectors
+import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.functions.col
 // $example off$
-import org.apache.spark.sql.SparkSession
 
 /**
  * An example demonstrating BucketedRandomProjectionLSH.

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/ml/MinHashLSHExample.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/MinHashLSHExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/MinHashLSHExample.scala
index b94ab9b..8515821 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/MinHashLSHExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/MinHashLSHExample.scala
@@ -21,9 +21,9 @@ package org.apache.spark.examples.ml
 // $example on$
 import org.apache.spark.ml.feature.MinHashLSH
 import org.apache.spark.ml.linalg.Vectors
+import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.functions.col
 // $example off$
-import org.apache.spark.sql.SparkSession
 
 /**
  * An example demonstrating MinHashLSH.

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/ml/TokenizerExample.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/TokenizerExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/TokenizerExample.scala
index 0167dc3..1547776 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/TokenizerExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/TokenizerExample.scala
@@ -20,9 +20,9 @@ package org.apache.spark.examples.ml
 
 // $example on$
 import org.apache.spark.ml.feature.{RegexTokenizer, Tokenizer}
+import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.functions._
 // $example off$
-import org.apache.spark.sql.SparkSession
 
 object TokenizerExample {
   def main(args: Array[String]): Unit = {

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/ml/UnaryTransformerExample.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/UnaryTransformerExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/UnaryTransformerExample.scala
index 13b58d1..b4179ec 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/UnaryTransformerExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/UnaryTransformerExample.scala
@@ -22,10 +22,8 @@ package org.apache.spark.examples.ml
 import org.apache.spark.ml.UnaryTransformer
 import org.apache.spark.ml.param.DoubleParam
 import org.apache.spark.ml.util.{DefaultParamsReadable, DefaultParamsWritable, Identifiable}
-import org.apache.spark.sql.functions.col
-// $example off$
 import org.apache.spark.sql.SparkSession
-// $example on$
+import org.apache.spark.sql.functions.col
 import org.apache.spark.sql.types.{DataType, DataTypes}
 import org.apache.spark.util.Utils
 // $example off$

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala
index 63a6091..9a0af5d 100644
--- a/examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ml/VectorSlicerExample.scala
@@ -24,10 +24,9 @@ import java.util.Arrays
 import org.apache.spark.ml.attribute.{Attribute, AttributeGroup, NumericAttribute}
 import org.apache.spark.ml.feature.VectorSlicer
 import org.apache.spark.ml.linalg.Vectors
-import org.apache.spark.sql.Row
+import org.apache.spark.sql.{Row, SparkSession}
 import org.apache.spark.sql.types.StructType
 // $example off$
-import org.apache.spark.sql.SparkSession
 
 object VectorSlicerExample {
   def main(args: Array[String]): Unit = {

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnyPCA.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnyPCA.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnyPCA.scala
index 464fbd3..03bc675 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnyPCA.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnyPCA.scala
@@ -19,8 +19,8 @@
 package org.apache.spark.examples.mllib
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.mllib.linalg.distributed.RowMatrix
 import org.apache.spark.mllib.linalg.Vectors
+import org.apache.spark.mllib.linalg.distributed.RowMatrix
 
 /**
  * Compute the principal components of a tall-and-skinny matrix, whose rows are observations.

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnySVD.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnySVD.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnySVD.scala
index 65b4bc4..067e49b 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnySVD.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/TallSkinnySVD.scala
@@ -19,8 +19,8 @@
 package org.apache.spark.examples.mllib
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.mllib.linalg.distributed.RowMatrix
 import org.apache.spark.mllib.linalg.Vectors
+import org.apache.spark.mllib.linalg.distributed.RowMatrix
 
 /**
  * Compute the singular value decomposition (SVD) of a tall-and-skinny matrix.

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala b/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala
index cf12c98..6bd9634 100644
--- a/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala
@@ -21,13 +21,13 @@ import java.util.{Collection => JCollection, Map => JMap}
 
 import scala.collection.JavaConverters._
 
-import org.apache.avro.generic.{GenericFixed, IndexedRecord}
-import org.apache.avro.mapred.AvroWrapper
 import org.apache.avro.Schema
 import org.apache.avro.Schema.Type._
+import org.apache.avro.generic.{GenericFixed, IndexedRecord}
+import org.apache.avro.mapred.AvroWrapper
 
-import org.apache.spark.api.python.Converter
 import org.apache.spark.SparkException
+import org.apache.spark.api.python.Converter
 
 
 object AvroConversionUtil extends Serializable {

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedTypedAggregation.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedTypedAggregation.scala b/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedTypedAggregation.scala
index ac617d1..f04a831 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedTypedAggregation.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedTypedAggregation.scala
@@ -17,10 +17,8 @@
 package org.apache.spark.examples.sql
 
 // $example on:typed_custom_aggregation$
+import org.apache.spark.sql.{Encoder, Encoders, SparkSession}
 import org.apache.spark.sql.expressions.Aggregator
-import org.apache.spark.sql.Encoder
-import org.apache.spark.sql.Encoders
-import org.apache.spark.sql.SparkSession
 // $example off:typed_custom_aggregation$
 
 object UserDefinedTypedAggregation {

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedUntypedAggregation.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedUntypedAggregation.scala b/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedUntypedAggregation.scala
index 9c9ebc5..3656a84 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedUntypedAggregation.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/UserDefinedUntypedAggregation.scala
@@ -17,11 +17,10 @@
 package org.apache.spark.examples.sql
 
 // $example on:untyped_custom_aggregation$
+import org.apache.spark.sql.{Row, SparkSession}
 import org.apache.spark.sql.expressions.MutableAggregationBuffer
 import org.apache.spark.sql.expressions.UserDefinedAggregateFunction
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.SparkSession
 // $example off:untyped_custom_aggregation$
 
 object UserDefinedUntypedAggregation {

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/examples/src/main/scala/org/apache/spark/examples/streaming/DirectKafkaWordCount.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/streaming/DirectKafkaWordCount.scala b/examples/src/main/scala/org/apache/spark/examples/streaming/DirectKafkaWordCount.scala
index bd78526..474b03a 100644
--- a/examples/src/main/scala/org/apache/spark/examples/streaming/DirectKafkaWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/streaming/DirectKafkaWordCount.scala
@@ -20,9 +20,9 @@ package org.apache.spark.examples.streaming
 
 import kafka.serializer.StringDecoder
 
+import org.apache.spark.SparkConf
 import org.apache.spark.streaming._
 import org.apache.spark.streaming.kafka._
-import org.apache.spark.SparkConf
 
 /**
  * Consumes messages from one or more topics in Kafka and does wordcount.

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala
----------------------------------------------------------------------
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala
index 945cfa7..e8623b4 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeTestUtils.scala
@@ -35,8 +35,8 @@ import org.jboss.netty.channel.socket.SocketChannel
 import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory
 import org.jboss.netty.handler.codec.compression.{ZlibDecoder, ZlibEncoder}
 
-import org.apache.spark.util.Utils
 import org.apache.spark.SparkConf
+import org.apache.spark.util.Utils
 
 /**
  * Share codes for Scala and Python unit tests

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala
----------------------------------------------------------------------
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala
index 15ff4f6..a3e784a 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/PollingFlumeTestUtils.scala
@@ -23,10 +23,10 @@ import java.util.concurrent._
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.apache.flume.event.EventBuilder
 import org.apache.flume.Context
 import org.apache.flume.channel.MemoryChannel
 import org.apache.flume.conf.Configurables
+import org.apache.flume.event.EventBuilder
 
 import org.apache.spark.streaming.flume.sink.{SparkSink, SparkSinkConfig}
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
----------------------------------------------------------------------
diff --git a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
index 5915d9f..066a68a 100644
--- a/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
+++ b/external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala
@@ -40,9 +40,9 @@ import org.apache.zookeeper.server.{NIOServerCnxnFactory, ZooKeeperServer}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
 
+import org.apache.spark.SparkConf
 import org.apache.spark.internal.Logging
 import org.apache.spark.util.Utils
-import org.apache.spark.SparkConf
 
 /**
  * This is a helper class for Kafka test suites. This has the functionality to set up

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
----------------------------------------------------------------------
diff --git a/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala b/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
index 88a312a..453b5e5 100644
--- a/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
+++ b/external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark.streaming.kafka010
 import java.io.File
 import java.lang.{ Long => JLong }
 import java.util.{ Arrays, HashMap => JHashMap, Map => JMap }
-import java.util.concurrent.atomic.AtomicLong
 import java.util.concurrent.ConcurrentLinkedQueue
+import java.util.concurrent.atomic.AtomicLong
 
 import scala.collection.JavaConverters._
 import scala.concurrent.duration._

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/external/kafka-0-8/src/test/scala/org/apache/spark/streaming/kafka/DirectKafkaStreamSuite.scala
----------------------------------------------------------------------
diff --git a/external/kafka-0-8/src/test/scala/org/apache/spark/streaming/kafka/DirectKafkaStreamSuite.scala b/external/kafka-0-8/src/test/scala/org/apache/spark/streaming/kafka/DirectKafkaStreamSuite.scala
index f8b3407..06ef5bc 100644
--- a/external/kafka-0-8/src/test/scala/org/apache/spark/streaming/kafka/DirectKafkaStreamSuite.scala
+++ b/external/kafka-0-8/src/test/scala/org/apache/spark/streaming/kafka/DirectKafkaStreamSuite.scala
@@ -19,8 +19,8 @@ package org.apache.spark.streaming.kafka
 
 import java.io.File
 import java.util.Arrays
-import java.util.concurrent.atomic.AtomicLong
 import java.util.concurrent.ConcurrentLinkedQueue
+import java.util.concurrent.atomic.AtomicLong
 
 import scala.collection.JavaConverters._
 import scala.concurrent.duration._

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala
----------------------------------------------------------------------
diff --git a/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala b/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala
index decfb6b..f61e398 100644
--- a/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala
+++ b/external/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala
@@ -21,7 +21,6 @@ import scala.reflect.ClassTag
 
 import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream
 import com.amazonaws.services.kinesis.model.Record
-import KinesisReadConfigurations._
 
 import org.apache.spark.annotation.InterfaceStability
 import org.apache.spark.rdd.RDD
@@ -47,6 +46,8 @@ private[kinesis] class KinesisInputDStream[T: ClassTag](
     val cloudWatchCreds: Option[SparkAWSCredentials]
   ) extends ReceiverInputDStream[T](_ssc) {
 
+  import KinesisReadConfigurations._
+
   private[streaming]
   override def createBlockRDD(time: Time, blockInfos: Seq[ReceivedBlockInfo]): RDD[T] = {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala b/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
index 475bccf..49e8487 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
@@ -20,10 +20,10 @@ package org.apache.spark.graphx
 import scala.reflect.ClassTag
 import scala.util.Random
 
+import org.apache.spark.SparkException
 import org.apache.spark.graphx.lib._
 import org.apache.spark.ml.linalg.Vector
 import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkException
 
 /**
  * Contains additional functionality for [[Graph]]. All operations are expressed in terms of the

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib-local/src/test/scala/org/apache/spark/ml/impl/UtilsSuite.scala
----------------------------------------------------------------------
diff --git a/mllib-local/src/test/scala/org/apache/spark/ml/impl/UtilsSuite.scala b/mllib-local/src/test/scala/org/apache/spark/ml/impl/UtilsSuite.scala
index 44b122b..20e7c10 100644
--- a/mllib-local/src/test/scala/org/apache/spark/ml/impl/UtilsSuite.scala
+++ b/mllib-local/src/test/scala/org/apache/spark/ml/impl/UtilsSuite.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.ml.impl
 
-import org.apache.spark.ml.impl.Utils.EPSILON
 import org.apache.spark.ml.SparkMLFunSuite
+import org.apache.spark.ml.impl.Utils.EPSILON
 
 
 class UtilsSuite extends SparkMLFunSuite {

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/main/scala/org/apache/spark/ml/feature/Interaction.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/feature/Interaction.scala b/mllib/src/main/scala/org/apache/spark/ml/feature/Interaction.scala
index 902f84f..4ff1d0e 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/feature/Interaction.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/feature/Interaction.scala
@@ -21,12 +21,12 @@ import scala.collection.mutable.ArrayBuilder
 
 import org.apache.spark.SparkException
 import org.apache.spark.annotation.Since
+import org.apache.spark.ml.Transformer
 import org.apache.spark.ml.attribute._
+import org.apache.spark.ml.linalg.{Vector, Vectors, VectorUDT}
 import org.apache.spark.ml.param._
 import org.apache.spark.ml.param.shared._
 import org.apache.spark.ml.util._
-import org.apache.spark.ml.Transformer
-import org.apache.spark.ml.linalg.{Vector, Vectors, VectorUDT}
 import org.apache.spark.sql.{DataFrame, Dataset, Row}
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types._

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/main/scala/org/apache/spark/ml/feature/SQLTransformer.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/feature/SQLTransformer.scala b/mllib/src/main/scala/org/apache/spark/ml/feature/SQLTransformer.scala
index 65db06c..62c1972 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/feature/SQLTransformer.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/feature/SQLTransformer.scala
@@ -18,8 +18,8 @@
 package org.apache.spark.ml.feature
 
 import org.apache.spark.annotation.Since
-import org.apache.spark.ml.param.{Param, ParamMap}
 import org.apache.spark.ml.Transformer
+import org.apache.spark.ml.param.{Param, ParamMap}
 import org.apache.spark.ml.util._
 import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
 import org.apache.spark.sql.types.StructType

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/main/scala/org/apache/spark/ml/param/params.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/param/params.scala b/mllib/src/main/scala/org/apache/spark/ml/param/params.scala
index 12ad800..ac68b82 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/param/params.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/param/params.scala
@@ -22,8 +22,8 @@ import java.util.{List => JList}
 import java.util.NoSuchElementException
 
 import scala.annotation.varargs
-import scala.collection.mutable
 import scala.collection.JavaConverters._
+import scala.collection.mutable
 
 import org.json4s._
 import org.json4s.jackson.JsonMethods._

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala
index 50931fe..ed431f5 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/regression/LinearRegression.scala
@@ -27,11 +27,11 @@ import org.apache.hadoop.fs.Path
 import org.apache.spark.SparkException
 import org.apache.spark.annotation.{Experimental, Since}
 import org.apache.spark.internal.Logging
+import org.apache.spark.ml.PredictorParams
 import org.apache.spark.ml.feature.Instance
 import org.apache.spark.ml.linalg.{Vector, Vectors}
 import org.apache.spark.ml.linalg.BLAS._
 import org.apache.spark.ml.optim.WeightedLeastSquares
-import org.apache.spark.ml.PredictorParams
 import org.apache.spark.ml.optim.aggregator.LeastSquaresAggregator
 import org.apache.spark.ml.optim.loss.{L2Regularization, RDDLossFunction}
 import org.apache.spark.ml.param.{Param, ParamMap, ParamValidators}

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/main/scala/org/apache/spark/ml/source/libsvm/LibSVMRelation.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/source/libsvm/LibSVMRelation.scala b/mllib/src/main/scala/org/apache/spark/ml/source/libsvm/LibSVMRelation.scala
index dec1183..74aaed9 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/source/libsvm/LibSVMRelation.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/source/libsvm/LibSVMRelation.scala
@@ -23,8 +23,8 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileStatus, Path}
 import org.apache.hadoop.mapreduce.{Job, TaskAttemptContext}
 
-import org.apache.spark.internal.Logging
 import org.apache.spark.TaskContext
+import org.apache.spark.internal.Logging
 import org.apache.spark.ml.feature.LabeledPoint
 import org.apache.spark.ml.linalg.{Vectors, VectorUDT}
 import org.apache.spark.mllib.util.MLUtils

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/main/scala/org/apache/spark/mllib/clustering/BisectingKMeansModel.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/clustering/BisectingKMeansModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/BisectingKMeansModel.scala
index 6f1ab09..633bda6 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/clustering/BisectingKMeansModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/BisectingKMeansModel.scala
@@ -19,8 +19,8 @@ package org.apache.spark.mllib.clustering
 
 import org.json4s._
 import org.json4s.DefaultFormats
-import org.json4s.jackson.JsonMethods._
 import org.json4s.JsonDSL._
+import org.json4s.jackson.JsonMethods._
 
 import org.apache.spark.SparkContext
 import org.apache.spark.annotation.Since

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala b/mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala
index 862be6f..32f1555 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala
@@ -23,13 +23,13 @@ import org.json4s._
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
 
+import org.apache.spark.SparkContext
 import org.apache.spark.annotation.Since
 import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors}
 import org.apache.spark.mllib.regression.LabeledPoint
 import org.apache.spark.mllib.stat.Statistics
 import org.apache.spark.mllib.util.{Loader, Saveable}
 import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext
 import org.apache.spark.sql.{Row, SparkSession}
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala
index 2d23650..8347cca 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala
@@ -28,14 +28,13 @@ import org.json4s._
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
 
-import org.apache.spark.SparkContext
+import org.apache.spark.{RangePartitioner, SparkContext}
 import org.apache.spark.annotation.Since
 import org.apache.spark.api.java.{JavaDoubleRDD, JavaRDD}
 import org.apache.spark.mllib.linalg.{Vector, Vectors}
 import org.apache.spark.mllib.util.{Loader, Saveable}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.SparkSession
-import org.apache.spark.RangePartitioner
 
 /**
  * Regression model for isotonic regression.

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala
index f082b16..4381d6a 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala
@@ -19,11 +19,11 @@ package org.apache.spark.mllib.regression
 
 import scala.beans.BeanInfo
 
+import org.apache.spark.SparkException
 import org.apache.spark.annotation.Since
 import org.apache.spark.ml.feature.{LabeledPoint => NewLabeledPoint}
 import org.apache.spark.mllib.linalg.{Vector, Vectors}
 import org.apache.spark.mllib.util.NumericParser
-import org.apache.spark.SparkException
 
 /**
  * Class that represents the features and labels of a data point.

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
index 45d3f9b..ac73191 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
@@ -20,10 +20,10 @@ package org.apache.spark.ml.recommendation
 import java.io.File
 import java.util.Random
 
+import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.WrappedArray
-import scala.collection.JavaConverters._
 import scala.language.existentials
 
 import com.github.fommil.netlib.BLAS.{getInstance => blas}

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 9616f6d..d544894 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2424,7 +2424,7 @@
       <plugin>
         <groupId>org.scalastyle</groupId>
         <artifactId>scalastyle-maven-plugin</artifactId>
-        <version>0.8.0</version>
+        <version>0.9.0</version>
         <configuration>
           <verbose>false</verbose>
           <failOnViolation>true</failOnViolation>

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 371a171..7565e14 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -30,7 +30,7 @@ import sbtunidoc.Plugin.UnidocKeys.unidocGenjavadocVersion
 import com.simplytyped.Antlr4Plugin._
 import com.typesafe.sbt.pom.{PomBuild, SbtPomKeys}
 import com.typesafe.tools.mima.plugin.MimaKeys
-import org.scalastyle.sbt.ScalastylePlugin._
+import org.scalastyle.sbt.ScalastylePlugin.autoImport._
 import org.scalastyle.sbt.Tasks
 
 import spray.revolver.RevolverPlugin._
@@ -116,7 +116,7 @@ object SparkBuild extends PomBuild {
 
   lazy val scalaStyleRules = Project("scalaStyleRules", file("scalastyle"))
     .settings(
-      libraryDependencies += "org.scalastyle" %% "scalastyle" % "0.8.0"
+      libraryDependencies += "org.scalastyle" %% "scalastyle" % "0.9.0"
     )
 
   lazy val scalaStyleOnCompile = taskKey[Unit]("scalaStyleOnCompile")

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/project/plugins.sbt
----------------------------------------------------------------------
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 2b49c29..f67e0a1 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -8,7 +8,7 @@ addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.1.0")
 addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")
 
 // need to make changes to uptake sbt 1.0 support in "org.scalastyle" %% "scalastyle-sbt-plugin" % "0.9.0"
-addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.8.0")
+addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.9.0")
 
 addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.17")
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcherArguments.scala
----------------------------------------------------------------------
diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcherArguments.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcherArguments.scala
index ddea762..096bb4e 100644
--- a/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcherArguments.scala
+++ b/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcherArguments.scala
@@ -20,8 +20,8 @@ package org.apache.spark.deploy.mesos
 import scala.annotation.tailrec
 import scala.collection.mutable
 
-import org.apache.spark.util.{IntParam, Utils}
 import org.apache.spark.SparkConf
+import org.apache.spark.util.{IntParam, Utils}
 
 private[mesos] class MesosClusterDispatcherArguments(args: Array[String], conf: SparkConf) {
   var host: String = Utils.localHostName()

http://git-wip-us.apache.org/repos/asf/spark/blob/3f958a99/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/ui/MesosClusterPage.scala
----------------------------------------------------------------------
diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/ui/MesosClusterPage.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/ui/MesosClusterPage.scala
index 219b52e..88a6614 100644
--- a/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/ui/MesosClusterPage.scala
+++ b/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/ui/MesosClusterPage.scala
@@ -23,8 +23,8 @@ import scala.xml.Node
 
 import org.apache.mesos.Protos.TaskStatus
 
-import org.apache.spark.deploy.mesos.config._
 import org.apache.spark.deploy.mesos.MesosDriverDescription
+import org.apache.spark.deploy.mesos.config._
 import org.apache.spark.scheduler.cluster.mesos.MesosClusterSubmissionState
 import org.apache.spark.ui.{UIUtils, WebUIPage}
 

