You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by we...@apache.org on 2016/03/17 12:24:01 UTC
[5/5] spark git commit: [SPARK-13928] Move org.apache.spark.Logging
into org.apache.spark.internal.Logging
[SPARK-13928] Move org.apache.spark.Logging into org.apache.spark.internal.Logging
## What changes were proposed in this pull request?
Logging was made private in Spark 2.0. By moving it out of the org.apache.spark package, users who depended on it can define an org.apache.spark.Logging trait themselves and avoid changing their own code.
## How was this patch tested?
Existing tests.
Author: Wenchen Fan <we...@databricks.com>
Closes #11764 from cloud-fan/logger.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8ef3399a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8ef3399a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8ef3399a
Branch: refs/heads/master
Commit: 8ef3399aff04bf8b7ab294c0f55bcf195995842b
Parents: ea9ca6f
Author: Wenchen Fan <we...@databricks.com>
Authored: Thu Mar 17 19:23:38 2016 +0800
Committer: Wenchen Fan <we...@databricks.com>
Committed: Thu Mar 17 19:23:38 2016 +0800
----------------------------------------------------------------------
.../scala/org/apache/spark/Accumulator.scala | 1 +
.../scala/org/apache/spark/ContextCleaner.scala | 1 +
.../spark/ExecutorAllocationManager.scala | 1 +
.../org/apache/spark/HeartbeatReceiver.scala | 1 +
.../scala/org/apache/spark/HttpServer.scala | 1 +
.../main/scala/org/apache/spark/Logging.scala | 167 -------------------
.../org/apache/spark/MapOutputTracker.scala | 1 +
.../scala/org/apache/spark/SSLOptions.scala | 2 +
.../org/apache/spark/SecurityManager.scala | 1 +
.../main/scala/org/apache/spark/SparkConf.scala | 4 +-
.../scala/org/apache/spark/SparkContext.scala | 1 +
.../main/scala/org/apache/spark/SparkEnv.scala | 1 +
.../org/apache/spark/SparkHadoopWriter.scala | 1 +
.../org/apache/spark/TaskContextImpl.scala | 1 +
.../scala/org/apache/spark/TaskEndReason.scala | 1 +
.../spark/api/python/PythonGatewayServer.scala | 2 +-
.../spark/api/python/PythonHadoopUtil.scala | 3 +-
.../org/apache/spark/api/python/PythonRDD.scala | 1 +
.../spark/api/python/PythonWorkerFactory.scala | 1 +
.../org/apache/spark/api/python/SerDeUtil.scala | 3 +-
.../scala/org/apache/spark/api/r/RBackend.scala | 3 +-
.../apache/spark/api/r/RBackendHandler.scala | 2 +-
.../scala/org/apache/spark/api/r/RRDD.scala | 1 +
.../org/apache/spark/broadcast/Broadcast.scala | 2 +-
.../spark/broadcast/BroadcastManager.scala | 3 +-
.../spark/broadcast/TorrentBroadcast.scala | 1 +
.../scala/org/apache/spark/deploy/Client.scala | 5 +-
.../spark/deploy/ExternalShuffleService.scala | 3 +-
.../spark/deploy/FaultToleranceTest.scala | 3 +-
.../apache/spark/deploy/LocalSparkCluster.scala | 3 +-
.../org/apache/spark/deploy/RPackageUtils.scala | 2 +-
.../apache/spark/deploy/SparkCuratorUtil.scala | 3 +-
.../apache/spark/deploy/SparkHadoopUtil.scala | 3 +-
.../apache/spark/deploy/client/AppClient.scala | 5 +-
.../spark/deploy/history/ApplicationCache.scala | 2 +-
.../deploy/history/FsHistoryProvider.scala | 3 +-
.../spark/deploy/history/HistoryServer.scala | 3 +-
.../deploy/history/HistoryServerArguments.scala | 3 +-
.../master/FileSystemPersistenceEngine.scala | 2 +-
.../org/apache/spark/deploy/master/Master.scala | 3 +-
.../deploy/master/RecoveryModeFactory.scala | 3 +-
.../master/ZooKeeperLeaderElectionAgent.scala | 3 +-
.../master/ZooKeeperPersistenceEngine.scala | 3 +-
.../spark/deploy/master/ui/MasterWebUI.scala | 2 +-
.../deploy/mesos/MesosClusterDispatcher.scala | 3 +-
.../mesos/MesosExternalShuffleService.scala | 3 +-
.../deploy/rest/RestSubmissionClient.scala | 3 +-
.../deploy/rest/RestSubmissionServer.scala | 3 +-
.../spark/deploy/worker/CommandUtils.scala | 2 +-
.../spark/deploy/worker/DriverRunner.scala | 3 +-
.../spark/deploy/worker/ExecutorRunner.scala | 3 +-
.../org/apache/spark/deploy/worker/Worker.scala | 3 +-
.../spark/deploy/worker/WorkerWatcher.scala | 2 +-
.../apache/spark/deploy/worker/ui/LogPage.scala | 2 +-
.../spark/deploy/worker/ui/WorkerWebUI.scala | 2 +-
.../executor/CoarseGrainedExecutorBackend.scala | 1 +
.../org/apache/spark/executor/Executor.scala | 1 +
.../spark/executor/MesosExecutorBackend.scala | 3 +-
.../org/apache/spark/executor/TaskMetrics.scala | 1 +
.../input/FixedLengthBinaryInputFormat.scala | 2 +-
.../org/apache/spark/internal/Logging.scala | 167 +++++++++++++++++++
.../spark/mapred/SparkHadoopMapRedUtil.scala | 3 +-
.../spark/memory/ExecutionMemoryPool.scala | 2 +-
.../org/apache/spark/memory/MemoryManager.scala | 3 +-
.../apache/spark/memory/StorageMemoryPool.scala | 2 +-
.../apache/spark/metrics/MetricsConfig.scala | 3 +-
.../apache/spark/metrics/MetricsSystem.scala | 3 +-
.../spark/network/BlockTransferService.scala | 4 +-
.../network/netty/NettyBlockRpcServer.scala | 2 +-
.../org/apache/spark/rdd/AsyncRDDActions.scala | 3 +-
.../apache/spark/rdd/DoubleRDDFunctions.scala | 3 +-
.../scala/org/apache/spark/rdd/HadoopRDD.scala | 1 +
.../scala/org/apache/spark/rdd/JdbcRDD.scala | 3 +-
.../spark/rdd/LocalRDDCheckpointData.scala | 3 +-
.../org/apache/spark/rdd/NewHadoopRDD.scala | 1 +
.../apache/spark/rdd/OrderedRDDFunctions.scala | 6 +-
.../org/apache/spark/rdd/PairRDDFunctions.scala | 1 +
.../main/scala/org/apache/spark/rdd/RDD.scala | 1 +
.../apache/spark/rdd/RDDOperationScope.scala | 3 +-
.../spark/rdd/ReliableCheckpointRDD.scala | 1 +
.../spark/rdd/ReliableRDDCheckpointData.scala | 1 +
.../spark/rdd/SequenceFileRDDFunctions.scala | 2 +-
.../org/apache/spark/rpc/RpcEndpointRef.scala | 3 +-
.../org/apache/spark/rpc/netty/Dispatcher.scala | 3 +-
.../org/apache/spark/rpc/netty/Inbox.scala | 3 +-
.../spark/rpc/netty/NettyRpcCallContext.scala | 2 +-
.../apache/spark/rpc/netty/NettyRpcEnv.scala | 3 +-
.../org/apache/spark/rpc/netty/Outbox.scala | 3 +-
.../apache/spark/scheduler/DAGScheduler.scala | 2 +-
.../spark/scheduler/EventLoggingListener.scala | 3 +-
.../spark/scheduler/InputFormatInfo.scala | 2 +-
.../org/apache/spark/scheduler/JobWaiter.scala | 2 +-
.../scheduler/OutputCommitCoordinator.scala | 1 +
.../scala/org/apache/spark/scheduler/Pool.scala | 2 +-
.../spark/scheduler/ReplayListenerBus.scala | 2 +-
.../spark/scheduler/SchedulableBuilder.scala | 3 +-
.../apache/spark/scheduler/ShuffleMapTask.scala | 1 +
.../apache/spark/scheduler/SparkListener.scala | 3 +-
.../org/apache/spark/scheduler/Stage.scala | 1 +
.../spark/scheduler/TaskResultGetter.scala | 1 +
.../spark/scheduler/TaskSchedulerImpl.scala | 1 +
.../apache/spark/scheduler/TaskSetManager.scala | 2 +-
.../cluster/CoarseGrainedSchedulerBackend.scala | 3 +-
.../cluster/SparkDeploySchedulerBackend.scala | 3 +-
.../mesos/MesosClusterPersistenceEngine.scala | 3 +-
.../mesos/MesosSchedulerBackendUtil.scala | 3 +-
.../cluster/mesos/MesosSchedulerUtils.scala | 3 +-
.../cluster/mesos/MesosTaskLaunchData.scala | 2 +-
.../spark/scheduler/local/LocalBackend.scala | 3 +-
.../spark/serializer/KryoSerializer.scala | 1 +
.../serializer/SerializationDebugger.scala | 2 +-
.../spark/shuffle/BlockStoreShuffleReader.scala | 1 +
.../shuffle/FileShuffleBlockResolver.scala | 3 +-
.../shuffle/IndexShuffleBlockResolver.scala | 3 +-
.../spark/shuffle/hash/HashShuffleManager.scala | 1 +
.../spark/shuffle/hash/HashShuffleWriter.scala | 1 +
.../spark/shuffle/sort/SortShuffleManager.scala | 1 +
.../spark/shuffle/sort/SortShuffleWriter.scala | 1 +
.../api/v1/EventLogDownloadResource.scala | 3 +-
.../apache/spark/storage/BlockInfoManager.scala | 3 +-
.../org/apache/spark/storage/BlockManager.scala | 1 +
.../spark/storage/BlockManagerMaster.scala | 3 +-
.../storage/BlockManagerMasterEndpoint.scala | 3 +-
.../storage/BlockManagerSlaveEndpoint.scala | 3 +-
.../apache/spark/storage/DiskBlockManager.scala | 3 +-
.../spark/storage/DiskBlockObjectWriter.scala | 2 +-
.../org/apache/spark/storage/DiskStore.scala | 3 +-
.../storage/ShuffleBlockFetcherIterator.scala | 3 +-
.../spark/storage/memory/MemoryStore.scala | 3 +-
.../apache/spark/ui/ConsoleProgressBar.scala | 1 +
.../scala/org/apache/spark/ui/JettyUtils.scala | 3 +-
.../scala/org/apache/spark/ui/SparkUI.scala | 3 +-
.../scala/org/apache/spark/ui/UIUtils.scala | 2 +-
.../main/scala/org/apache/spark/ui/WebUI.scala | 3 +-
.../spark/ui/jobs/JobProgressListener.scala | 1 +
.../spark/ui/scope/RDDOperationGraph.scala | 2 +-
.../org/apache/spark/util/ClosureCleaner.scala | 3 +-
.../scala/org/apache/spark/util/EventLoop.scala | 2 +-
.../org/apache/spark/util/ListenerBus.scala | 2 +-
.../apache/spark/util/ShutdownHookManager.scala | 2 +-
.../org/apache/spark/util/SizeEstimator.scala | 2 +-
.../util/SparkUncaughtExceptionHandler.scala | 2 +-
.../apache/spark/util/TimeStampedHashMap.scala | 2 +-
.../scala/org/apache/spark/util/Utils.scala | 1 +
.../util/collection/ExternalAppendOnlyMap.scala | 3 +-
.../spark/util/collection/ExternalSorter.scala | 1 +
.../spark/util/collection/Spillable.scala | 3 +-
.../spark/util/logging/FileAppender.scala | 3 +-
.../spark/util/logging/RollingPolicy.scala | 2 +-
.../util/random/StratifiedSamplingUtils.scala | 2 +-
.../org/apache/spark/ContextCleanerSuite.scala | 1 +
.../SparkContextSchedulerCreationSuite.scala | 1 +
.../scala/org/apache/spark/SparkFunSuite.scala | 2 +
.../scala/org/apache/spark/ThreadingSuite.scala | 2 +
.../apache/spark/deploy/SparkSubmitSuite.scala | 1 +
.../spark/deploy/client/AppClientSuite.scala | 1 +
.../deploy/history/ApplicationCacheSuite.scala | 3 +-
.../deploy/history/FsHistoryProviderSuite.scala | 3 +-
.../input/WholeTextFileRecordReaderSuite.scala | 3 +-
.../org/apache/spark/rdd/SortingSuite.scala | 3 +-
.../scheduler/EventLoggingListenerSuite.scala | 1 +
.../scheduler/TaskSchedulerImplSuite.scala | 1 +
.../spark/scheduler/TaskSetManagerSuite.scala | 1 +
.../apache/spark/util/FileAppenderSuite.scala | 3 +-
.../org/apache/spark/util/UtilsSuite.scala | 3 +-
.../spark/util/collection/SorterSuite.scala | 3 +-
.../spark/examples/graphx/Analytics.scala | 1 +
.../examples/streaming/CustomReceiver.scala | 3 +-
.../examples/streaming/StreamingExamples.scala | 2 +-
.../kafka/DirectKafkaInputDStream.scala | 3 +-
.../streaming/kafka/KafkaInputDStream.scala | 2 +-
.../apache/spark/streaming/kafka/KafkaRDD.scala | 3 +-
.../spark/streaming/kafka/KafkaTestUtils.scala | 3 +-
.../streaming/kafka/ReliableKafkaReceiver.scala | 3 +-
.../kafka/DirectKafkaStreamSuite.scala | 3 +-
.../streaming/KinesisWordCountASL.scala | 3 +-
.../kinesis/KinesisBackedBlockRDD.scala | 1 +
.../streaming/kinesis/KinesisCheckpointer.scala | 2 +-
.../streaming/kinesis/KinesisReceiver.scala | 2 +-
.../kinesis/KinesisRecordProcessor.scala | 2 +-
.../streaming/kinesis/KinesisTestUtils.scala | 2 +-
.../org/apache/spark/graphx/GraphLoader.scala | 3 +-
.../scala/org/apache/spark/graphx/Pregel.scala | 2 +-
.../graphx/impl/VertexPartitionBaseOps.scala | 2 +-
.../org/apache/spark/graphx/lib/PageRank.scala | 2 +-
.../spark/graphx/util/GraphGenerators.scala | 1 +
.../scala/org/apache/spark/ml/Pipeline.scala | 3 +-
.../scala/org/apache/spark/ml/Transformer.scala | 2 +-
.../spark/ml/classification/GBTClassifier.scala | 2 +-
.../ml/classification/LogisticRegression.scala | 3 +-
.../org/apache/spark/ml/clustering/LDA.scala | 2 +-
.../spark/ml/feature/QuantileDiscretizer.scala | 2 +-
.../IterativelyReweightedLeastSquares.scala | 2 +-
.../spark/ml/optim/WeightedLeastSquares.scala | 2 +-
.../apache/spark/ml/recommendation/ALS.scala | 3 +-
.../ml/regression/AFTSurvivalRegression.scala | 3 +-
.../spark/ml/regression/GBTRegressor.scala | 2 +-
.../GeneralizedLinearRegression.scala | 3 +-
.../ml/regression/IsotonicRegression.scala | 2 +-
.../spark/ml/regression/LinearRegression.scala | 3 +-
.../ml/tree/impl/GradientBoostedTrees.scala | 2 +-
.../apache/spark/ml/tree/impl/NodeIdCache.scala | 2 +-
.../spark/ml/tree/impl/RandomForest.scala | 2 +-
.../apache/spark/ml/tuning/CrossValidator.scala | 3 +-
.../spark/ml/tuning/TrainValidationSplit.scala | 2 +-
.../org/apache/spark/ml/util/ReadWrite.scala | 3 +-
.../spark/mllib/classification/NaiveBayes.scala | 3 +-
.../mllib/clustering/BisectingKMeans.scala | 2 +-
.../mllib/clustering/BisectingKMeansModel.scala | 2 +-
.../apache/spark/mllib/clustering/KMeans.scala | 2 +-
.../org/apache/spark/mllib/clustering/LDA.scala | 2 +-
.../spark/mllib/clustering/LocalKMeans.scala | 2 +-
.../clustering/PowerIterationClustering.scala | 3 +-
.../mllib/clustering/StreamingKMeans.scala | 2 +-
.../BinaryClassificationMetrics.scala | 2 +-
.../spark/mllib/evaluation/RankingMetrics.scala | 2 +-
.../mllib/evaluation/RegressionMetrics.scala | 4 +-
.../spark/mllib/feature/StandardScaler.scala | 2 +-
.../apache/spark/mllib/feature/Word2Vec.scala | 3 +-
.../spark/mllib/fpm/AssociationRules.scala | 2 +-
.../org/apache/spark/mllib/fpm/FPGrowth.scala | 3 +-
.../spark/mllib/fpm/LocalPrefixSpan.scala | 2 +-
.../org/apache/spark/mllib/fpm/PrefixSpan.scala | 2 +-
.../spark/mllib/impl/PeriodicCheckpointer.scala | 3 +-
.../org/apache/spark/mllib/linalg/BLAS.scala | 2 +-
.../mllib/linalg/distributed/BlockMatrix.scala | 3 +-
.../mllib/linalg/distributed/RowMatrix.scala | 2 +-
.../mllib/optimization/GradientDescent.scala | 2 +-
.../apache/spark/mllib/optimization/LBFGS.scala | 2 +-
.../apache/spark/mllib/recommendation/ALS.scala | 2 +-
.../MatrixFactorizationModel.scala | 3 +-
.../regression/GeneralizedLinearAlgorithm.scala | 3 +-
.../regression/StreamingLinearAlgorithm.scala | 2 +-
.../stat/correlation/PearsonCorrelation.scala | 2 +-
.../stat/correlation/SpearmanCorrelation.scala | 2 +-
.../spark/mllib/stat/test/ChiSqTest.scala | 3 +-
.../mllib/stat/test/KolmogorovSmirnovTest.scala | 2 +-
.../spark/mllib/stat/test/StreamingTest.scala | 2 +-
.../mllib/stat/test/StreamingTestMethod.scala | 2 +-
.../apache/spark/mllib/tree/DecisionTree.scala | 2 +-
.../spark/mllib/tree/GradientBoostedTrees.scala | 2 +-
.../apache/spark/mllib/tree/RandomForest.scala | 2 +-
.../mllib/tree/impl/DecisionTreeMetadata.scala | 2 +-
.../mllib/tree/model/DecisionTreeModel.scala | 3 +-
.../apache/spark/mllib/tree/model/Node.scala | 2 +-
.../mllib/tree/model/treeEnsembleModels.scala | 3 +-
.../spark/mllib/util/DataValidators.scala | 2 +-
.../spark/ml/feature/VectorIndexerSuite.scala | 3 +-
.../spark/ml/recommendation/ALSSuite.scala | 4 +-
.../spark/mllib/linalg/VectorsSuite.scala | 3 +-
.../spark/mllib/stat/CorrelationSuite.scala | 3 +-
.../mllib/tree/GradientBoostedTreesSuite.scala | 3 +-
project/MimaExcludes.scala | 8 +
.../main/scala/org/apache/spark/repl/Main.scala | 4 +-
.../org/apache/spark/repl/SparkExprTyper.scala | 2 +-
.../org/apache/spark/repl/SparkILoop.scala | 6 +-
.../org/apache/spark/repl/SparkIMain.scala | 3 +-
.../spark/repl/SparkJLineCompletion.scala | 6 +-
.../main/scala/org/apache/spark/repl/Main.scala | 1 +
.../apache/spark/repl/ExecutorClassLoader.scala | 3 +-
.../spark/repl/ExecutorClassLoaderSuite.scala | 1 +
.../catalyst/expressions/BoundAttribute.scala | 2 +-
.../expressions/codegen/CodeGenerator.scala | 2 +-
.../expressions/codegen/GenerateOrdering.scala | 2 +-
.../spark/sql/catalyst/parser/ParseDriver.scala | 2 +-
.../sql/catalyst/planning/QueryPlanner.scala | 2 +-
.../spark/sql/catalyst/planning/patterns.scala | 2 +-
.../catalyst/plans/logical/LogicalPlan.scala | 2 +-
.../apache/spark/sql/catalyst/rules/Rule.scala | 2 +-
.../spark/sql/catalyst/rules/RuleExecutor.scala | 2 +-
.../spark/sql/catalyst/trees/package.scala | 2 +-
.../scala/org/apache/spark/sql/Column.scala | 2 +-
.../org/apache/spark/sql/DataFrameReader.scala | 3 +-
.../scala/org/apache/spark/sql/SQLContext.scala | 3 +-
.../org/apache/spark/sql/UDFRegistration.scala | 2 +-
.../spark/sql/execution/CacheManager.scala | 2 +-
.../apache/spark/sql/execution/SparkPlan.scala | 3 +-
.../aggregate/AggregationIterator.scala | 2 +-
.../aggregate/TungstenAggregationIterator.scala | 3 +-
.../aggregate/TypedAggregateExpression.scala | 2 +-
.../spark/sql/execution/aggregate/udaf.scala | 2 +-
.../columnar/GenerateColumnAccessor.scala | 2 +-
.../compression/CompressibleColumnBuilder.scala | 2 +-
.../spark/sql/execution/command/commands.scala | 2 +-
.../spark/sql/execution/command/ddl.scala | 2 +-
.../sql/execution/datasources/DataSource.scala | 2 +-
.../datasources/DataSourceStrategy.scala | 3 +-
.../datasources/FileSourceStrategy.scala | 2 +-
.../execution/datasources/SqlNewHadoopRDD.scala | 1 +
.../execution/datasources/WriterContainer.scala | 1 +
.../execution/datasources/csv/CSVOptions.scala | 2 +-
.../execution/datasources/csv/CSVParser.scala | 2 +-
.../execution/datasources/csv/CSVRelation.scala | 2 +-
.../datasources/jdbc/DriverRegistry.scala | 2 +-
.../execution/datasources/jdbc/JDBCRDD.scala | 3 +-
.../execution/datasources/jdbc/JdbcUtils.scala | 2 +-
.../datasources/json/JSONRelation.scala | 2 +-
.../parquet/CatalystReadSupport.scala | 2 +-
.../parquet/CatalystRowConverter.scala | 2 +-
.../parquet/CatalystWriteSupport.scala | 2 +-
.../datasources/parquet/ParquetRelation.scala | 3 +-
.../spark/sql/execution/debug/package.scala | 3 +-
.../exchange/ExchangeCoordinator.scala | 3 +-
.../spark/sql/execution/python/PythonUDF.scala | 2 +-
.../sql/execution/stat/FrequentItems.scala | 2 +-
.../sql/execution/stat/StatFunctions.scala | 2 +-
.../execution/streaming/FileStreamSource.scala | 2 +-
.../execution/streaming/StreamExecution.scala | 2 +-
.../spark/sql/execution/streaming/memory.scala | 3 +-
.../sql/execution/ui/AllExecutionsPage.scala | 2 +-
.../spark/sql/execution/ui/ExecutionPage.scala | 2 +-
.../spark/sql/execution/ui/SQLListener.scala | 3 +-
.../apache/spark/sql/execution/ui/SQLTab.scala | 2 +-
.../org/apache/spark/sql/internal/SQLConf.scala | 2 +-
.../apache/spark/sql/sources/interfaces.scala | 3 +-
.../spark/sql/util/QueryExecutionListener.scala | 2 +-
.../apache/spark/sql/DataFrameStatSuite.scala | 2 +-
.../execution/columnar/ColumnTypeSuite.scala | 3 +-
.../hive/thriftserver/HiveThriftServer2.scala | 3 +-
.../SparkExecuteStatementOperation.scala | 2 +-
.../hive/thriftserver/SparkSQLCLIDriver.scala | 2 +-
.../sql/hive/thriftserver/SparkSQLDriver.scala | 2 +-
.../sql/hive/thriftserver/SparkSQLEnv.scala | 3 +-
.../server/SparkSQLOperationManager.scala | 2 +-
.../hive/thriftserver/ui/ThriftServerPage.scala | 2 +-
.../ui/ThriftServerSessionPage.scala | 2 +-
.../hive/thriftserver/ui/ThriftServerTab.scala | 3 +-
.../spark/sql/hive/thriftserver/CliSuite.scala | 3 +-
.../thriftserver/HiveThriftServer2Suites.scala | 3 +-
.../org/apache/spark/sql/hive/HiveCatalog.scala | 2 +-
.../org/apache/spark/sql/hive/HiveContext.scala | 3 +-
.../spark/sql/hive/HiveMetastoreCatalog.scala | 2 +-
.../org/apache/spark/sql/hive/HiveQl.scala | 2 +-
.../org/apache/spark/sql/hive/HiveShim.scala | 2 +-
.../org/apache/spark/sql/hive/SQLBuilder.scala | 2 +-
.../org/apache/spark/sql/hive/TableReader.scala | 2 +-
.../spark/sql/hive/client/HiveClientImpl.scala | 3 +-
.../apache/spark/sql/hive/client/HiveShim.scala | 2 +-
.../sql/hive/client/IsolatedClientLoader.scala | 3 +-
.../hive/execution/ScriptTransformation.scala | 3 +-
.../org/apache/spark/sql/hive/hiveUDFs.scala | 2 +-
.../spark/sql/hive/hiveWriterContainers.scala | 1 +
.../spark/sql/hive/orc/OrcFileOperator.scala | 2 +-
.../apache/spark/sql/hive/orc/OrcFilters.scala | 2 +-
.../apache/spark/sql/hive/orc/OrcRelation.scala | 2 +-
.../spark/sql/hive/HiveSparkSubmitSuite.scala | 1 +
.../spark/sql/hive/client/FiltersSuite.scala | 3 +-
.../spark/sql/hive/client/VersionsSuite.scala | 3 +-
.../org/apache/spark/streaming/Checkpoint.scala | 3 +-
.../apache/spark/streaming/DStreamGraph.scala | 2 +-
.../spark/streaming/StreamingContext.scala | 1 +
.../spark/streaming/dstream/DStream.scala | 3 +-
.../dstream/DStreamCheckpointData.scala | 2 +-
.../streaming/dstream/RawInputDStream.scala | 2 +-
.../streaming/dstream/SocketInputDStream.scala | 2 +-
.../streaming/receiver/BlockGenerator.scala | 3 +-
.../spark/streaming/receiver/RateLimiter.scala | 3 +-
.../receiver/ReceivedBlockHandler.scala | 3 +-
.../streaming/receiver/ReceiverSupervisor.scala | 3 +-
.../receiver/ReceiverSupervisorImpl.scala | 3 +-
.../streaming/scheduler/InputInfoTracker.scala | 2 +-
.../streaming/scheduler/JobGenerator.scala | 3 +-
.../streaming/scheduler/JobScheduler.scala | 2 +-
.../scheduler/ReceivedBlockTracker.scala | 3 +-
.../streaming/scheduler/ReceiverTracker.scala | 1 +
.../scheduler/rate/PIDRateEstimator.scala | 2 +-
.../spark/streaming/ui/StreamingPage.scala | 2 +-
.../spark/streaming/ui/StreamingTab.scala | 3 +-
.../streaming/util/BatchedWriteAheadLog.scala | 3 +-
.../streaming/util/FileBasedWriteAheadLog.scala | 3 +-
.../util/FileBasedWriteAheadLogReader.scala | 2 +-
.../util/RateLimitedOutputStream.scala | 2 +-
.../spark/streaming/util/RawTextSender.scala | 3 +-
.../spark/streaming/util/RecurringTimer.scala | 2 +-
.../streaming/util/WriteAheadLogUtils.scala | 3 +-
.../apache/spark/streaming/FailureSuite.scala | 1 +
.../spark/streaming/InputStreamsSuite.scala | 2 +-
.../spark/streaming/MasterFailureTest.scala | 2 +-
.../streaming/ReceivedBlockHandlerSuite.scala | 1 +
.../streaming/ReceivedBlockTrackerSuite.scala | 3 +-
.../spark/streaming/StreamingContextSuite.scala | 1 +
.../streaming/StreamingListenerSuite.scala | 2 +-
.../apache/spark/streaming/TestSuiteBase.scala | 3 +-
.../deploy/yarn/AMDelegationTokenRenewer.scala | 3 +-
.../spark/deploy/yarn/ApplicationMaster.scala | 1 +
.../org/apache/spark/deploy/yarn/Client.scala | 3 +-
.../yarn/ClientDistributedCacheManager.scala | 2 +-
.../yarn/ExecutorDelegationTokenUpdater.scala | 3 +-
.../spark/deploy/yarn/ExecutorRunnable.scala | 3 +-
.../spark/deploy/yarn/YarnAllocator.scala | 3 +-
.../apache/spark/deploy/yarn/YarnRMClient.scala | 3 +-
.../cluster/SchedulerExtensionService.scala | 3 +-
.../cluster/YarnClientSchedulerBackend.scala | 3 +-
.../cluster/YarnSchedulerBackend.scala | 3 +-
.../deploy/yarn/BaseYarnClusterSuite.scala | 1 +
.../spark/deploy/yarn/YarnClusterSuite.scala | 1 +
.../yarn/YarnShuffleIntegrationSuite.scala | 1 +
.../deploy/yarn/YarnSparkHadoopUtilSuite.scala | 3 +-
.../ExtensionServiceIntegrationSuite.scala | 3 +-
399 files changed, 742 insertions(+), 510 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/Accumulator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/Accumulator.scala b/core/src/main/scala/org/apache/spark/Accumulator.scala
index 0e4bcc3..9857529 100644
--- a/core/src/main/scala/org/apache/spark/Accumulator.scala
+++ b/core/src/main/scala/org/apache/spark/Accumulator.scala
@@ -23,6 +23,7 @@ import javax.annotation.concurrent.GuardedBy
import scala.collection.mutable
import scala.ref.WeakReference
+import org.apache.spark.internal.Logging
import org.apache.spark.storage.{BlockId, BlockStatus}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/ContextCleaner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ContextCleaner.scala b/core/src/main/scala/org/apache/spark/ContextCleaner.scala
index 17014e4..8fc657c 100644
--- a/core/src/main/scala/org/apache/spark/ContextCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/ContextCleaner.scala
@@ -23,6 +23,7 @@ import java.util.concurrent.{ConcurrentLinkedQueue, ScheduledExecutorService, Ti
import scala.collection.JavaConverters._
import org.apache.spark.broadcast.Broadcast
+import org.apache.spark.internal.Logging
import org.apache.spark.rdd.{RDD, ReliableRDDCheckpointData}
import org.apache.spark.util.{ThreadUtils, Utils}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
index 9b8279f..0926d05 100644
--- a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
+++ b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
@@ -24,6 +24,7 @@ import scala.util.control.ControlThrowable
import com.codahale.metrics.{Gauge, MetricRegistry}
+import org.apache.spark.internal.Logging
import org.apache.spark.metrics.source.Source
import org.apache.spark.scheduler._
import org.apache.spark.util.{Clock, SystemClock, ThreadUtils}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala b/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
index 7f474ed..e8748dd 100644
--- a/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
+++ b/core/src/main/scala/org/apache/spark/HeartbeatReceiver.scala
@@ -22,6 +22,7 @@ import java.util.concurrent.{ScheduledFuture, TimeUnit}
import scala.collection.mutable
import scala.concurrent.Future
+import org.apache.spark.internal.Logging
import org.apache.spark.rpc.{RpcCallContext, RpcEnv, ThreadSafeRpcEndpoint}
import org.apache.spark.scheduler._
import org.apache.spark.storage.BlockManagerId
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/HttpServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/HttpServer.scala b/core/src/main/scala/org/apache/spark/HttpServer.scala
index 3c80842..9fad1f6 100644
--- a/core/src/main/scala/org/apache/spark/HttpServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpServer.scala
@@ -28,6 +28,7 @@ import org.eclipse.jetty.servlet.{DefaultServlet, ServletContextHandler, Servlet
import org.eclipse.jetty.util.security.{Constraint, Password}
import org.eclipse.jetty.util.thread.QueuedThreadPool
+import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/Logging.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
deleted file mode 100644
index efab61e..0000000
--- a/core/src/main/scala/org/apache/spark/Logging.scala
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark
-
-import org.apache.log4j.{Level, LogManager, PropertyConfigurator}
-import org.slf4j.{Logger, LoggerFactory}
-import org.slf4j.impl.StaticLoggerBinder
-
-import org.apache.spark.util.Utils
-
-/**
- * Utility trait for classes that want to log data. Creates a SLF4J logger for the class and allows
- * logging messages at different levels using methods that only evaluate parameters lazily if the
- * log level is enabled.
- */
-private[spark] trait Logging {
-
- // Make the log field transient so that objects with Logging can
- // be serialized and used on another machine
- @transient private var log_ : Logger = null
-
- // Method to get the logger name for this object
- protected def logName = {
- // Ignore trailing $'s in the class names for Scala objects
- this.getClass.getName.stripSuffix("$")
- }
-
- // Method to get or create the logger for this object
- protected def log: Logger = {
- if (log_ == null) {
- initializeLogIfNecessary(false)
- log_ = LoggerFactory.getLogger(logName)
- }
- log_
- }
-
- // Log methods that take only a String
- protected def logInfo(msg: => String) {
- if (log.isInfoEnabled) log.info(msg)
- }
-
- protected def logDebug(msg: => String) {
- if (log.isDebugEnabled) log.debug(msg)
- }
-
- protected def logTrace(msg: => String) {
- if (log.isTraceEnabled) log.trace(msg)
- }
-
- protected def logWarning(msg: => String) {
- if (log.isWarnEnabled) log.warn(msg)
- }
-
- protected def logError(msg: => String) {
- if (log.isErrorEnabled) log.error(msg)
- }
-
- // Log methods that take Throwables (Exceptions/Errors) too
- protected def logInfo(msg: => String, throwable: Throwable) {
- if (log.isInfoEnabled) log.info(msg, throwable)
- }
-
- protected def logDebug(msg: => String, throwable: Throwable) {
- if (log.isDebugEnabled) log.debug(msg, throwable)
- }
-
- protected def logTrace(msg: => String, throwable: Throwable) {
- if (log.isTraceEnabled) log.trace(msg, throwable)
- }
-
- protected def logWarning(msg: => String, throwable: Throwable) {
- if (log.isWarnEnabled) log.warn(msg, throwable)
- }
-
- protected def logError(msg: => String, throwable: Throwable) {
- if (log.isErrorEnabled) log.error(msg, throwable)
- }
-
- protected def isTraceEnabled(): Boolean = {
- log.isTraceEnabled
- }
-
- protected def initializeLogIfNecessary(isInterpreter: Boolean): Unit = {
- if (!Logging.initialized) {
- Logging.initLock.synchronized {
- if (!Logging.initialized) {
- initializeLogging(isInterpreter)
- }
- }
- }
- }
-
- private def initializeLogging(isInterpreter: Boolean): Unit = {
- // Don't use a logger in here, as this is itself occurring during initialization of a logger
- // If Log4j 1.2 is being used, but is not initialized, load a default properties file
- val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
- // This distinguishes the log4j 1.2 binding, currently
- // org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently
- // org.apache.logging.slf4j.Log4jLoggerFactory
- val usingLog4j12 = "org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
- if (usingLog4j12) {
- val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
- // scalastyle:off println
- if (!log4j12Initialized) {
- val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
- Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
- case Some(url) =>
- PropertyConfigurator.configure(url)
- System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
- case None =>
- System.err.println(s"Spark was unable to load $defaultLogProps")
- }
- }
-
- if (isInterpreter) {
- // Use the repl's main class to define the default log level when running the shell,
- // overriding the root logger's config if they're different.
- val rootLogger = LogManager.getRootLogger()
- val replLogger = LogManager.getLogger(logName)
- val replLevel = Option(replLogger.getLevel()).getOrElse(Level.WARN)
- if (replLevel != rootLogger.getEffectiveLevel()) {
- System.err.printf("Setting default log level to \"%s\".\n", replLevel)
- System.err.println("To adjust logging level use sc.setLogLevel(newLevel).")
- rootLogger.setLevel(replLevel)
- }
- }
- // scalastyle:on println
- }
- Logging.initialized = true
-
- // Force a call into slf4j to initialize it. Avoids this happening from multiple threads
- // and triggering this: http://mailman.qos.ch/pipermail/slf4j-dev/2010-April/002956.html
- log
- }
-}
-
-private object Logging {
- @volatile private var initialized = false
- val initLock = new Object()
- try {
- // We use reflection here to handle the case where users remove the
- // slf4j-to-jul bridge order to route their logs to JUL.
- val bridgeClass = Utils.classForName("org.slf4j.bridge.SLF4JBridgeHandler")
- bridgeClass.getMethod("removeHandlersForRootLogger").invoke(null)
- val installed = bridgeClass.getMethod("isInstalled").invoke(null).asInstanceOf[Boolean]
- if (!installed) {
- bridgeClass.getMethod("install").invoke(null)
- }
- } catch {
- case e: ClassNotFoundException => // can't log anything yet so just fail silently
- }
-}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
index 9cb6159..3a5caa3 100644
--- a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
+++ b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -25,6 +25,7 @@ import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, Map}
import scala.reflect.ClassTag
+import org.apache.spark.internal.Logging
import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEndpointRef, RpcEnv}
import org.apache.spark.scheduler.MapStatus
import org.apache.spark.shuffle.MetadataFetchFailedException
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/SSLOptions.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SSLOptions.scala b/core/src/main/scala/org/apache/spark/SSLOptions.scala
index d755f07..30db6cc 100644
--- a/core/src/main/scala/org/apache/spark/SSLOptions.scala
+++ b/core/src/main/scala/org/apache/spark/SSLOptions.scala
@@ -23,6 +23,8 @@ import javax.net.ssl.SSLContext
import org.eclipse.jetty.util.ssl.SslContextFactory
+import org.apache.spark.internal.Logging
+
/**
* SSLOptions class is a common container for SSL configuration options. It offers methods to
* generate specific objects to configure SSL for different communication protocols.
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/SecurityManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index 6132fa3..e8f6822 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -28,6 +28,7 @@ import com.google.common.io.Files
import org.apache.hadoop.io.Text
import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.internal.Logging
import org.apache.spark.network.sasl.SecretKeyHolder
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/SparkConf.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 22e9c5e..5da2e98 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -17,15 +17,15 @@
package org.apache.spark
-import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
+import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
import scala.collection.mutable.LinkedHashSet
import org.apache.avro.{Schema, SchemaNormalization}
+import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.{ConfigEntry, OptionalConfigEntry}
-import org.apache.spark.network.util.JavaUtils
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 5c7ae57..d2cf3bf 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -50,6 +50,7 @@ import org.apache.spark.broadcast.Broadcast
import org.apache.spark.deploy.{LocalSparkCluster, SparkHadoopUtil}
import org.apache.spark.input.{FixedLengthBinaryInputFormat, PortableDataStream, StreamInputFormat,
WholeTextFileInputFormat}
+import org.apache.spark.internal.Logging
import org.apache.spark.io.CompressionCodec
import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
import org.apache.spark.rdd._
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/SparkEnv.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 668a913..459fab8 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -28,6 +28,7 @@ import com.google.common.collect.MapMaker
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.api.python.PythonWorkerFactory
import org.apache.spark.broadcast.BroadcastManager
+import org.apache.spark.internal.Logging
import org.apache.spark.memory.{MemoryManager, StaticMemoryManager, UnifiedMemoryManager}
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.network.BlockTransferService
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
index 5864786..17daac1 100644
--- a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
+++ b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapred._
import org.apache.hadoop.mapreduce.TaskType
+import org.apache.spark.internal.Logging
import org.apache.spark.mapred.SparkHadoopMapRedUtil
import org.apache.spark.rdd.HadoopRDD
import org.apache.spark.util.SerializableJobConf
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/TaskContextImpl.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/TaskContextImpl.scala b/core/src/main/scala/org/apache/spark/TaskContextImpl.scala
index 7e96040..c9354b3 100644
--- a/core/src/main/scala/org/apache/spark/TaskContextImpl.scala
+++ b/core/src/main/scala/org/apache/spark/TaskContextImpl.scala
@@ -20,6 +20,7 @@ package org.apache.spark
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.internal.Logging
import org.apache.spark.memory.TaskMemoryManager
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.metrics.source.Source
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/TaskEndReason.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/TaskEndReason.scala b/core/src/main/scala/org/apache/spark/TaskEndReason.scala
index 509fb2e..83af226 100644
--- a/core/src/main/scala/org/apache/spark/TaskEndReason.scala
+++ b/core/src/main/scala/org/apache/spark/TaskEndReason.scala
@@ -23,6 +23,7 @@ import scala.util.Try
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.AccumulableInfo
import org.apache.spark.storage.BlockManagerId
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/api/python/PythonGatewayServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonGatewayServer.scala b/core/src/main/scala/org/apache/spark/api/python/PythonGatewayServer.scala
index 164e950..6c40722 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonGatewayServer.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonGatewayServer.scala
@@ -22,7 +22,7 @@ import java.net.Socket
import py4j.GatewayServer
-import org.apache.spark.Logging
+import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala b/core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala
index d2beef2..6f67306 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala
@@ -23,8 +23,9 @@ import scala.util.{Failure, Success, Try}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io._
-import org.apache.spark.{Logging, SparkException}
+import org.apache.spark.SparkException
import org.apache.spark.broadcast.Broadcast
+import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.util.{SerializableConfiguration, Utils}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index 8f30677..f423b2e 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -36,6 +36,7 @@ import org.apache.spark._
import org.apache.spark.api.java.{JavaPairRDD, JavaRDD, JavaSparkContext}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.input.PortableDataStream
+import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.util.{SerializableConfiguration, Utils}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
index 433764b..3df87f6 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
@@ -26,6 +26,7 @@ import scala.collection.mutable
import scala.collection.JavaConverters._
import org.apache.spark._
+import org.apache.spark.internal.Logging
import org.apache.spark.util.{RedirectThread, Utils}
private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String, String])
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala b/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala
index 55db938..1c632eb 100644
--- a/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala
@@ -28,8 +28,9 @@ import scala.util.Try
import net.razorvine.pickle.{Pickler, Unpickler}
-import org.apache.spark.{Logging, SparkException}
+import org.apache.spark.SparkException
import org.apache.spark.api.java.JavaRDD
+import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
/** Utilities for serialization / deserialization between Python and Java, using Pickle. */
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/r/RBackend.scala b/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
index 8b3be0d..9d29a84 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
@@ -29,7 +29,8 @@ import io.netty.channel.socket.nio.NioServerSocketChannel
import io.netty.handler.codec.LengthFieldBasedFrameDecoder
import io.netty.handler.codec.bytes.{ByteArrayDecoder, ByteArrayEncoder}
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.SparkConf
+import org.apache.spark.internal.Logging
/**
* Netty-based backend server that is used to communicate between R and Java.
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala b/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
index 9bddd72..c416e83 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
@@ -25,8 +25,8 @@ import scala.language.existentials
import io.netty.channel.{ChannelHandlerContext, SimpleChannelInboundHandler}
import io.netty.channel.ChannelHandler.Sharable
-import org.apache.spark.Logging
import org.apache.spark.api.r.SerDe._
+import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
index 401f362..588a57e 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala
@@ -29,6 +29,7 @@ import scala.util.Try
import org.apache.spark._
import org.apache.spark.api.java.{JavaPairRDD, JavaRDD, JavaSparkContext}
import org.apache.spark.broadcast.Broadcast
+import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala
index 0d68872..24d953e 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala
@@ -21,8 +21,8 @@ import java.io.Serializable
import scala.reflect.ClassTag
-import org.apache.spark.Logging
import org.apache.spark.SparkException
+import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/broadcast/BroadcastManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/broadcast/BroadcastManager.scala b/core/src/main/scala/org/apache/spark/broadcast/BroadcastManager.scala
index be416c4..e88988f 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/BroadcastManager.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/BroadcastManager.scala
@@ -21,7 +21,8 @@ import java.util.concurrent.atomic.AtomicLong
import scala.reflect.ClassTag
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.internal.Logging
private[spark] class BroadcastManager(
val isDriver: Boolean,
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
index 8091aa8..2634d88 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala
@@ -25,6 +25,7 @@ import scala.reflect.ClassTag
import scala.util.Random
import org.apache.spark._
+import org.apache.spark.internal.Logging
import org.apache.spark.io.CompressionCodec
import org.apache.spark.serializer.Serializer
import org.apache.spark.storage.{BlockId, BroadcastBlockId, StorageLevel}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/Client.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala
index dcef03e..640f25f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -22,11 +22,12 @@ import scala.concurrent.ExecutionContext
import scala.reflect.ClassTag
import scala.util.{Failure, Success}
-import org.apache.log4j.{Level, Logger}
+import org.apache.log4j.Logger
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.master.{DriverState, Master}
+import org.apache.spark.internal.Logging
import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcEnv, ThreadSafeRpcEndpoint}
import org.apache.spark.util.{SparkExitCode, ThreadUtils, Utils}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala b/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala
index c514a1a..adc0de1 100644
--- a/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala
@@ -21,7 +21,8 @@ import java.util.concurrent.CountDownLatch
import scala.collection.JavaConverters._
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.internal.Logging
import org.apache.spark.network.TransportContext
import org.apache.spark.network.netty.SparkTransportConf
import org.apache.spark.network.sasl.SaslServerBootstrap
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
index 305994a..abb98f9 100644
--- a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala
@@ -32,8 +32,9 @@ import scala.sys.process._
import org.json4s._
import org.json4s.jackson.JsonMethods
-import org.apache.spark.{Logging, SparkConf, SparkContext}
+import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.deploy.master.RecoveryState
+import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
index 2dfb813..84aa894 100644
--- a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
@@ -19,9 +19,10 @@ package org.apache.spark.deploy
import scala.collection.mutable.ArrayBuffer
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.SparkConf
import org.apache.spark.deploy.master.Master
import org.apache.spark.deploy.worker.Worker
+import org.apache.spark.internal.Logging
import org.apache.spark.rpc.RpcEnv
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala b/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
index 81718e0..3d2cabc 100644
--- a/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala
@@ -26,8 +26,8 @@ import scala.collection.JavaConverters._
import com.google.common.io.{ByteStreams, Files}
-import org.apache.spark.Logging
import org.apache.spark.api.r.RUtils
+import org.apache.spark.internal.Logging
import org.apache.spark.util.{RedirectThread, Utils}
private[deploy] object RPackageUtils extends Logging {
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/SparkCuratorUtil.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkCuratorUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkCuratorUtil.scala
index 8d5e716..8247110 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkCuratorUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkCuratorUtil.scala
@@ -23,7 +23,8 @@ import org.apache.curator.framework.{CuratorFramework, CuratorFrameworkFactory}
import org.apache.curator.retry.ExponentialBackoffRetry
import org.apache.zookeeper.KeeperException
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.SparkConf
+import org.apache.spark.internal.Logging
private[spark] object SparkCuratorUtil extends Logging {
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 270ca84..06b7b38 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -35,8 +35,9 @@ import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifie
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.security.{Credentials, UserGroupInformation}
-import org.apache.spark.{Logging, SparkConf, SparkException}
+import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
index b9dec62..43b17e5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala
@@ -23,12 +23,13 @@ import java.util.concurrent.atomic.{AtomicBoolean, AtomicReference}
import scala.util.control.NonFatal
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.SparkConf
import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.master.Master
+import org.apache.spark.internal.Logging
import org.apache.spark.rpc._
-import org.apache.spark.util.{RpcUtils, ThreadUtils, Utils}
+import org.apache.spark.util.{RpcUtils, ThreadUtils}
/**
* Interface allowing applications to speak with a Spark deploy cluster. Takes a master URL,
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
index 000f7e8..a370526 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
@@ -28,7 +28,7 @@ import com.codahale.metrics.{Counter, MetricRegistry, Timer}
import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache, RemovalListener, RemovalNotification}
import org.eclipse.jetty.servlet.FilterHolder
-import org.apache.spark.Logging
+import org.apache.spark.internal.Logging
import org.apache.spark.metrics.source.Source
import org.apache.spark.ui.SparkUI
import org.apache.spark.util.Clock
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
index f885798..d5afb33 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
@@ -31,8 +31,9 @@ import org.apache.hadoop.hdfs.DistributedFileSystem
import org.apache.hadoop.hdfs.protocol.HdfsConstants
import org.apache.hadoop.security.AccessControlException
-import org.apache.spark.{Logging, SecurityManager, SparkConf, SparkException}
+import org.apache.spark.{SecurityManager, SparkConf, SparkException}
import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.internal.Logging
import org.apache.spark.scheduler._
import org.apache.spark.ui.SparkUI
import org.apache.spark.util.{Clock, SystemClock, ThreadUtils, Utils}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
index 076bdc5..d821474 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -25,8 +25,9 @@ import scala.util.control.NonFatal
import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.internal.Logging
import org.apache.spark.status.api.v1.{ApiRootResource, ApplicationInfo, ApplicationsListResource, UIRoot}
import org.apache.spark.ui.{SparkUI, UIUtils, WebUI}
import org.apache.spark.ui.JettyUtils._
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
index fc3790f..2eddb5f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
@@ -19,7 +19,8 @@ package org.apache.spark.deploy.history
import scala.annotation.tailrec
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.SparkConf
+import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
index 1aa8cd5..f2b5ea7 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
@@ -21,7 +21,7 @@ import java.io._
import scala.reflect.ClassTag
-import org.apache.spark.Logging
+import org.apache.spark.internal.Logging
import org.apache.spark.serializer.{DeserializationStream, SerializationStream, Serializer}
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index c97ad4d..01901bb 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -31,7 +31,7 @@ import scala.util.Random
import org.apache.hadoop.fs.Path
-import org.apache.spark.{Logging, SecurityManager, SparkConf, SparkException}
+import org.apache.spark.{SecurityManager, SparkConf, SparkException}
import org.apache.spark.deploy.{ApplicationDescription, DriverDescription,
ExecutorState, SparkHadoopUtil}
import org.apache.spark.deploy.DeployMessages._
@@ -40,6 +40,7 @@ import org.apache.spark.deploy.master.DriverState.DriverState
import org.apache.spark.deploy.master.MasterMessages._
import org.apache.spark.deploy.master.ui.MasterWebUI
import org.apache.spark.deploy.rest.StandaloneRestServer
+import org.apache.spark.internal.Logging
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.rpc._
import org.apache.spark.scheduler.{EventLoggingListener, ReplayListenerBus}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/master/RecoveryModeFactory.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/RecoveryModeFactory.scala b/core/src/main/scala/org/apache/spark/deploy/master/RecoveryModeFactory.scala
index c4c3283..ffdd635 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/RecoveryModeFactory.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/RecoveryModeFactory.scala
@@ -17,8 +17,9 @@
package org.apache.spark.deploy.master
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.SparkConf
import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.internal.Logging
import org.apache.spark.serializer.Serializer
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
index 336cb24..1e8dabf 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala
@@ -20,8 +20,9 @@ package org.apache.spark.deploy.master
import org.apache.curator.framework.CuratorFramework
import org.apache.curator.framework.recipes.leader.{LeaderLatch, LeaderLatchListener}
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkCuratorUtil
+import org.apache.spark.internal.Logging
private[master] class ZooKeeperLeaderElectionAgent(val masterInstance: LeaderElectable,
conf: SparkConf) extends LeaderLatchListener with LeaderElectionAgent with Logging {
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
index b0cedef..79f7721 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala
@@ -25,8 +25,9 @@ import scala.reflect.ClassTag
import org.apache.curator.framework.CuratorFramework
import org.apache.zookeeper.CreateMode
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkCuratorUtil
+import org.apache.spark.internal.Logging
import org.apache.spark.serializer.Serializer
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index d754392..ae16ce9 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -17,8 +17,8 @@
package org.apache.spark.deploy.master.ui
-import org.apache.spark.Logging
import org.apache.spark.deploy.master.Master
+import org.apache.spark.internal.Logging
import org.apache.spark.status.api.v1.{ApiRootResource, ApplicationInfo, ApplicationsListResource,
UIRoot}
import org.apache.spark.ui.{SparkUI, WebUI}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcher.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcher.scala b/core/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcher.scala
index 7091513..a057977 100644
--- a/core/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcher.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/mesos/MesosClusterDispatcher.scala
@@ -19,9 +19,10 @@ package org.apache.spark.deploy.mesos
import java.util.concurrent.CountDownLatch
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.mesos.ui.MesosClusterUI
import org.apache.spark.deploy.rest.mesos.MesosRestServer
+import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.cluster.mesos._
import org.apache.spark.util.{ShutdownHookManager, Utils}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala b/core/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala
index c0f9129..6b297c4 100644
--- a/core/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala
@@ -22,8 +22,9 @@ import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
import scala.collection.JavaConverters._
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.ExternalShuffleService
+import org.apache.spark.internal.Logging
import org.apache.spark.network.client.{RpcResponseCallback, TransportClient}
import org.apache.spark.network.shuffle.ExternalShuffleBlockHandler
import org.apache.spark.network.shuffle.protocol.BlockTransferMessage
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
index d3e092a..c5a5876 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala
@@ -30,7 +30,8 @@ import scala.io.Source
import com.fasterxml.jackson.core.JsonProcessingException
-import org.apache.spark.{Logging, SPARK_VERSION => sparkVersion, SparkConf}
+import org.apache.spark.{SPARK_VERSION => sparkVersion, SparkConf}
+import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
index 8e0862d..14244ea 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala
@@ -29,7 +29,8 @@ import org.eclipse.jetty.util.thread.QueuedThreadPool
import org.json4s._
import org.json4s.jackson.JsonMethods._
-import org.apache.spark.{Logging, SPARK_VERSION => sparkVersion, SparkConf}
+import org.apache.spark.{SPARK_VERSION => sparkVersion, SparkConf}
+import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
index ce02ee2..a4efafc 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
@@ -22,9 +22,9 @@ import java.io.{File, FileOutputStream, InputStream, IOException}
import scala.collection.JavaConverters._
import scala.collection.Map
-import org.apache.spark.Logging
import org.apache.spark.SecurityManager
import org.apache.spark.deploy.Command
+import org.apache.spark.internal.Logging
import org.apache.spark.launcher.WorkerCommandBuilder
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
index 7f4fe26..9c6bc5c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
@@ -25,11 +25,12 @@ import scala.collection.JavaConverters._
import com.google.common.io.Files
import org.apache.hadoop.fs.Path
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.{DriverDescription, SparkHadoopUtil}
import org.apache.spark.deploy.DeployMessages.DriverStateChanged
import org.apache.spark.deploy.master.DriverState
import org.apache.spark.deploy.master.DriverState.DriverState
+import org.apache.spark.internal.Logging
import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.util.{Clock, SystemClock, Utils}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index 208a1bb..f9c92c3 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -24,9 +24,10 @@ import scala.collection.JavaConverters._
import com.google.common.io.Files
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
+import org.apache.spark.internal.Logging
import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.util.{ShutdownHookManager, Utils}
import org.apache.spark.util.logging.FileAppender
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index c18c8c7..1b7637a 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -29,12 +29,13 @@ import scala.concurrent.ExecutionContext
import scala.util.{Failure, Random, Success}
import scala.util.control.NonFatal
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.{Command, ExecutorDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.ExternalShuffleService
import org.apache.spark.deploy.master.{DriverState, Master}
import org.apache.spark.deploy.worker.ui.WorkerWebUI
+import org.apache.spark.internal.Logging
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.rpc._
import org.apache.spark.util.{ThreadUtils, Utils}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
index ab56fde..af29de3 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
@@ -17,7 +17,7 @@
package org.apache.spark.deploy.worker
-import org.apache.spark.Logging
+import org.apache.spark.internal.Logging
import org.apache.spark.rpc._
/**
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
index 09ae64a..6500cab 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/LogPage.scala
@@ -22,7 +22,7 @@ import javax.servlet.http.HttpServletRequest
import scala.xml.Node
-import org.apache.spark.Logging
+import org.apache.spark.internal.Logging
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils
import org.apache.spark.util.logging.RollingFileAppender
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
index b45b682..db696b0 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
@@ -20,8 +20,8 @@ package org.apache.spark.deploy.worker.ui
import java.io.File
import javax.servlet.http.HttpServletRequest
-import org.apache.spark.Logging
import org.apache.spark.deploy.worker.Worker
+import org.apache.spark.internal.Logging
import org.apache.spark.ui.{SparkUI, WebUI}
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.util.RpcUtils
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index 3b5cb18..320a200 100644
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -28,6 +28,7 @@ import org.apache.spark._
import org.apache.spark.TaskState.TaskState
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.deploy.worker.WorkerWatcher
+import org.apache.spark.internal.Logging
import org.apache.spark.rpc._
import org.apache.spark.scheduler.TaskDescription
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/executor/Executor.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index 48372d7..6327d55 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -29,6 +29,7 @@ import scala.util.control.NonFatal
import org.apache.spark._
import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.internal.Logging
import org.apache.spark.memory.TaskMemoryManager
import org.apache.spark.rpc.RpcTimeout
import org.apache.spark.scheduler.{AccumulableInfo, DirectTaskResult, IndirectTaskResult, Task}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
index cfd9bcd..680cfb7 100644
--- a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
@@ -25,9 +25,10 @@ import org.apache.mesos.{Executor => MesosExecutor, ExecutorDriver, MesosExecuto
import org.apache.mesos.Protos.{TaskStatus => MesosTaskStatus, _}
import org.apache.mesos.protobuf.ByteString
-import org.apache.spark.{Logging, SparkConf, SparkEnv, TaskState}
+import org.apache.spark.{SparkConf, SparkEnv, TaskState}
import org.apache.spark.TaskState.TaskState
import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.cluster.mesos.MesosTaskLaunchData
import org.apache.spark.util.Utils
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala b/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
index 9da9cb5..02219a8 100644
--- a/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
+++ b/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
@@ -22,6 +22,7 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.spark._
import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.AccumulableInfo
import org.apache.spark.storage.{BlockId, BlockStatus}
http://git-wip-us.apache.org/repos/asf/spark/blob/8ef3399a/core/src/main/scala/org/apache/spark/input/FixedLengthBinaryInputFormat.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/input/FixedLengthBinaryInputFormat.scala b/core/src/main/scala/org/apache/spark/input/FixedLengthBinaryInputFormat.scala
index bc98273..978afaf 100644
--- a/core/src/main/scala/org/apache/spark/input/FixedLengthBinaryInputFormat.scala
+++ b/core/src/main/scala/org/apache/spark/input/FixedLengthBinaryInputFormat.scala
@@ -22,7 +22,7 @@ import org.apache.hadoop.io.{BytesWritable, LongWritable}
import org.apache.hadoop.mapreduce.{InputSplit, JobContext, RecordReader, TaskAttemptContext}
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
-import org.apache.spark.Logging
+import org.apache.spark.internal.Logging
/**
* Custom Input Format for reading and splitting flat binary files that contain records,
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org