Posted to commits@spark.apache.org by rx...@apache.org on 2016/01/05 09:39:53 UTC
spark git commit: [SPARK-12641] Remove unused code related to Hadoop 0.23
Repository: spark
Updated Branches:
refs/heads/master 53beddc5b -> 8eb2dc713
[SPARK-12641] Remove unused code related to Hadoop 0.23
Currently we don't support Hadoop 0.23, but some code related to it remains, so let's clean it up.
Author: Kousuke Saruta <sa...@oss.nttdata.co.jp>
Closes #10590 from sarutak/SPARK-12641.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8eb2dc71
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8eb2dc71
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8eb2dc71
Branch: refs/heads/master
Commit: 8eb2dc7133b4d2143adffc2bdbb61d96bd41a0ac
Parents: 53beddc
Author: Kousuke Saruta <sa...@oss.nttdata.co.jp>
Authored: Tue Jan 5 00:39:50 2016 -0800
Committer: Reynold Xin <rx...@databricks.com>
Committed: Tue Jan 5 00:39:50 2016 -0800
----------------------------------------------------------------------
core/src/main/scala/org/apache/spark/util/Utils.scala | 13 +++----------
1 file changed, 3 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/8eb2dc71/core/src/main/scala/org/apache/spark/util/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 0c1f9c1..9bdcc4d 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -662,9 +662,7 @@ private[spark] object Utils extends Logging {
   private[spark] def isRunningInYarnContainer(conf: SparkConf): Boolean = {
     // These environment variables are set by YARN.
-    // For Hadoop 0.23.X, we check for YARN_LOCAL_DIRS (we use this below in getYarnLocalDirs())
-    // For Hadoop 2.X, we check for CONTAINER_ID.
-    conf.getenv("CONTAINER_ID") != null || conf.getenv("YARN_LOCAL_DIRS") != null
+    conf.getenv("CONTAINER_ID") != null
   }
 
   /**
@@ -740,17 +738,12 @@ private[spark] object Utils extends Logging {
         logError(s"Failed to create local root dir in $root. Ignoring this directory.")
         None
       }
-    }.toArray
+    }
   }
 
   /** Get the Yarn approved local directories. */
   private def getYarnLocalDirs(conf: SparkConf): String = {
-    // Hadoop 0.23 and 2.x have different Environment variable names for the
-    // local dirs, so lets check both. We assume one of the 2 is set.
-    // LOCAL_DIRS => 2.X, YARN_LOCAL_DIRS => 0.23.X
-    val localDirs = Option(conf.getenv("YARN_LOCAL_DIRS"))
-      .getOrElse(Option(conf.getenv("LOCAL_DIRS"))
-      .getOrElse(""))
+    val localDirs = Option(conf.getenv("LOCAL_DIRS")).getOrElse("")
     if (localDirs.isEmpty) {
       throw new Exception("Yarn Local dirs can't be empty")
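
For readers skimming the diff: with the Hadoop 0.23 branch gone, both helpers reduce to a single environment-variable lookup, since Hadoop 2.x YARN sets CONTAINER_ID and LOCAL_DIRS for every container. Below is a minimal standalone sketch of the resulting logic, assuming plain sys.env in place of SparkConf.getenv; the object and main method are hypothetical, for illustration only, not the actual Utils API.

    // Hypothetical standalone sketch of the simplified checks (not the Utils API).
    object YarnEnvSketch {
      // After this change, YARN container detection checks only CONTAINER_ID,
      // which Hadoop 2.x YARN sets for every container.
      def isRunningInYarnContainer: Boolean =
        sys.env.get("CONTAINER_ID").isDefined

      // Likewise, the approved local directories come from LOCAL_DIRS alone;
      // the Hadoop 0.23 fallback to YARN_LOCAL_DIRS is gone.
      def yarnLocalDirs: String = {
        val localDirs = sys.env.getOrElse("LOCAL_DIRS", "")
        if (localDirs.isEmpty) {
          throw new Exception("Yarn Local dirs can't be empty")
        }
        localDirs
      }

      def main(args: Array[String]): Unit = {
        println(s"In YARN container: $isRunningInYarnContainer")
      }
    }
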
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org