You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ac...@apache.org on 2009/08/14 10:16:10 UTC
svn commit: r804115 - in /hadoop/common/trunk: CHANGES.txt
src/java/org/apache/hadoop/util/Shell.java
Author: acmurthy
Date: Fri Aug 14 08:16:10 2009
New Revision: 804115
URL: http://svn.apache.org/viewvc?rev=804115&view=rev
Log:
HADOOP-6192. Fix Shell.getUlimitMemoryCommand to not rely on Map-Reduce specific configs.
Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/src/java/org/apache/hadoop/util/Shell.java
Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=804115&r1=804114&r2=804115&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Fri Aug 14 08:16:10 2009
@@ -917,6 +917,9 @@
HADOOP-6188. TestTrash uses java.io.File api but not hadoop FileSystem api.
(Boris Shkolnik via szetszwo)
+ HADOOP-6192. Fix Shell.getUlimitMemoryCommand to not rely on Map-Reduce
+ specific configs. (acmurthy)
+
Release 0.20.1 - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/util/Shell.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/util/Shell.java?rev=804115&r1=804114&r2=804115&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/util/Shell.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/util/Shell.java Fri Aug 14 08:16:10 2009
@@ -63,6 +63,31 @@
/** Whether or not the script timed out */
private AtomicBoolean timedOut;
+ /** a Unix command to get ulimit of a process. */
+ public static final String ULIMIT_COMMAND = "ulimit";
+
+ /**
+ * Get the Unix command for setting the maximum virtual memory available
+ * to a given child process. This is only relevant when we are forking a
+ * process from within the Mapper or the Reducer implementations.
+ * Also see Hadoop Pipes and Hadoop Streaming.
+ *
+ * It also checks that we are running on a *nix platform; otherwise
+ * (e.g. on Cygwin/Windows) it returns <code>null</code>.
+ * @param memoryLimit virtual memory limit
+ * @return a <code>String[]</code> with the ulimit command arguments or
+ * <code>null</code> if we are running on a non *nix platform or
+ * if the limit is unspecified.
+ */
+ public static String[] getUlimitMemoryCommand(int memoryLimit) {
+ // ulimit isn't supported on Windows
+ if (WINDOWS) {
+ return null;
+ }
+
+ return new String[] {ULIMIT_COMMAND, "-v", String.valueOf(memoryLimit)};
+ }
+
/**
* Get the Unix command for setting the maximum virtual memory available
* to a given child process. This is only relevant when we are forking a
@@ -75,7 +100,9 @@
* @return a <code>String[]</code> with the ulimit command arguments or
* <code>null</code> if we are running on a non *nix platform or
* if the limit is unspecified.
+ * @deprecated Use {@link #getUlimitMemoryCommand(int)}
*/
+ @Deprecated
public static String[] getUlimitMemoryCommand(Configuration conf) {
// ulimit isn't supported on Windows
if (WINDOWS) {
@@ -90,8 +117,8 @@
// Parse it to ensure it is legal/sane
int memoryLimit = Integer.valueOf(ulimit);
-
- return new String[] {"ulimit", "-v", String.valueOf(memoryLimit)};
+
+ return getUlimitMemoryCommand(memoryLimit);
}
/** Set to true on Windows platforms */