You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by as...@apache.org on 2016/12/18 02:16:55 UTC
[21/29] hadoop git commit: HADOOP-13709. Ability to clean up
subprocesses spawned by Shell when the process exits. Contributed by Eric
Badger
HADOOP-13709. Ability to clean up subprocesses spawned by Shell when the process exits. Contributed by Eric Badger
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/631f1dae
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/631f1dae
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/631f1dae
Branch: refs/heads/YARN-5085
Commit: 631f1daee3507a1adbc68b937cca31c27dbe8d3d
Parents: 169bfc0
Author: Jason Lowe <jl...@apache.org>
Authored: Thu Dec 15 20:52:40 2016 +0000
Committer: Jason Lowe <jl...@apache.org>
Committed: Thu Dec 15 20:52:40 2016 +0000
----------------------------------------------------------------------
.../main/java/org/apache/hadoop/util/Shell.java | 24 +++++++++
.../java/org/apache/hadoop/util/TestShell.java | 52 ++++++++++++++++++++
2 files changed, 76 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/631f1dae/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
index 5fc9869..83877b7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
@@ -26,9 +26,11 @@ import java.io.InputStream;
import java.io.InterruptedIOException;
import java.nio.charset.Charset;
import java.util.Arrays;
+import java.util.Collections;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
+import java.util.WeakHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.annotations.VisibleForTesting;
@@ -48,6 +50,8 @@ import org.slf4j.LoggerFactory;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class Shell {
+ private static final Map <Process, Object> CHILD_PROCESSES =
+ Collections.synchronizedMap(new WeakHashMap<Process, Object>());
public static final Logger LOG = LoggerFactory.getLogger(Shell.class);
/**
@@ -916,6 +920,7 @@ public abstract class Shell {
} else {
process = builder.start();
}
+ CHILD_PROCESSES.put(process, null);
if (timeOutInterval > 0) {
timeOutTimer = new Timer("Shell command timeout");
@@ -1012,6 +1017,7 @@ public abstract class Shell {
LOG.warn("Error while closing the error stream", ioe);
}
process.destroy();
+ CHILD_PROCESSES.remove(process);
lastTime = Time.monotonicNow();
}
}
@@ -1310,4 +1316,22 @@ public abstract class Shell {
}
}
}
+
+  /**
+   * Destroys all running {@code Shell} processes. Iterates over the set of
+   * currently tracked child processes and destroys each one. This method is
+   * thread safe and is intended to be used in a shutdown hook.
+   */
+  public static void destroyAllProcesses() {
+    synchronized (CHILD_PROCESSES) {
+      // Defensive: keys are weakly held, so skip any key that reads as
+      // null rather than risk an NPE while shutting down.
+      for (Process process : CHILD_PROCESSES.keySet()) {
+        if (process != null) {
+          process.destroy();
+        }
+      }
+      CHILD_PROCESSES.clear();
+    }
+  }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/631f1dae/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
index 67903f7..88859b5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.util;
+import com.google.common.base.Supplier;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider;
import org.junit.Assert;
@@ -471,4 +472,55 @@ public class TestShell extends Assert {
assertEquals("'foo'\\''bar'", Shell.bashQuote("foo'bar"));
assertEquals("''\\''foo'\\''bar'\\'''", Shell.bashQuote("'foo'bar'"));
}
+
+ @Test(timeout=120000)
+ public void testShellKillAllProcesses() throws Throwable {
+ Assume.assumeFalse(WINDOWS); // bash + process destroy semantics are POSIX-only
+ StringBuffer sleepCommand = new StringBuffer();
+ sleepCommand.append("sleep 200"); // long enough to outlive the test unless destroyed
+ String[] shellCmd = {"bash", "-c", sleepCommand.toString()};
+ final ShellCommandExecutor shexc1 = new ShellCommandExecutor(shellCmd);
+ final ShellCommandExecutor shexc2 = new ShellCommandExecutor(shellCmd);
+
+ Thread shellThread1 = new Thread() { // execute() blocks, so each runs on its own thread
+ @Override
+ public void run() {
+ try {
+ shexc1.execute();
+ } catch(IOException ioe) {
+ //ignore IOException from thread interrupt
+ }
+ }
+ };
+ Thread shellThread2 = new Thread() {
+ @Override
+ public void run() {
+ try {
+ shexc2.execute();
+ } catch(IOException ioe) {
+ //ignore IOException from thread interrupt
+ }
+ }
+ };
+
+ shellThread1.start();
+ shellThread2.start();
+ GenericTestUtils.waitFor(new Supplier<Boolean>() { // poll until process 1 has actually spawned
+ @Override
+ public Boolean get() {
+ return shexc1.getProcess() != null;
+ }
+ }, 10, 10000);
+
+ GenericTestUtils.waitFor(new Supplier<Boolean>() { // same for process 2
+ @Override
+ public Boolean get() {
+ return shexc2.getProcess() != null;
+ }
+ }, 10, 10000);
+
+ Shell.destroyAllProcesses(); // method under test: must kill both tracked children
+ shexc1.getProcess().waitFor(); // returns only once the process is dead; timeout=120000 guards a hang
+ shexc2.getProcess().waitFor();
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org