Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/07/14 10:47:07 UTC

svn commit: r421837 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/dfs/DataNode.java src/java/org/apache/hadoop/dfs/NameNode.java src/java/org/apache/hadoop/ipc/RPC.java

Author: cutting
Date: Fri Jul 14 01:47:07 2006
New Revision: 421837

URL: http://svn.apache.org/viewvc?rev=421837&view=rev
Log:
HADOOP-354.  Make public methods to stop DFS daemons.  Contributed by Barry Kaplan.
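As a rough illustration (not part of the commit), a JUnit-style harness could drive the newly public shutdown hooks like this. DataNode.run(conf), DataNode.shutdownAll(), NameNode.stop() and RPC.stopClient() are confirmed by the diff below; the MiniDfsHarness class name and the NameNode(Configuration) constructor are assumptions made only for the sketch.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.dfs.DataNode;
import org.apache.hadoop.dfs.NameNode;
import org.apache.hadoop.ipc.RPC;

// Hypothetical harness, only to show the intended call sequence.
public class MiniDfsHarness {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();

    NameNode nameNode = new NameNode(conf);  // assumed constructor for this era
    DataNode.run(conf);                      // starts one datanode per dfs.data.dir entry

    try {
      // ... exercise the filesystem under test ...
    } finally {
      DataNode.shutdownAll();  // stops every datanode started by run(conf)
      nameNode.stop();         // now public rather than package-private
      RPC.stopClient();        // closes the shared RPC client connections
    }
  }
}

Keying the new Map by DataNode instance (instead of the old Vector of Threads) is what lets shutdownAll() reach each running datanode directly.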

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=421837&r1=421836&r2=421837&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Fri Jul 14 01:47:07 2006
@@ -34,6 +34,9 @@
  9. HADOOP-361.  Remove unix dependencies from streaming contrib
     module tests, making them pure java. (Michel Tourn via cutting)
 
+10. HADOOP-354.  Make public methods to stop DFS daemons.
+   (Barry Kaplan via cutting)
+
 
 Release 0.4.0 - 2006-06-28
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java?rev=421837&r1=421836&r2=421837&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java Fri Jul 14 01:47:07 2006
@@ -82,7 +82,7 @@
         return new InetSocketAddress(host, port);
     }
 
-    private static Vector subThreadList = null;
+    private static Map subDataNodeList = null;
     DatanodeProtocol namenode;
     FSDataset data;
     DatanodeRegistration dnRegistration;
@@ -193,7 +193,7 @@
      * Shut down this instance of the datanode.
      * Returns only after shutdown is complete.
      */
-    void shutdown() {
+    public void shutdown() {
         this.shouldRun = false;
         ((DataXceiveServer) this.dataXceiveServer.getRunnable()).kill();
         try {
@@ -202,6 +202,19 @@
         }
     }
 
+    /**
+     * Shut down all datanodes that were started via the run(conf) method.
+     * Returns only after shutdown is complete.
+     */
+    public static void shutdownAll(){
+      if(subDataNodeList != null && !subDataNodeList.isEmpty()){
+        for (Iterator iterator = subDataNodeList.keySet().iterator(); iterator.hasNext();) {
+          DataNode dataNode = (DataNode) iterator.next();
+          dataNode.shutdown();
+        }
+      }
+    }
+
     void handleDiskError( String errMsgr ) {
         LOG.warn( "DataNode is shutting down.\n" + errMsgr );
         try {
@@ -880,14 +893,14 @@
      */
     public static void run(Configuration conf) throws IOException {
         String[] dataDirs = conf.getStrings("dfs.data.dir");
-        subThreadList = new Vector(dataDirs.length);
+        subDataNodeList = new HashMap(dataDirs.length);
         for (int i = 0; i < dataDirs.length; i++) {
           DataNode dn = makeInstanceForDir(dataDirs[i], conf);
           if (dn != null) {
             Thread t = new Thread(dn, "DataNode: "+dataDirs[i]);
             t.setDaemon(true); // needed for JUnit testing
             t.start();
-            subThreadList.add(t);
+            subDataNodeList.put(dn,t);
           }
         }
     }
@@ -901,7 +914,7 @@
     run(conf);
 
     //  Wait for sub threads to exit
-    for (Iterator iterator = subThreadList.iterator(); iterator.hasNext();) {
+    for (Iterator iterator = subDataNodeList.values().iterator(); iterator.hasNext();) {
       Thread threadDataNode = (Thread) iterator.next();
       try {
         threadDataNode.join();

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java?rev=421837&r1=421836&r2=421837&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java Fri Jul 14 01:47:07 2006
@@ -114,9 +114,8 @@
 
     /**
      * Stop all NameNode threads and wait for all to finish.
-     * Package-only access since this is intended for JUnit testing.
     */
-    void stop() {
+    public void stop() {
       if (! stopRequested) {
         stopRequested = true;
         namesystem.close();

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java?rev=421837&r1=421836&r2=421837&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/RPC.java Fri Jul 14 01:47:07 2006
@@ -135,6 +135,14 @@
     return CLIENT;
   }
 
+  /**
+   * Stop all RPC client connections
+   */
+  public static synchronized void stopClient(){
+    if(CLIENT != null)
+      CLIENT.stop();
+  }
+
   private static class Invoker implements InvocationHandler {
     private InetSocketAddress address;
     private Client client;