You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:51:40 UTC

svn commit: r1077752 - in /hadoop/common/branches/branch-0.20-security-patches: ./ ivy/ src/test/system/java/org/apache/hadoop/mapred/ src/test/system/java/org/apache/hadoop/util/ src/test/system/java/shared/org/apache/hadoop/common/ src/test/system/va...

Author: omalley
Date: Fri Mar  4 04:51:39 2011
New Revision: 1077752

URL: http://svn.apache.org/viewvc?rev=1077752&view=rev
Log:
commit 98ad5d059d8f470e14041f4a8fe5b8fa740c1410
Author: Konstantin Boudnik <co...@apache.org>
Date:   Mon Sep 20 18:00:08 2010 -0700

    HADOOP-6879 from https://issues.apache.org/jira/secure/attachment/12455083/HADOOP-6879.y20.patch
    
    +++ b/YAHOO-CHANGES.txt
    +    HADOOP-6879. Provide SSH based (Jsch) remote execution API for system
    +    tests. (cos)

Added:
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/RemoteExecution.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/SSHRemoteExecution.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/org/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/org/apache/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/org/apache/hadoop/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/org/apache/hadoop/util/
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/org/apache/hadoop/util/TestSSHRemoteExecution.java
Removed:
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/shared/org/apache/hadoop/common/RemoteExecution.java
Modified:
    hadoop/common/branches/branch-0.20-security-patches/ivy.xml
    hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestChildsKillingOfSuspendTask.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCorruptedDiskJob.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestNodeDecommissioning.java

Modified: hadoop/common/branches/branch-0.20-security-patches/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/ivy.xml?rev=1077752&r1=1077751&r2=1077752&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/ivy.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/ivy.xml Fri Mar  4 04:51:39 2011
@@ -175,12 +175,6 @@
       rev="${commons-daemon.version}"
       conf="server->default" /> 
 
-    <dependency org="com.jcraft"
-      name="jsch"
-      rev="${jsch.version}"
-      conf="common->master">
-    </dependency>
-    
     <!--Configuration: commons-logging -->
 
     <!--it is essential that only the master JAR of commons logging
@@ -282,6 +276,11 @@
        rev="${mockito-all.version}" 
        conf="common->default">
     </dependency>
+    <dependency org="com.jcraft"
+      name="jsch"
+      rev="${jsch.version}"
+      conf="common->default">
+    </dependency>
     <dependency org="org.aspectj"
       name="aspectjrt"
       rev="${aspectj.version}"

Modified: hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties?rev=1077752&r1=1077751&r2=1077752&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties (original)
+++ hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties Fri Mar  4 04:51:39 2011
@@ -25,7 +25,6 @@ aspectj.version=1.6.5
 
 checkstyle.version=4.2
 
-jsch.version=0.1.42
 commons-cli.version=1.2
 commons-codec.version=1.4
 commons-collections.version=3.1
@@ -67,6 +66,7 @@ log4j.version=1.2.15
 lucene-core.version=2.3.1
 
 mockito-all.version=1.8.5
+jsch.version=0.1.42
 
 oro.version=2.0.8
 

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestChildsKillingOfSuspendTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestChildsKillingOfSuspendTask.java?rev=1077752&r1=1077751&r2=1077752&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestChildsKillingOfSuspendTask.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestChildsKillingOfSuspendTask.java Fri Mar  4 04:51:39 2011
@@ -19,6 +19,7 @@ package org.apache.hadoop.mapred;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
+import org.apache.hadoop.util.RemoteExecution;
 import org.junit.Test;
 import org.junit.Assert;
 import org.junit.AfterClass;
@@ -30,7 +31,7 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.Log;
-import org.apache.hadoop.common.RemoteExecution;
+import org.apache.hadoop.util.SSHRemoteExecution;
 
 import java.util.Collection;
 import java.util.Hashtable;
@@ -302,7 +303,8 @@ public class TestChildsKillingOfSuspendT
     }
     public void run() {
       try {
-        RemoteExecution.executeCommand(hostName, userName, cmd);
+        RemoteExecution rExec = new SSHRemoteExecution();
+        rExec.executeCommand(hostName, userName, cmd);
         exitStatus = true;
       } catch(InterruptedException iexp) {
         LOG.warn("Thread is interrupted:" + iexp.getMessage());

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCorruptedDiskJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCorruptedDiskJob.java?rev=1077752&r1=1077751&r2=1077752&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCorruptedDiskJob.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestCorruptedDiskJob.java Fri Mar  4 04:51:39 2011
@@ -19,9 +19,11 @@
 package org.apache.hadoop.mapred;
 
 import org.apache.commons.logging.Log;
-import com.jcraft.jsch.*;
+
 import java.util.List;
-import org.apache.hadoop.common.RemoteExecution;
+
+import org.apache.hadoop.util.RemoteExecution;
+import org.apache.hadoop.util.SSHRemoteExecution;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -109,7 +111,8 @@ public class TestCorruptedDiskJob {
       TTClient ttClient = (TTClient)ttClients.get(i);
       String ttClientHostName = ttClient.getHostName();
       try {
-        RemoteExecution.executeCommand(ttClientHostName, userName,
+        RemoteExecution rExec = new SSHRemoteExecution();
+        rExec.executeCommand(ttClientHostName, userName,
           replaceTaskControllerCommand);
       } catch (Exception e) { e.printStackTrace(); };
     }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestNodeDecommissioning.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestNodeDecommissioning.java?rev=1077752&r1=1077751&r2=1077752&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestNodeDecommissioning.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/mapred/TestNodeDecommissioning.java Fri Mar  4 04:51:39 2011
@@ -22,16 +22,17 @@ import java.util.Hashtable;
 
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.Log;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapreduce.test.system.JTProtocol;
 import org.apache.hadoop.mapreduce.test.system.JTClient;
 import org.apache.hadoop.mapreduce.test.system.TTClient;
 import org.apache.hadoop.mapreduce.test.system.MRCluster;
 import org.apache.hadoop.test.system.process.HadoopDaemonRemoteCluster;
 import java.util.List;
-import java.io.IOException;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.common.RemoteExecution;
-import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.util.RemoteExecution;
+import org.apache.hadoop.util.SSHRemoteExecution;
 import org.apache.hadoop.examples.SleepJob;
 import org.apache.hadoop.fs.Path;
 import java.net.InetAddress;
@@ -88,8 +89,6 @@ public class TestNodeDecommissioning {
    * First a node is decommissioned and verified.
   * Then it is removed from decommissioned state and verified again.
    * At last the node is started.
-   * @param none
-   * @return void
    */
   @Test
   public void TestNodeDecommissioning() throws Exception {
@@ -150,7 +149,8 @@ public class TestNodeDecommissioning {
     String command = "echo " + ttClientHostName + " > " + excludeHostPath;
  
     LOG.info("command is : " + command);
-    RemoteExecution.executeCommand(jtClientHostName, userName, command);
+    RemoteExecution rExec = new SSHRemoteExecution();
+    rExec.executeCommand(jtClientHostName, userName, command);
 
     //The refreshNode command is created and execute in Job Tracker Client.
     String refreshNodeCommand = "export HADOOP_CONF_DIR=" + hadoopConfDir + 
@@ -159,7 +159,7 @@ public class TestNodeDecommissioning {
         ";bin/hadoop mradmin -refreshNodes;"; 
     LOG.info("refreshNodeCommand is : " + refreshNodeCommand);
     try {
-      RemoteExecution.executeCommand(testRunningHostName, userName, 
+      rExec.executeCommand(testRunningHostName, userName,
           refreshNodeCommand);
     } catch (Exception e) { e.printStackTrace();}
 
@@ -172,12 +172,12 @@ public class TestNodeDecommissioning {
     command = "rm " + excludeHostPath;
 
     LOG.info("command is : " + command);
-    RemoteExecution.executeCommand(jtClientHostName, userName, command);
+    rExec.executeCommand(jtClientHostName, userName, command);
 
     Assert.assertTrue("Node should be decommissioned", nodeDecommissionedOrNot);
 
     //The refreshNode command is created and execute in Job Tracker Client.
-    RemoteExecution.executeCommand(jtClientHostName, userName, 
+    rExec.executeCommand(jtClientHostName, userName,
         refreshNodeCommand);
 
     //Checked whether the node is out of decommission.
@@ -192,7 +192,7 @@ public class TestNodeDecommissioning {
         ";kinit -k -t " + keytabForHadoopqaUser + 
         ";bin/hadoop-daemons.sh start tasktracker;";
     LOG.info("ttClientStart is : " + ttClientStart);
-    RemoteExecution.executeCommand(jtClientHostName, userName,
+    rExec.executeCommand(jtClientHostName, userName,
         ttClientStart);
   }
 }

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/RemoteExecution.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/RemoteExecution.java?rev=1077752&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/RemoteExecution.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/RemoteExecution.java Fri Mar  4 04:51:39 2011
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+public interface RemoteExecution {
+  public void executeCommand (String remoteHostName, String user,
+          String  command) throws Exception;
+  public int getExitCode();
+  public String getOutput();
+  public String getCommandString();
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/SSHRemoteExecution.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/SSHRemoteExecution.java?rev=1077752&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/SSHRemoteExecution.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/util/SSHRemoteExecution.java Fri Mar  4 04:51:39 2011
@@ -0,0 +1,203 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import com.jcraft.jsch.*;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.Properties;
+
+/**
+ * Remote execution of commands on a remote machine.
+ */
+
+public class SSHRemoteExecution implements RemoteExecution {
+
+  static final Log LOG = LogFactory.getLog(SSHRemoteExecution.class);
+  static final int SSH_PORT = 22;
+  static final String DEFAULT_IDENTITY="id_dsa";
+  static final String DEFAULT_KNOWNHOSTS="known_hosts";
+  static final String FS = System.getProperty("file.separator");
+  static final String LS = System.getProperty("line.separator");
+  private int exitCode;
+  private StringBuffer output;
+  private String commandString;
+
+  final StringBuffer errorMessage = new StringBuffer();
+  public SSHRemoteExecution() throws Exception {
+  }
+
+  protected String getHomeDir() {
+    String currentUser=System.getProperty("user.name");
+    String userHome=System.getProperty("user.home");
+
+    return userHome.substring(0, userHome.indexOf(currentUser)-1);
+  }
+
+  /**
+   * Execute command at remote host under given user
+   * @param remoteHostName remote host name
+   * @param user is the name of the user to log in as;
+   *   the current user will be used if this is set to <code>null</code>
+   * @param command to be executed remotely
+   * @param identityFile is the name of alternative identity file; default
+   *   is ~user/.ssh/id_dsa
+   * @param portNumber remote SSH daemon port number, default is 22
+   * @throws Exception in case of errors
+   */
+  public void executeCommand (String remoteHostName, String user,
+          String  command, String identityFile, int portNumber) throws Exception {
+    commandString = command;
+    String sessionUser = System.getProperty("user.name");
+    String userHome=System.getProperty("user.home");
+    if (user != null) {
+      sessionUser = user;
+      userHome = getHomeDir() + FS + user;
+    }
+    String dotSSHDir = userHome + FS + ".ssh";
+    String sessionIdentity = dotSSHDir + FS + DEFAULT_IDENTITY;
+    if (identityFile != null) {
+      sessionIdentity = identityFile;
+    }
+
+    JSch jsch = new JSch();
+
+    Session session = jsch.getSession(sessionUser, remoteHostName, portNumber);
+    jsch.setKnownHosts(dotSSHDir + FS + DEFAULT_KNOWNHOSTS);
+    jsch.addIdentity(sessionIdentity);
+
+    Properties config = new Properties();
+    config.put("StrictHostKeyChecking", "no");
+    session.setConfig(config);
+
+    session.connect(30000);   // making a connection with timeout.
+
+    Channel channel=session.openChannel("exec");
+    ((ChannelExec)channel).setCommand(command);
+    channel.setInputStream(null);
+
+    final BufferedReader errReader =
+            new BufferedReader(
+              new InputStreamReader(((ChannelExec)channel).getErrStream()));
+    BufferedReader inReader =
+            new BufferedReader(new InputStreamReader(channel.getInputStream()));
+
+    channel.connect();
+    Thread errorThread = new Thread() {
+      @Override
+      public void run() {
+        try {
+          String line = errReader.readLine();
+          while((line != null) && !isInterrupted()) {
+            errorMessage.append(line);
+            errorMessage.append(LS);
+            line = errReader.readLine();
+          }
+        } catch(IOException ioe) {
+          LOG.warn("Error reading the error stream", ioe);
+        }
+      }
+    };
+
+    try {
+      errorThread.start();
+    } catch (IllegalStateException e) {
+      LOG.debug(e);
+    }
+    try {
+      parseExecResult(inReader);
+      String line = inReader.readLine();
+      while (line != null) {
+        line = inReader.readLine();
+      }
+
+      if(channel.isClosed()) {
+        exitCode = channel.getExitStatus();
+        LOG.debug("exit-status: " + exitCode);
+      }
+      try {
+        // make sure that the error thread exits
+        errorThread.join();
+      } catch (InterruptedException ie) {
+        LOG.warn("Interrupted while reading the error stream", ie);
+      }
+    } catch (Exception ie) {
+      throw new IOException(ie.toString());
+    }
+    finally {
+      try {
+        inReader.close();
+      } catch (IOException ioe) {
+        LOG.warn("Error while closing the input stream", ioe);
+      }
+      try {
+        errReader.close();
+      } catch (IOException ioe) {
+        LOG.warn("Error while closing the error stream", ioe);
+      }
+      channel.disconnect();
+      session.disconnect();
+    }
+  }
+
+  /**
+   * Execute command at remote host under given username
+ * The default identity key ~/.ssh/id_dsa will be used
+ * The default known_hosts file ~/.ssh/known_hosts will be used
+   * @param remoteHostName remote host name
+ * @param user is the name of the user to log in as;
+ *   if set to <code>null</code> then the current user name will be used
+   * @param command to be executed remotely
+   */
+  @Override
+  public void executeCommand (String remoteHostName, String user,
+          String  command) throws Exception {
+    executeCommand(remoteHostName, user, command, null, SSH_PORT);
+  }
+
+  @Override
+  public int getExitCode() {
+    return exitCode;
+  }
+
+  protected void parseExecResult(BufferedReader lines) throws IOException {
+    output = new StringBuffer();
+    char[] buf = new char[512];
+    int nRead;
+    while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
+      output.append(buf, 0, nRead);
+    }
+  }
+
+  /** Get the output of the ssh command.*/
+  @Override
+  public String getOutput() {
+    return (output == null) ? "" : output.toString();
+  }
+
+  /** Get the String representation of ssh command */
+  @Override
+  public String getCommandString() {
+    return commandString;
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/org/apache/hadoop/util/TestSSHRemoteExecution.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/org/apache/hadoop/util/TestSSHRemoteExecution.java?rev=1077752&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/org/apache/hadoop/util/TestSSHRemoteExecution.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/validation/org/apache/hadoop/util/TestSSHRemoteExecution.java Fri Mar  4 04:51:39 2011
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import static org.junit.Assert.assertEquals;
+import org.junit.Test;
+
+public class TestSSHRemoteExecution {
+  
+  @Test
+  /**
+   * Method: executeCommand(String remoteHostName, String user, String  command)
+   */
+  public void testExecuteCommandForRemoteHostNameUserCommand() throws Exception {
+    String command = "ls -l /bin";
+    SSHRemoteExecution sshRE = new SSHRemoteExecution();
+    sshRE.executeCommand("localhost", null, "ls -l /bin");
+    System.out.println(sshRE.getOutput());
+    assertEquals("Exit code is expected to be 0", sshRE.getExitCode(), 0);
+    assertEquals("Mismatched command string", sshRE.getCommandString(), command);
+  }
+
+  @Test
+  /**
+   * Method: getHomeDir()
+   */
+  public void testGetHomeDir() throws Exception {
+    SSHRemoteExecution sshRE = new SSHRemoteExecution();
+    String ret = sshRE.getHomeDir();
+    assertEquals(System.getProperty("user.home"),
+      ret + System.getProperty("file.separator") +
+        System.getProperty("user.name"));
+  }
+}