Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:14:55 UTC

svn commit: r1077437 [5/5] - in /hadoop/common/branches/branch-0.20-security-patches: ./ src/test/aop/build/ src/test/org/apache/hadoop/mapred/ src/test/system/aop/org/apache/hadoop/hdfs/ src/test/system/aop/org/apache/hadoop/hdfs/server/ src/test/syst...

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/ClusterProcessManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/ClusterProcessManager.java?rev=1077437&r1=1077436&r2=1077437&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/ClusterProcessManager.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/ClusterProcessManager.java Fri Mar  4 04:14:53 2011
@@ -60,10 +60,41 @@ public interface ClusterProcessManager {
   void start() throws IOException;
 
   /**
+   * Starts the daemon from the user-specified conf dir.
+   * @param newConfLocation the dir where the new conf files reside.
+   * @throws IOException
+   */
+  void start(String newConfLocation) throws IOException;
+
+  /**
+   * Stops the daemon running from the user-specified conf dir.
+   * 
+   * @param newConfLocation
+   *          the dir where the new conf files reside.
+   * @throws IOException
+   */
+  void stop(String newConfLocation) throws IOException;
+
+  /**
    * Method to shutdown all the remote daemons.<br/>
    * 
    * @throws IOException if shutdown procedure fails.
    */
   void stop() throws IOException;
+  
+  /**
+   * Checks whether multi-user support is enabled for this cluster.
+   * <br/>
+   * @return true if multi-user support is enabled.
+   * @throws IOException
+   */
+  boolean isMultiUserSupported() throws IOException;
 
+  /**
+   * Pushes a new config to the daemons.
+   * @param localDir the local directory holding the new conf files.
+   * @return the remote directory location to which the config is pushed.
+   * @throws IOException
+   */
+  String pushConfig(String localDir) throws IOException;
 }
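
A rough usage sketch of the new interface methods follows (illustrative only, not part of this patch; the concrete ClusterProcessManager instance and the local config directory are assumed to come from the calling test):

    import java.io.IOException;

    import org.apache.hadoop.test.system.process.ClusterProcessManager;

    public class PushConfigExample {
      /**
       * Restart the cluster daemons with a locally prepared configuration.
       * 'cluster' is assumed to be an already init()-ed implementation,
       * e.g. a HadoopDaemonRemoteCluster subclass.
       */
      static void restartWithNewConfig(ClusterProcessManager cluster,
          String localConfDir) throws IOException {
        // Push the locally prepared config files to every daemon host and
        // remember the remote directory they were pushed to.
        String remoteConfDir = cluster.pushConfig(localConfDir);
        cluster.stop();               // stop daemons running with the old conf
        cluster.start(remoteConfDir); // start them from the pushed conf dir
        // ... run tests against the reconfigured cluster ...
        cluster.stop(remoteConfDir);  // stop the reconfigured daemons
        cluster.start();              // restore the original configuration
      }
    }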

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java?rev=1077437&r1=1077436&r2=1077437&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java Fri Mar  4 04:14:53 2011
@@ -56,11 +56,17 @@ public abstract class HadoopDaemonRemote
   private static final Log LOG = LogFactory
       .getLog(HadoopDaemonRemoteCluster.class.getName());
 
+  public static final String CONF_HADOOPNEWCONFDIR =
+    "test.system.hdrc.hadoopnewconfdir";
   /**
    * Key used to configure the HADOOP_HOME to be used by the
    * HadoopDaemonRemoteCluster.
    */
-  public final static String CONF_HADOOPHOME = "test.system.hdrc.hadoophome";
+  public final static String CONF_HADOOPHOME =
+    "test.system.hdrc.hadoophome";
+
+  public final static String CONF_SCRIPTDIR =
+    "test.system.hdrc.deployed.scripts.dir";
   /**
    * Key used to configure the HADOOP_CONF_DIR to be used by the
    * HadoopDaemonRemoteCluster.
@@ -72,26 +78,70 @@ public abstract class HadoopDaemonRemote
     "test.system.hdrc.deployed.hadoopconfdir";
 
   private String hadoopHome;
-  private String hadoopConfDir;
-  private String deployed_hadoopConfDir;
+  protected String hadoopConfDir;
+  protected String scriptsDir;
+  protected String hadoopNewConfDir;
   private final Set<Enum<?>> roles;
-
   private final List<HadoopDaemonInfo> daemonInfos;
   private List<RemoteProcess> processes;
-
+  protected Configuration conf;
+  
   public static class HadoopDaemonInfo {
     public final String cmd;
     public final Enum<?> role;
-    public final String hostFile;
-    public HadoopDaemonInfo(String cmd, Enum<?> role, String hostFile) {
+    public final List<String> hostNames;
+    public HadoopDaemonInfo(String cmd, Enum<?> role, List<String> hostNames) {
       super();
       this.cmd = cmd;
       this.role = role;
-      this.hostFile = hostFile;
-      LOG.info("Created HadoopDaemonInfo for " + cmd + " " + role + " from " + hostFile);
+      this.hostNames = hostNames;
+    }
+
+    public HadoopDaemonInfo(String cmd, Enum<?> role, String hostFile) 
+        throws IOException {
+      super();
+      this.cmd = cmd;
+      this.role = role;
+      File file = new File(getDeployedHadoopConfDir(), hostFile);
+      BufferedReader reader = null;
+      hostNames = new ArrayList<String>();
+      try {
+        reader = new BufferedReader(new FileReader(file));
+        String host = null;
+        while ((host = reader.readLine()) != null) {
+          if (host.trim().isEmpty() || host.startsWith("#")) {
+            // Skip empty and possible comment lines
+            // throw new IllegalArgumentException(
+            // "Hostname could not be found in file " + hostFile);
+            continue;
+          }
+          hostNames.add(host.trim());
+        }
+        if (hostNames.size() < 1) {
+          throw new IllegalArgumentException("At least one hostname "
+              +
+            "is required to be present in file - " + hostFile);
+        }
+      } finally {
+        if (reader != null) {
+          try {
+            reader.close();
+          } catch (IOException e) {
+            LOG.warn("Could not close reader");
+          }
+        }
+      }
+      LOG.info("Created HadoopDaemonInfo for " + cmd + " " + role + " from " 
+          + hostFile);
     }
   }
 
+  @Override
+  public String pushConfig(String localDir) throws IOException {
+    for (RemoteProcess process : processes) {
+      process.pushConfig(localDir);
+    }
+    return hadoopNewConfDir;
+  }
+
   public HadoopDaemonRemoteCluster(List<HadoopDaemonInfo> daemonInfos) {
     this.daemonInfos = daemonInfos;
     this.roles = new HashSet<Enum<?>>();
@@ -102,9 +152,10 @@ public abstract class HadoopDaemonRemote
 
   @Override
   public void init(Configuration conf) throws IOException {
+    this.conf = conf;
     populateDirectories(conf);
     this.processes = new ArrayList<RemoteProcess>();
-    populateDaemons(deployed_hadoopConfDir);
+    populateDaemons();
   }
 
   @Override
@@ -130,17 +181,10 @@ public abstract class HadoopDaemonRemote
    *           values for the required keys.
    */
   protected void populateDirectories(Configuration conf) {
-    hadoopHome = conf.get(CONF_HADOOPHOME, System
-        .getProperty(CONF_HADOOPHOME));
-    hadoopConfDir = conf.get(CONF_HADOOPCONFDIR, System
-        .getProperty(CONF_HADOOPCONFDIR));
-
-    deployed_hadoopConfDir = conf.get(CONF_DEPLOYED_HADOOPCONFDIR,
-      System.getProperty(CONF_DEPLOYED_HADOOPCONFDIR));
-    if (deployed_hadoopConfDir == null || deployed_hadoopConfDir.isEmpty()) {
-      deployed_hadoopConfDir = hadoopConfDir;
-    }
-
+    hadoopHome = conf.get(CONF_HADOOPHOME);
+    hadoopConfDir = conf.get(CONF_HADOOPCONFDIR);
+    scriptsDir = conf.get(CONF_SCRIPTDIR);
+    hadoopNewConfDir = conf.get(CONF_HADOOPNEWCONFDIR);
     if (hadoopHome == null || hadoopConfDir == null || hadoopHome.isEmpty()
         || hadoopConfDir.isEmpty()) {
       LOG.error("No configuration "
@@ -149,7 +193,17 @@ public abstract class HadoopDaemonRemote
           "No Configuration passed for hadoop home " +
           "and hadoop conf directories");
     }
+  }
 
+  public static String getDeployedHadoopConfDir() {
+    String dir = System.getProperty(CONF_DEPLOYED_HADOOPCONFDIR);
+    if (dir == null || dir.isEmpty()) {
+      LOG.error("No configuration "
+          + "for the CONF_DEPLOYED_HADOOPCONFDIR passed");
+      throw new IllegalArgumentException(
+          "No Configuration passed for hadoop deployed conf directory");
+    }
+    return dir;
   }
 
   @Override
@@ -160,50 +214,52 @@ public abstract class HadoopDaemonRemote
   }
 
   @Override
+  public void start(String newConfLocation) throws IOException {
+    for (RemoteProcess process : processes) {
+      process.start(newConfLocation);
+    }
+  }
+
+  @Override
   public void stop() throws IOException {
     for (RemoteProcess process : processes) {
       process.kill();
     }
   }
 
-  protected void populateDaemon(String confLocation, 
-      HadoopDaemonInfo info) throws IOException {
-    File hostFile = new File(confLocation, info.hostFile);
-    BufferedReader reader = null;
-    reader = new BufferedReader(new FileReader(hostFile));
-    String host = null;
-    try {
-      boolean foundAtLeastOne = false;
-      while ((host = reader.readLine()) != null) {
-        if (host.trim().isEmpty()) {
-          throw new IllegalArgumentException(
-          "Hostname could not be found in file " + info.hostFile);
-        }
-        InetAddress addr = InetAddress.getByName(host);
-        RemoteProcess process = new ScriptDaemon(info.cmd, 
-            addr.getCanonicalHostName(), info.role);
-        processes.add(process);
-        foundAtLeastOne = true;
-      }
-      if (!foundAtLeastOne) {
-        throw new IllegalArgumentException("Alteast one hostname " +
-          "is required to be present in file - " + info.hostFile);
-      }
-    } finally {
-      try {
-        reader.close();
-      } catch (Exception e) {
-        LOG.warn("Could not close reader");
-      }
+  @Override
+  public void stop(String newConfLocation) throws IOException {
+    for (RemoteProcess process : processes) {
+      process.kill(newConfLocation);
+    }
+  }
+
+  protected void populateDaemon(HadoopDaemonInfo info) throws IOException {
+    for (String host : info.hostNames) {
+      InetAddress addr = InetAddress.getByName(host);
+      RemoteProcess process = getProcessManager(info, 
+          addr.getCanonicalHostName());
+      processes.add(process);
     }
   }
 
-  protected void populateDaemons(String confLocation) throws IOException {
+  protected void populateDaemons() throws IOException {
    for (HadoopDaemonInfo info : daemonInfos) {
-     populateDaemon(confLocation, info);
+     populateDaemon(info);
    }
   }
 
+  @Override
+  public boolean isMultiUserSupported() throws IOException {
+    return false;
+  }
+
+  protected RemoteProcess getProcessManager(
+      HadoopDaemonInfo info, String hostName) {
+    RemoteProcess process = new ScriptDaemon(info.cmd, hostName, info.role);
+    return process;
+  }
+
   /**
    * The core daemon class which actually implements the remote process
    * management of actual daemon processes in the cluster.
@@ -214,8 +270,9 @@ public abstract class HadoopDaemonRemote
     private static final String STOP_COMMAND = "stop";
     private static final String START_COMMAND = "start";
     private static final String SCRIPT_NAME = "hadoop-daemon.sh";
-    private final String daemonName;
-    private final String hostName;
+    private static final String PUSH_CONFIG = "pushConfig.sh";
+    protected final String daemonName;
+    protected final String hostName;
     private final Enum<?> role;
 
     public ScriptDaemon(String daemonName, String hostName, Enum<?> role) {
@@ -229,13 +286,57 @@ public abstract class HadoopDaemonRemote
       return hostName;
     }
 
-    private ShellCommandExecutor buildCommandExecutor(String command) {
-      String[] commandArgs = getCommand(command);
-      File binDir = getBinDir();
+    private String[] getPushConfigCommand(String localDir, String remoteDir,
+        File scriptDir) throws IOException {
+      ArrayList<String> cmdArgs = new ArrayList<String>();
+      cmdArgs.add(scriptDir.getAbsolutePath() + File.separator + PUSH_CONFIG);
+      cmdArgs.add(localDir);
+      cmdArgs.add(hostName);
+      cmdArgs.add(remoteDir);
+      cmdArgs.add(hadoopConfDir);
+      return (String[]) cmdArgs.toArray(new String[cmdArgs.size()]);
+    }
+
+    private ShellCommandExecutor buildPushConfig(String local, String remote)
+        throws IOException {
+      File scriptDir = new File(scriptsDir);
+      String[] commandArgs = getPushConfigCommand(local, remote, scriptDir);
       HashMap<String, String> env = new HashMap<String, String>();
-      env.put("HADOOP_CONF_DIR", hadoopConfDir);
       ShellCommandExecutor executor = new ShellCommandExecutor(commandArgs,
-          binDir, env);
+          scriptDir, env);
+      LOG.info(executor.toString());
+      return executor;
+    }
+
+    private ShellCommandExecutor createNewConfDir() throws IOException {
+      ArrayList<String> cmdArgs = new ArrayList<String>();
+      cmdArgs.add("ssh");
+      cmdArgs.add(hostName);
+      cmdArgs.add("if [ -d "+ hadoopNewConfDir+
+          " ];\n then echo Will remove existing directory;  rm -rf "+
+          hadoopNewConfDir+";\nmkdir "+ hadoopNewConfDir+"; else \n"+
+          "echo " + hadoopNewConfDir + " doesnt exist hence creating" +
+          ";  mkdir " + hadoopNewConfDir + ";\n  fi");
+      String[] cmd = (String[]) cmdArgs.toArray(new String[cmdArgs.size()]);
+      ShellCommandExecutor executor = new ShellCommandExecutor(cmd);
+      LOG.info(executor.toString());
+      return executor;
+    }
+
+    @Override
+    public void pushConfig(String localDir) throws IOException {
+      createNewConfDir().execute();
+      buildPushConfig(localDir, hadoopNewConfDir).execute();
+    }
+
+    private ShellCommandExecutor buildCommandExecutor(String command,
+        String confDir) {
+      String[] commandArgs = getCommand(command, confDir);
+      File cwd = new File(".");
+      HashMap<String, String> env = new HashMap<String, String>();
+      env.put("HADOOP_CONF_DIR", confDir);
+      ShellCommandExecutor executor
+        = new ShellCommandExecutor(commandArgs, cwd, env);
       LOG.info(executor.toString());
       return executor;
     }
@@ -245,14 +346,14 @@ public abstract class HadoopDaemonRemote
       return binDir;
     }
 
-    private String[] getCommand(String command) {
+    protected String[] getCommand(String command, String confDir) {
       ArrayList<String> cmdArgs = new ArrayList<String>();
       File binDir = getBinDir();
       cmdArgs.add("ssh");
       cmdArgs.add(hostName);
       cmdArgs.add(binDir.getAbsolutePath() + File.separator + SCRIPT_NAME);
       cmdArgs.add("--config");
-      cmdArgs.add(hadoopConfDir);
+      cmdArgs.add(confDir);
       // XXX Twenty internal version does not support --script option.
       cmdArgs.add(command);
       cmdArgs.add(daemonName);
@@ -261,12 +362,38 @@ public abstract class HadoopDaemonRemote
 
     @Override
     public void kill() throws IOException {
-      buildCommandExecutor(STOP_COMMAND).execute();
+      kill(hadoopConfDir);
     }
 
     @Override
     public void start() throws IOException {
-      buildCommandExecutor(START_COMMAND).execute();
+      start(hadoopConfDir);
+    }
+
+    public void start(String newConfLocation) throws IOException {
+      ShellCommandExecutor cme = buildCommandExecutor(START_COMMAND,
+          newConfLocation);
+      cme.execute();
+      String output = cme.getOutput();
+      if (!output.isEmpty()) { //getOutput() never returns null value
+        if (output.toLowerCase().contains("error")) {
+          LOG.warn("Error is detected.");
+          throw new IOException("Start error\n" + output);
+        }
+      }
+    }
+
+    public void kill(String newConfLocation) throws IOException {
+      ShellCommandExecutor cme
+        = buildCommandExecutor(STOP_COMMAND, newConfLocation);
+      cme.execute();
+      String output = cme.getOutput();
+      if (!output.isEmpty()) { //getOutput() never returns null value
+        if (output.toLowerCase().contains("error")) {
+          LOG.info("Error is detected.");
+          throw new IOException("Kill error\n" + output);
+        }
+      }
     }
 
     @Override
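
For orientation, here is a hypothetical sketch (not part of this patch) of how a concrete subclass could describe its daemons with the reworked HadoopDaemonInfo, and which keys populateDirectories() now reads. All paths, host names and the Role enum are made up for illustration, and it assumes the abstract base requires nothing beyond the constructor:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.test.system.process.HadoopDaemonRemoteCluster;

    public class ExampleRemoteCluster extends HadoopDaemonRemoteCluster {

      enum Role { MASTER, WORKER }

      static List<HadoopDaemonInfo> daemons() {
        return Arrays.asList(
            // Host names can now be passed in directly ...
            new HadoopDaemonInfo("namenode", Role.MASTER,
                Arrays.asList("master.example.com")),
            new HadoopDaemonInfo("datanode", Role.WORKER,
                Arrays.asList("worker1.example.com", "worker2.example.com")));
        // ... or still be read from a host file under the deployed conf dir
        // (System property test.system.hdrc.deployed.hadoopconfdir) via the
        // HadoopDaemonInfo(String, Enum, String) constructor.
      }

      public ExampleRemoteCluster() {
        super(daemons());
      }

      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Keys read by populateDirectories(); values are placeholders.
        conf.set("test.system.hdrc.hadoophome", "/opt/hadoop");
        conf.set("test.system.hdrc.hadoopconfdir", "/opt/hadoop/conf");
        conf.set("test.system.hdrc.deployed.scripts.dir",
            "/opt/hadoop/test-scripts");
        conf.set("test.system.hdrc.hadoopnewconfdir", "/opt/hadoop/conf.new");

        HadoopDaemonRemoteCluster cluster = new ExampleRemoteCluster();
        cluster.init(conf);
        cluster.start();
      }
    }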

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/MultiUserHadoopDaemonRemoteCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/MultiUserHadoopDaemonRemoteCluster.java?rev=1077437&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/MultiUserHadoopDaemonRemoteCluster.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/MultiUserHadoopDaemonRemoteCluster.java Fri Mar  4 04:14:53 2011
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version
+ * 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied. See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+package org.apache.hadoop.test.system.process;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.test.system.process.HadoopDaemonRemoteCluster.HadoopDaemonInfo;
+
+public abstract class MultiUserHadoopDaemonRemoteCluster
+    extends HadoopDaemonRemoteCluster {
+
+  public MultiUserHadoopDaemonRemoteCluster(List<HadoopDaemonInfo> daemonInfos) {
+    super(daemonInfos);
+  }
+
+  @Override
+  protected RemoteProcess getProcessManager(
+      HadoopDaemonInfo info, String hostName) {
+    return new MultiUserScriptDaemon(info.cmd, hostName, info.role);
+  }
+
+  @Override
+  public boolean isMultiUserSupported() throws IOException {
+    return true;
+  }
+
+  class MultiUserScriptDaemon extends ScriptDaemon {
+
+    private static final String MULTI_USER_BINARY_PATH_KEY =
+        "test.system.hdrc.multi-user.binary.path";
+    private static final String MULTI_USER_MANAGING_USER =
+        "test.system.hdrc.multi-user.managinguser.";
+    private String binaryPath;
+    /**
+     * Managing user for a particular daemon is obtained from the key
+     * MULTI_USER_MANAGING_USER + daemonName.
+     */
+    private String managingUser;
+
+    public MultiUserScriptDaemon(
+        String daemonName, String hostName, Enum<?> role) {
+      super(daemonName, hostName, role);
+      initialize(daemonName);
+    }
+
+    private void initialize(String daemonName) {
+      binaryPath = conf.get(MULTI_USER_BINARY_PATH_KEY);
+      if (binaryPath == null || binaryPath.trim().isEmpty()) {
+        throw new IllegalArgumentException(
+            "Binary path for multi-user path is not present. Please set "
+                + MULTI_USER_BINARY_PATH_KEY + " correctly");
+      }
+      File binaryFile = new File(binaryPath);
+      if (!binaryFile.exists() || !binaryFile.canExecute()) {
+        throw new IllegalArgumentException(
+            "Binary file path is not configured correctly. Please set "
+                + MULTI_USER_BINARY_PATH_KEY
+                + " to properly configured binary file.");
+      }
+      managingUser = conf.get(MULTI_USER_MANAGING_USER + daemonName);
+      if (managingUser == null || managingUser.trim().isEmpty()) {
+        throw new IllegalArgumentException(
+            "Managing user for daemon not present. Please set "
+                + MULTI_USER_MANAGING_USER + daemonName + " to a correct value.");
+      }
+    }
+
+    @Override
+    protected String[] getCommand(String command, String confDir) {
+      ArrayList<String> commandList = new ArrayList<String>();
+      commandList.add(binaryPath);
+      commandList.add(managingUser);
+      commandList.add(hostName);
+      commandList.add("--config "
+          + confDir + " " + command + " " + daemonName);
+      return (String[]) commandList.toArray(new String[commandList.size()]);
+    }
+  }
+}
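
The multi-user variant only changes how the remote command is built; everything else is inherited. A minimal sketch of the extra settings read by MultiUserScriptDaemon.initialize() (key names are taken from the code above, values are purely illustrative):

    import org.apache.hadoop.conf.Configuration;

    public class MultiUserConfigExample {
      static Configuration multiUserConf() {
        Configuration conf = new Configuration();
        // Executable wrapper that runs the daemon command as another user;
        // the path here is a placeholder.
        conf.set("test.system.hdrc.multi-user.binary.path",
            "/usr/local/bin/runAs");
        // Managing user, looked up per daemon command name.
        conf.set("test.system.hdrc.multi-user.managinguser.namenode", "hdfs");
        conf.set("test.system.hdrc.multi-user.managinguser.datanode", "hdfs");
        return conf;
      }
    }

With these set, getCommand() produces an invocation of the configured binary, e.g. "/usr/local/bin/runAs hdfs <host> --config <confDir> start namenode", instead of the plain ssh plus hadoop-daemon.sh command used by ScriptDaemon.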

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/RemoteProcess.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/RemoteProcess.java?rev=1077437&r1=1077436&r2=1077437&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/RemoteProcess.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/java/org/apache/hadoop/test/system/process/RemoteProcess.java Fri Mar  4 04:14:53 2011
@@ -19,6 +19,7 @@
 package org.apache.hadoop.test.system.process;
 
 import java.io.IOException;
+import org.apache.hadoop.conf.Configuration;
 
 /**
  * Interface to manage the remote process.
@@ -37,18 +38,37 @@ public interface RemoteProcess {
    * @throws IOException if startup fails.
    */
   void start() throws IOException;
-
+  /**
+   * Starts a daemon from a user-specified conf dir.
+   * @param newConfLocation the dir where the new conf files reside.
+   * @throws IOException
+   */
+  void start(String newConfLocation) throws IOException;
   /**
    * Stop a given daemon process.<br/>
    * 
    * @throws IOException if shutdown fails.
    */
   void kill() throws IOException;
-
+  
+  /**
+   * Stops a given daemon running from a user-specified conf dir.<br/>
+   * 
+   * @param newConfLocation the dir where the new conf files reside.
+   * @throws IOException
+   */
+  void kill(String newConfLocation) throws IOException;
   /**
    * Get the role of the Daemon in the cluster.
    * 
    * @return Enum
    */
   Enum<?> getRole();
-}
\ No newline at end of file
+  
+  /**
+   * Pushed the configuration to new configuration directory 
+   * @param localDir
+   * @throws IOException
+   */
+  void pushConfig(String localDir) throws IOException;
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/system/scripts/pushConfig.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/system/scripts/pushConfig.sh?rev=1077437&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/system/scripts/pushConfig.sh (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/system/scripts/pushConfig.sh Fri Mar  4 04:14:53 2011
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# local folder with new configuration file
+LOCAL_DIR=$1
+# remote daemon host
+HOST=$2
+# remote dir points to the location of new config files
+REMOTE_DIR=$3
+# remote daemon HADOOP_CONF_DIR location
+DAEMON_HADOOP_CONF_DIR=$4
+
+if [ $# -ne 4 ]; then
+  echo "Wrong number of parameters" >&2
+  exit 2
+fi
+
+ret_value=0
+
+echo The script makes a remote copy of existing ${DAEMON_HADOOP_CONF_DIR} to ${REMOTE_DIR}
+echo and populates it with new configs prepared in $LOCAL_DIR
+
+ssh ${HOST} cp -r ${DAEMON_HADOOP_CONF_DIR}/* ${REMOTE_DIR}
+ret_value=$?
+
+# make sure files are writable
+ssh ${HOST} chmod u+w ${REMOTE_DIR}/*
+
+# copy new files over
+scp -r ${LOCAL_DIR}/* ${HOST}:${REMOTE_DIR}
+
+err_code=`echo $? + $ret_value | bc`
+echo Copying of files from local to remote returned ${err_code}
+

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/testjar/JobKillCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/testjar/JobKillCommitter.java?rev=1077437&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/testjar/JobKillCommitter.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/testjar/JobKillCommitter.java Fri Mar  4 04:14:53 2011
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package testjar;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileOutputCommitter;
+import org.apache.hadoop.mapred.JobContext;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+
+public class JobKillCommitter {
+  /**
+   * The class provides an overridden implementation of the output committer
+   * setup method, which causes the job to fail during setup.
+   */
+  public static class CommitterWithFailSetup extends FileOutputCommitter {
+    @Override
+    public void setupJob(JobContext context) throws IOException {
+      throw new IOException();
+    }
+  }
+
+  /**
+   * The class provides a dummy implementation of the output committer
+   * which does nothing.
+   */
+  public static class CommitterWithNoError extends FileOutputCommitter {
+    @Override
+    public void setupJob(JobContext context) throws IOException {
+    }
+
+    @Override
+    public void commitJob(JobContext context) throws IOException {
+    }
+  }
+
+  /**
+   * The class provides an overridden implementation of commitJob which
+   * causes the job cleanup to fail.
+   */
+  public static class CommitterWithFailCleanup extends FileOutputCommitter {
+    @Override
+    public void commitJob(JobContext context) throws IOException {
+      throw new IOException();
+    }
+  }
+
+  /**
+   * The class provides a dummy implementation of the map method which
+   * does nothing.
+   */
+  public static class MapperPass extends Mapper<LongWritable, Text, Text, Text> {
+    public void map(LongWritable key, Text value, Context context)
+        throws IOException, InterruptedException {
+    }
+  }
+  /**
+   * The class provides a sleeping implementation of the map method.
+   */
+  public static class MapperPassSleep extends
+      Mapper<LongWritable, Text, Text, Text> {
+    public void map(LongWritable key, Text value, Context context)
+        throws IOException, InterruptedException {
+      Thread.sleep(10000);
+    }
+  }
+
+  /**
+   * The class provides a way for the mapper function to fail by
+   * intentionally throwing an IOException.
+   */
+  public static class MapperFail extends Mapper<LongWritable, Text, Text, Text> {
+    public void map(LongWritable key, Text value, Context context)
+        throws IOException, InterruptedException {
+      throw new IOException();
+    }
+  }
+
+  /**
+   * The class provides a way for the reduce function to fail by
+   * intentionally throwing an IOException.
+   */
+  public static class ReducerFail extends Reducer<Text, Text, Text, Text> {
+    public void reduce(Text key, Iterator<Text> values, Context context)
+        throws IOException, InterruptedException {
+      throw new IOException();
+    }
+  }
+
+  /**
+   * The class provides an empty implementation of the reduce method that
+   * does nothing.
+   */
+  public static class ReducerPass extends Reducer<Text, Text, Text, Text> {
+    public void reduce(Text key, Iterator<Text> values, Context context)
+        throws IOException, InterruptedException {
+    }
+  }
+}
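
A hypothetical driver for the classes above might look as follows (not part of this patch; input/output paths are placeholders, and the way the system tests actually select one of the committer classes is not shown in this excerpt):

    package testjar;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class JobKillCommitterDriver {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf, "job-kill-committer");
        job.setJarByClass(JobKillCommitter.class);
        // Pick the behaviour under test: here, map tasks that fail at once.
        job.setMapperClass(JobKillCommitter.MapperFail.class);
        job.setReducerClass(JobKillCommitter.ReducerPass.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }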