Posted to common-commits@hadoop.apache.org by to...@apache.org on 2011/08/19 22:49:35 UTC

svn commit: r1159782 [3/3] - in /hadoop/common/branches/HDFS-1623: ./ dev-support/ hadoop-assemblies/src/main/resources/assemblies/ hadoop-common/ hadoop-common/src/main/docs/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/i...

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/FenceMethod.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/FenceMethod.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/FenceMethod.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/FenceMethod.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configurable;
+
+/**
+ * A fencing method is a mechanism by which one node can forcibly prevent
+ * another node from making continued progress. This might be implemented
+ * by killing a process on the other node, by denying the other node's
+ * access to shared storage, or by accessing a PDU to cut the other node's
+ * power.
+ * <p>
+ * Since these methods are often vendor- or device-specific, operators
+ * may implement this interface in order to achieve fencing.
+ * <p>
+ * Fencing is configured by the operator as an ordered list of methods to
+ * attempt. Each method will be tried in turn, and the next in the list
+ * will only be attempted if the previous one fails. See {@link NodeFencer}
+ * for more information.
+ * <p>
+ * If an implementation also implements {@link Configurable} then its
+ * <code>setConf</code> method will be called upon instantiation.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
+public interface FenceMethod {
+  /**
+   * Verify that the given fencing method's arguments are valid.
+   * @param args the arguments provided in the configuration. This may
+   *        be null if the operator did not configure any arguments.
+   * @throws BadFencingConfigurationException if the arguments are invalid
+   */
+  public void checkArgs(String args) throws BadFencingConfigurationException;
+  
+  /**
+   * Attempt to fence the target node.
+   * @param args the configured arguments, which were checked at startup by
+   *             {@link #checkArgs(String)}
+   * @return true if fencing was successful, false if unsuccessful or
+   *              indeterminate
+   * @throws BadFencingConfigurationException if the configuration was
+   *         determined to be invalid only at runtime
+   */
+  public boolean tryFence(String args) throws BadFencingConfigurationException; 
+}
\ No newline at end of file
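
[Editorial note: for illustration, a minimal implementation of the interface
above might look like the following sketch. The class name and package are
hypothetical; only FenceMethod and BadFencingConfigurationException come from
this commit.]

    package com.example.ha;

    import org.apache.hadoop.hdfs.server.namenode.ha.BadFencingConfigurationException;
    import org.apache.hadoop.hdfs.server.namenode.ha.FenceMethod;

    /** Hypothetical fencing method that trivially "succeeds". */
    public class NoOpFencer implements FenceMethod {
      @Override
      public void checkArgs(String args) throws BadFencingConfigurationException {
        // This sketch takes no arguments, so there is nothing to validate.
      }

      @Override
      public boolean tryFence(String args) {
        // A real implementation would kill a process, revoke access to
        // shared storage, or cut power, and return true only on success.
        return true;
      }
    }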

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/HAState.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/HAState.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/HAState.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/HAState.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ha.ServiceFailedException;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.namenode.NameNode.OperationCategory;
+import org.apache.hadoop.hdfs.server.namenode.UnsupportedActionException;
+
+/**
+ * Base class for namenode HA states, implementing the state machine pattern.
+ */
+@InterfaceAudience.Private
+abstract public class HAState {
+  protected final String name;
+
+  /**
+   * Constructor
+   * @param name Name of the state.
+   */
+  public HAState(String name) {
+    this.name = name;
+  }
+
+  /**
+   * Internal method to transition the state of a given namenode to a new state.
+   * @param nn Namenode
+   * @param s new state
+   * @throws ServiceFailedException on failure to transition to new state.
+   */
+  protected final void setStateInternal(final NameNode nn, final HAState s)
+      throws ServiceFailedException {
+    exitState(nn);
+    nn.setState(s);
+    s.enterState(nn);
+  }
+
+  /**
+   * Method to be overridden by subclasses to perform steps necessary for
+   * entering a state.
+   * @param nn Namenode
+   * @throws ServiceFailedException on failure to enter the state.
+   */
+  protected abstract void enterState(final NameNode nn)
+      throws ServiceFailedException;
+
+  /**
+   * Method to be overridden by subclasses to perform steps necessary for
+   * exiting a state.
+   * @param nn Namenode
+   * @throws ServiceFailedException on failure to exit the state.
+   */
+  protected abstract void exitState(final NameNode nn)
+      throws ServiceFailedException;
+
+  /**
+   * Move from the existing state to a new state.
+   * @param nn Namenode
+   * @param s new state
+   * @throws ServiceFailedException on failure to transition to new state.
+   */
+  public void setState(NameNode nn, HAState s) throws ServiceFailedException {
+    if (this == s) { // Already in the new state
+      return;
+    }
+    throw new ServiceFailedException("Transtion from state " + this + " to "
+        + s + " is not allowed.");
+  }
+  
+  /**
+   * Check if an operation is supported in a given state.
+   * @param nn Namenode
+   * @param op Type of the operation.
+   * @throws UnsupportedActionException if a given type of operation is not
+   *           supported in this state.
+   */
+  public void checkOperation(final NameNode nn, final OperationCategory op)
+      throws UnsupportedActionException {
+    String msg = "Operation category " + op + " is not supported in state "
+        + nn.getState();
+    throw new UnsupportedActionException(msg);
+  }
+  
+  @Override
+  public String toString() {
+    return name;
+  }
+}
\ No newline at end of file
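
[Editorial note: to illustrate the pattern (StandbyState, added below, is the
real example), a subclass implements the enter/exit hooks and may override
setState to permit specific transitions. Everything here other than HAState,
NameNode, and ServiceFailedException is hypothetical.]

    /** Hypothetical terminal state that allows no outgoing transitions. */
    class FrozenState extends HAState {
      FrozenState() {
        super("frozen");
      }

      @Override
      protected void enterState(NameNode nn) throws ServiceFailedException {
        // Acquire whatever resources this state needs.
      }

      @Override
      protected void exitState(NameNode nn) throws ServiceFailedException {
        // Release them again.
      }

      // setState is not overridden, so any attempt to leave this state
      // falls through to HAState.setState and fails with a
      // ServiceFailedException.
    }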

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/NodeFencer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/NodeFencer.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/NodeFencer.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/NodeFencer.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+
+/**
+ * This class parses the configured list of fencing methods, and
+ * is responsible for trying each one in turn while logging informative
+ * output.<p>
+ * 
+ * The fencing methods are configured as a newline-separated list.
+ * Each line in the list is of the form:<p>
+ * <code>com.example.foo.MyMethod(arg string)</code>
+ * or
+ * <code>com.example.foo.MyMethod</code>
+ * The class provided must implement the {@link FenceMethod} interface.
+ * The fencing methods that ship with Hadoop may also be referred to
+ * by shortened names:<p>
+ * <ul>
+ * <li><code>shell(/path/to/some/script.sh args...)</code></li>
+ * <li><code>sshfence(...)</code> (see {@link SshFenceByTcpPort})</li>
+ * </ul>
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class NodeFencer {
+  static final String CONF_METHODS_KEY =
+    "dfs.namenode.ha.fencing.methods";
+  
+  private static final String CLASS_RE = "([a-zA-Z0-9\\.\\$]+)";
+  private static final Pattern CLASS_WITH_ARGUMENT =
+    Pattern.compile(CLASS_RE + "\\((.+?)\\)");
+  private static final Pattern CLASS_WITHOUT_ARGUMENT =
+    Pattern.compile(CLASS_RE);
+  private static final Pattern HASH_COMMENT_RE =
+    Pattern.compile("#.*$");
+
+  private static final Log LOG = LogFactory.getLog(NodeFencer.class);
+
+  /**
+   * Standard fencing methods included with HDFS.
+   */
+  private static final Map<String, Class<? extends FenceMethod>> STANDARD_METHODS =
+    ImmutableMap.<String, Class<? extends FenceMethod>>of(
+        "shell", ShellCommandFencer.class,
+        "sshfence", SshFenceByTcpPort.class);
+  
+  private final List<FenceMethodWithArg> methods;
+  
+  public NodeFencer(Configuration conf)
+      throws BadFencingConfigurationException {
+    this.methods = parseMethods(conf);
+  }
+  
+  public boolean fence() {
+    LOG.info("====== Beginning NameNode Fencing Process... ======");
+    int i = 0;
+    for (FenceMethodWithArg method : methods) {
+      LOG.info("Trying method " + (++i) + "/" + methods.size() +": " + method);
+      
+      try {
+        if (method.method.tryFence(method.arg)) {
+          LOG.info("====== Fencing successful by method " + method + " ======");
+          return true;
+        }
+      } catch (BadFencingConfigurationException e) {
+        LOG.error("Fencing method " + method + " misconfigured", e);
+        continue;
+      } catch (Throwable t) {
+        LOG.error("Fencing method " + method + " failed with an unexpected error.", t);
+        continue;
+      }
+      LOG.warn("Fencing method " + method + " was unsuccessful.");
+    }
+    
+    LOG.error("Unable to fence NameNode by any configured method.");
+    return false;
+  }
+
+  private static List<FenceMethodWithArg> parseMethods(Configuration conf)
+  throws BadFencingConfigurationException {
+    String confStr = conf.get(CONF_METHODS_KEY);
+    if (confStr == null) {
+      throw new BadFencingConfigurationException(
+          "No fencing methods configured (" + CONF_METHODS_KEY + " is unset)");
+    }
+    String[] lines = confStr.split("\\s*\n\\s*");
+    
+    List<FenceMethodWithArg> methods = Lists.newArrayList();
+    for (String line : lines) {
+      line = HASH_COMMENT_RE.matcher(line).replaceAll("");
+      line = line.trim();
+      if (!line.isEmpty()) {
+        methods.add(parseMethod(conf, line));
+      }
+    }
+    
+    return methods;
+  }
+
+  private static FenceMethodWithArg parseMethod(Configuration conf, String line)
+      throws BadFencingConfigurationException {
+    Matcher m;
+    if ((m = CLASS_WITH_ARGUMENT.matcher(line)).matches()) {
+      String className = m.group(1);
+      String arg = m.group(2);
+      
+      return createFenceMethod(conf, className, arg);
+    } else if ((m = CLASS_WITHOUT_ARGUMENT.matcher(line)).matches()) {
+      String className = m.group(1);
+      return createFenceMethod(conf, className, null);
+    } else {
+      throw new BadFencingConfigurationException(
+          "Unable to parse line: '" + line + "'");
+    }
+  }
+
+  private static FenceMethodWithArg createFenceMethod(
+      Configuration conf, String clazzName, String arg)
+      throws BadFencingConfigurationException {
+
+    Class<?> clazz;
+    try {
+      // See if it's a short name for one of the built-in methods
+      clazz = STANDARD_METHODS.get(clazzName);
+      if (clazz == null) {
+        // Try to instantiate the user's custom method
+        clazz = Class.forName(clazzName);
+      }
+    } catch (Exception e) {
+      throw new BadFencingConfigurationException(
+          "Could not find configured fencing method " + clazzName,
+          e);
+    }
+    
+    // Check that it implements the right interface
+    if (!FenceMethod.class.isAssignableFrom(clazz)) {
+      throw new BadFencingConfigurationException("Class " + clazzName +
+          " does not implement FenceMethod");
+    }
+    
+    FenceMethod method = (FenceMethod)ReflectionUtils.newInstance(
+        clazz, conf);
+    method.checkArgs(arg);
+    return new FenceMethodWithArg(method, arg);
+  }
+  
+  private static class FenceMethodWithArg {
+    private final FenceMethod method;
+    private final String arg;
+    
+    private FenceMethodWithArg(FenceMethod method, String arg) {
+      this.method = method;
+      this.arg = arg;
+    }
+    
+    @Override
+    public String toString() {
+      return method.getClass().getCanonicalName() + "(" + arg + ")";
+    }
+  }
+}
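
[Editorial note: to make the configuration format above concrete, a short
sketch of wiring up and running the fencer, mirroring what TestNodeFencer
(later in this commit) does. The method list shown is illustrative, and
NoOpFencer is the hypothetical fencer sketched earlier.]

    Configuration conf = new Configuration();
    // Newline-separated list; '#' starts a comment. Methods are tried in
    // order, and later entries run only if the earlier ones fail.
    conf.set(NodeFencer.CONF_METHODS_KEY,
        "# custom method first, then fall back to a script\n" +
        "com.example.ha.NoOpFencer\n" +
        "shell(/path/to/fence-script.sh other-nn)");
    NodeFencer fencer = new NodeFencer(conf);
    boolean fenced = fencer.fence();  // true once any method succeeds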

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ShellCommandFencer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ShellCommandFencer.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ShellCommandFencer.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ShellCommandFencer.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,173 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configured;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Fencing method that runs a shell command. It should be specified
+ * in the fencing configuration like:<br>
+ * <code>
+ *   shell(/path/to/my/script.sh arg1 arg2 ...)
+ * </code><br>
+ * The string between '(' and ')' is passed directly to a bash shell and
+ * may not include any closing parentheses.<p>
+ * 
+ * The shell command will be run with an environment set up to contain
+ * all of the current Hadoop configuration variables, with the '_' character 
+ * replacing any '.' characters in the configuration keys.<p>
+ * 
+ * If the shell command returns an exit code of 0, the fencing is
+ * determined to be successful. If it returns any other exit code, the
+ * fencing was not successful and the next fencing method in the list
+ * will be attempted.<p>
+ * 
+ * <em>Note:</em> this fencing method does not implement any timeout.
+ * If timeouts are necessary, they should be implemented in the shell
+ * script itself (e.g. by forking a subshell to kill its parent in
+ * some number of seconds).
+ */
+public class ShellCommandFencer
+  extends Configured implements FenceMethod {
+
+  /** Length at which to abbreviate command in long messages */
+  private static final int ABBREV_LENGTH = 20;
+  
+  @VisibleForTesting
+  static Log LOG = LogFactory.getLog(
+      ShellCommandFencer.class);
+  
+  @Override
+  public void checkArgs(String args) throws BadFencingConfigurationException {
+    if (args == null || args.isEmpty()) {
+      throw new BadFencingConfigurationException(
+          "No argument passed to 'shell' fencing method");
+    }
+    // Nothing else we can really check without actually running the command
+  }
+
+  @Override
+  public boolean tryFence(String cmd) {
+    ProcessBuilder builder = new ProcessBuilder(
+        "bash", "-e", "-c", cmd);
+    setConfAsEnvVars(builder.environment());
+
+    Process p;
+    try {
+      p = builder.start();
+      p.getOutputStream().close();
+    } catch (IOException e) {
+      LOG.warn("Unable to execute " + cmd, e);
+      return false;
+    }
+    
+    String pid = tryGetPid(p);
+    LOG.info("Launched fencing command '" + cmd + "' with "
+        + ((pid != null) ? ("pid " + pid) : "unknown pid"));
+    
+    String logPrefix = abbreviate(cmd, ABBREV_LENGTH);
+    if (pid != null) {
+      logPrefix = "[PID " + pid + "] " + logPrefix;
+    }
+    
+    // Pump logs to stderr
+    StreamPumper errPumper = new StreamPumper(
+        LOG, logPrefix, p.getErrorStream(),
+        StreamPumper.StreamType.STDERR);
+    errPumper.start();
+    
+    StreamPumper outPumper = new StreamPumper(
+        LOG, logPrefix, p.getInputStream(),
+        StreamPumper.StreamType.STDOUT);
+    outPumper.start();
+    
+    int rc;
+    try {
+      rc = p.waitFor();
+      errPumper.join();
+      outPumper.join();
+    } catch (InterruptedException ie) {
+      LOG.warn("Interrupted while waiting for fencing command: " + cmd);
+      return false;
+    }
+    
+    return rc == 0;
+  }
+
+  /**
+   * Abbreviate a string by putting '...' in the middle of it,
+   * in an attempt to keep logs from getting too messy.
+   * @param cmd the string to abbreviate
+   * @param len maximum length to abbreviate to
+   * @return abbreviated string
+   */
+  static String abbreviate(String cmd, int len) {
+    if (cmd.length() > len && len >= 5) {
+      int firstHalf = (len - 3) / 2;
+      int rem = len - firstHalf - 3;
+      
+      return cmd.substring(0, firstHalf) + 
+        "..." + cmd.substring(cmd.length() - rem);
+    } else {
+      return cmd;
+    }
+  }
+  
+  /**
+   * Attempt to use evil reflection tricks to determine the
+   * pid of a launched process. This is helpful to operators
+   * when debugging a fencing process that may have gone
+   * wrong. If running on a system or JVM where this doesn't
+   * work, it will simply return null.
+   */
+  private static String tryGetPid(Process p) {
+    try {
+      Class<? extends Process> clazz = p.getClass();
+      if (clazz.getName().equals("java.lang.UNIXProcess")) {
+        Field f = clazz.getDeclaredField("pid");
+        f.setAccessible(true);
+        return String.valueOf(f.getInt(p));
+      } else {
+        LOG.trace("Unable to determine pid for " + p
+            + " since it is not a UNIXProcess");
+        return null;
+      }
+    } catch (Throwable t) {
+      LOG.trace("Unable to determine pid for " + p, t);
+      return null;
+    }
+  }
+
+  /**
+   * Set the environment of the subprocess to be the Configuration,
+   * with '.'s replaced by '_'s.
+   */
+  private void setConfAsEnvVars(Map<String, String> env) {
+    for (Map.Entry<String, String> pair : getConf()) {
+      env.put(pair.getKey().replace('.', '_'), pair.getValue());
+    }
+  }
+}
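
[Editorial note: a sketch of the '.'-to-'_' environment mapping described in
the class javadoc. The configuration key is illustrative; the same behavior
is exercised by TestShellCommandFencer later in this commit.]

    Configuration conf = new Configuration();
    conf.set("my.example.key", "some-value");
    ShellCommandFencer fencer = new ShellCommandFencer();
    fencer.setConf(conf);
    // The subprocess sees the key as $my_example_key; exit code 0 from
    // bash means the fence is considered successful.
    boolean ok = fencer.tryFence("test \"$my_example_key\" = some-value");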

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/SshFenceByTcpPort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/SshFenceByTcpPort.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/SshFenceByTcpPort.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/SshFenceByTcpPort.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,352 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Collection;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.jcraft.jsch.ChannelExec;
+import com.jcraft.jsch.JSch;
+import com.jcraft.jsch.JSchException;
+import com.jcraft.jsch.Session;
+
+/**
+ * This fencing implementation sshes to the target node and uses <code>fuser</code>
+ * to kill the process listening on the NameNode's TCP port. This is
+ * more accurate than using "jps" since it doesn't require parsing,
+ * and will work even if there are multiple NameNodes running on the
+ * same machine.<p>
+ * It returns a successful status code if:
+ * <ul>
+ * <li><code>fuser</code> indicates it successfully killed a process, <em>or</em>
+ * <li><code>nc -z</code> indicates that nothing is listening on the target port
+ * </ul>
+ * <p>
+ * This fencing mechanism is configured as follows in the fencing method
+ * list:
+ * <code>sshfence([username@]nnhost[:ssh-port][, target-nn-port])</code>
+ * where the first argument specifies the username, host, and port to ssh
+ * into, and the second argument specifies the port on which the target
+ * NN process is listening.
+ * <p>
+ * For example, <code>sshfence(other-nn, 8020)</code> will SSH into
+ * <code>other-nn</code> as the current user on the standard SSH port,
+ * then kill whatever process is listening on port 8020.
+ * <p>
+ * If no <code>target-nn-port</code> is specified, it is assumed that the
+ * target NameNode is listening on the same port as the local NameNode.
+ * <p>
+ * In order to achieve passwordless SSH, the operator must also configure
+ * <code>dfs.namenode.ha.fencing.ssh.private-key-files</code> to point to an
+ * SSH key that has passphrase-less access to the given username and host.
+ */
+public class SshFenceByTcpPort extends Configured
+  implements FenceMethod {
+
+  static final Log LOG = LogFactory.getLog(
+      SshFenceByTcpPort.class);
+  
+  static final String CONF_CONNECT_TIMEOUT_KEY =
+    "dfs.namenode.ha.fencing.ssh.connect-timeout";
+  private static final int CONF_CONNECT_TIMEOUT_DEFAULT =
+    30*1000;
+  static final String CONF_IDENTITIES_KEY =
+    "dfs.namenode.ha.fencing.ssh.private-key-files";
+
+  /**
+   * Verify that the arguments are parseable and that the host
+   * can be resolved.
+   */
+  @Override
+  public void checkArgs(String argStr) throws BadFencingConfigurationException {
+    Args args = new Args(argStr);
+    try {
+      InetAddress.getByName(args.host);
+    } catch (UnknownHostException e) {
+      throw new BadFencingConfigurationException(
+          "Unknown host: " + args.host);
+    }
+  }
+
+  @Override
+  public boolean tryFence(String argsStr)
+      throws BadFencingConfigurationException {
+    Args args = new Args(argsStr);
+    
+    Session session;
+    try {
+      session = createSession(args);
+    } catch (JSchException e) {
+      LOG.warn("Unable to create SSH session", e);
+      return false;
+    }
+
+    LOG.info("Connecting to " + args.host + "...");
+    
+    try {
+      session.connect(getSshConnectTimeout());
+    } catch (JSchException e) {
+      LOG.warn("Unable to connect to " + args.host
+          + " as user " + args.user, e);
+      return false;
+    }
+    LOG.info("Connected to " + args.host);
+
+    int targetPort = args.targetPort != null ?
+        args.targetPort : getDefaultNNPort();
+    try {
+      return doFence(session, targetPort);
+    } catch (JSchException e) {
+      LOG.warn("Unable to achieve fencing on remote host", e);
+      return false;
+    } finally {
+      session.disconnect();
+    }
+  }
+
+
+  private Session createSession(Args args) throws JSchException {
+    JSch jsch = new JSch();
+    for (String keyFile : getKeyFiles()) {
+      jsch.addIdentity(keyFile);
+    }
+    JSch.setLogger(new LogAdapter());
+
+    Session session = jsch.getSession(args.user, args.host, args.sshPort);
+    session.setConfig("StrictHostKeyChecking", "no");
+    return session;
+  }
+
+  private boolean doFence(Session session, int nnPort) throws JSchException {
+    try {
+      LOG.info("Looking for process running on port " + nnPort);
+      int rc = execCommand(session,
+          "PATH=$PATH:/sbin:/usr/sbin fuser -v -k -n tcp " + nnPort);
+      if (rc == 0) {
+        LOG.info("Successfully killed process that was " +
+            "listening on port " + nnPort);
+        // exit code 0 indicates the process was successfully killed.
+        return true;
+      } else if (rc == 1) {
+        // exit code 1 indicates either that the process was not running
+        // or that fuser didn't have root privileges in order to find it
+        // (eg running as a different user)
+        LOG.info(
+            "Indeterminate response from trying to kill NameNode. " +
+            "Verifying whether it is running using nc...");
+        rc = execCommand(session, "nc -z localhost 8020");
+        if (rc == 0) {
+          // the NN is still listening - we are unable to fence
+          LOG.warn("Unable to fence NN - it is running but we cannot kill it");
+          return false;
+        } else {
+          LOG.info("Verified that the NN is down.");
+          return true;          
+        }
+      } else {
+        // Any other exit code is unexpected; treat it as a fencing failure.
+        LOG.warn("fuser exited with unexpected code " + rc);
+        return false;
+      }
+    } catch (InterruptedException e) {
+      LOG.warn("Interrupted while trying to fence via ssh", e);
+      return false;
+    } catch (IOException e) {
+      LOG.warn("Unknown failure while trying to fence via ssh", e);
+      return false;
+    }
+  }
+  
+  /**
+   * Execute a command through the ssh session, pumping its
+   * stderr and stdout to our own logs.
+   */
+  private int execCommand(Session session, String cmd)
+      throws JSchException, InterruptedException, IOException {
+    LOG.debug("Running cmd: " + cmd);
+    ChannelExec exec = null;
+    try {
+      exec = (ChannelExec)session.openChannel("exec");
+      exec.setCommand(cmd);
+      exec.setInputStream(null);
+      exec.connect();
+
+      // Pump stdout of the command to our INFO logs
+      StreamPumper outPumper = new StreamPumper(LOG, cmd + " via ssh",
+          exec.getInputStream(), StreamPumper.StreamType.STDOUT);
+      outPumper.start();
+      
+      // Pump stderr of the command to our WARN logs
+      StreamPumper errPumper = new StreamPumper(LOG, cmd + " via ssh",
+          exec.getErrStream(), StreamPumper.StreamType.STDERR);
+      errPumper.start();
+      
+      outPumper.join();
+      errPumper.join();
+      return exec.getExitStatus();
+    } finally {
+      cleanup(exec);
+    }
+  }
+
+  private static void cleanup(ChannelExec exec) {
+    if (exec != null) {
+      try {
+        exec.disconnect();
+      } catch (Throwable t) {
+        LOG.warn("Couldn't disconnect ssh channel", t);
+      }
+    }
+  }
+
+  private int getSshConnectTimeout() {
+    return getConf().getInt(
+        CONF_CONNECT_TIMEOUT_KEY, CONF_CONNECT_TIMEOUT_DEFAULT);
+  }
+
+  private Collection<String> getKeyFiles() {
+    return getConf().getTrimmedStringCollection(CONF_IDENTITIES_KEY);
+  }
+  
+  private int getDefaultNNPort() {
+    return NameNode.getAddress(getConf()).getPort();
+  }
+
+  /**
+   * Container for the parsed arg line for this fencing method.
+   */
+  @VisibleForTesting
+  static class Args {
+    private static final Pattern USER_HOST_PORT_RE = Pattern.compile(
+      "(?:(.+?)@)?([^:]+?)(?:\\:(\\d+))?");
+
+    private static final int DEFAULT_SSH_PORT = 22;
+
+    final String user;
+    final String host;
+    final int sshPort;
+    
+    final Integer targetPort;
+    
+    public Args(String args) throws BadFencingConfigurationException {
+      if (args == null) {
+        throw new BadFencingConfigurationException(
+            "Must specify args for ssh fencing configuration");
+      }
+      String[] argList = args.split(",\\s*");
+      if (argList.length > 2 || argList.length == 0) {
+        throw new BadFencingConfigurationException(
+            "Incorrect number of arguments: " + args);
+      }
+      
+      // Parse SSH destination.
+      String sshDestArg = argList[0];
+      Matcher m = USER_HOST_PORT_RE.matcher(sshDestArg);
+      if (!m.matches()) {
+        throw new BadFencingConfigurationException(
+            "Unable to parse SSH destination: "+ sshDestArg);
+      }
+      if (m.group(1) != null) {
+        user = m.group(1);
+      } else {
+        user = System.getProperty("user.name");
+      }
+      
+      host = m.group(2);
+
+      if (m.group(3) != null) {
+        sshPort = parseConfiggedPort(m.group(3));
+      } else {
+        sshPort = DEFAULT_SSH_PORT;
+      }
+      
+      // Parse target port.
+      if (argList.length > 1) {
+        targetPort = parseConfiggedPort(argList[1]);
+      } else {
+        targetPort = null;
+      }
+    }
+
+    private Integer parseConfiggedPort(String portStr)
+        throws BadFencingConfigurationException {
+      try {
+        return Integer.valueOf(portStr);
+      } catch (NumberFormatException nfe) {
+        throw new BadFencingConfigurationException(
+            "Port number '" + portStr + "' invalid");
+      }
+    }
+  }
+
+  /**
+   * Adapter from JSch's logger interface to our log4j
+   */
+  private static class LogAdapter implements com.jcraft.jsch.Logger {
+    static final Log LOG = LogFactory.getLog(
+        SshFenceByTcpPort.class.getName() + ".jsch");
+
+    public boolean isEnabled(int level) {
+      switch (level) {
+      case com.jcraft.jsch.Logger.DEBUG:
+        return LOG.isDebugEnabled();
+      case com.jcraft.jsch.Logger.INFO:
+        return LOG.isInfoEnabled();
+      case com.jcraft.jsch.Logger.WARN:
+        return LOG.isWarnEnabled();
+      case com.jcraft.jsch.Logger.ERROR:
+        return LOG.isErrorEnabled();
+      case com.jcraft.jsch.Logger.FATAL:
+        return LOG.isFatalEnabled();
+      default:
+        return false;
+      }
+    }
+      
+    public void log(int level, String message) {
+      switch (level) {
+      case com.jcraft.jsch.Logger.DEBUG:
+        LOG.debug(message);
+        break;
+      case com.jcraft.jsch.Logger.INFO:
+        LOG.info(message);
+        break;
+      case com.jcraft.jsch.Logger.WARN:
+        LOG.warn(message);
+        break;
+      case com.jcraft.jsch.Logger.ERROR:
+        LOG.error(message);
+        break;
+      case com.jcraft.jsch.Logger.FATAL:
+        LOG.fatal(message);
+        break;
+      }
+    }
+  }
+}
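
[Editorial note: a sketch of the argument grammar accepted by Args. The
values are illustrative, checked-exception handling is elided, and the same
grammar is exercised by TestSshFenceByTcpPort later in this commit.]

    // [user@]host[:ssh-port][, target-nn-port]
    SshFenceByTcpPort.Args a =
        new SshFenceByTcpPort.Args("admin@other-nn:2222, 8020");
    // a.user == "admin", a.host == "other-nn",
    // a.sshPort == 2222, a.targetPort == 8020

    SshFenceByTcpPort.Args b = new SshFenceByTcpPort.Args("other-nn");
    // b.user defaults to the current user, b.sshPort to 22, and
    // b.targetPort to null (fall back to the local NN's port)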

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyState.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyState.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyState.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyState.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import org.apache.hadoop.ha.ServiceFailedException;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+
+/**
+ * Namenode standby state. In this state the namenode acts as a warm standby and
+ * keeps the following updated:
+ * <ul>
+ * <li>Namespace by getting the edits.</li>
+ * <li>Block location information by receiving block reports and blocks
+ * received from the datanodes.</li>
+ * </ul>
+ * 
+ * It does not handle read/write/checkpoint operations.
+ */
+public class StandbyState extends HAState {
+  public StandbyState() {
+    super("standby");
+  }
+
+  @Override
+  public void setState(NameNode nn, HAState s) throws ServiceFailedException {
+    if (s == NameNode.ACTIVE_STATE) {
+      setStateInternal(nn, s);
+      return;
+    }
+    super.setState(nn, s);
+  }
+
+  @Override
+  protected void enterState(NameNode nn) throws ServiceFailedException {
+    // TODO:HA
+  }
+
+  @Override
+  protected void exitState(NameNode nn) throws ServiceFailedException {
+    // TODO:HA
+  }
+}
+

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StreamPumper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StreamPumper.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StreamPumper.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StreamPumper.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+import org.apache.commons.logging.Log;
+
+/**
+ * Class responsible for pumping the streams of the subprocess
+ * out to log4j. stderr is pumped to WARN level and stdout is
+ * pumped to INFO level.
+ */
+class StreamPumper {
+  enum StreamType {
+    STDOUT, STDERR;
+  }
+
+  private final Log log;
+  
+  final Thread thread;
+  final String logPrefix;
+  final StreamPumper.StreamType type;
+  private final InputStream stream;
+  private boolean started = false;
+  
+  StreamPumper(final Log log, final String logPrefix,
+      final InputStream stream, final StreamType type) {
+    this.log = log;
+    this.logPrefix = logPrefix;
+    this.stream = stream;
+    this.type = type;
+    
+    thread = new Thread(new Runnable() {
+      @Override
+      public void run() {
+        try {
+          pump();
+        } catch (Throwable t) {
+          // Use this pumper's own logger; this class is shared by all fencers.
+          log.warn(logPrefix +
+              ": Unable to pump output from " + type,
+              t);
+        }
+      }
+    }, logPrefix + ": StreamPumper for " + type);
+    thread.setDaemon(true);
+  }
+  
+  void join() throws InterruptedException {
+    assert started;
+    thread.join();
+  }
+
+  void start() {
+    assert !started;
+    thread.start();
+    started = true;
+  }
+
+  protected void pump() throws IOException {
+    InputStreamReader inputStreamReader = new InputStreamReader(stream);
+    BufferedReader br = new BufferedReader(inputStreamReader);
+    String line = null;
+    while ((line = br.readLine()) != null) {
+      if (type == StreamType.STDOUT) {
+        log.info(logPrefix + ": " + line);
+      } else {
+        log.warn(logPrefix + ": " + line);          
+      }
+    }
+  }
+}
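
[Editorial note: a sketch of how the fencers above drive this class; compare
ShellCommandFencer.tryFence. InterruptedException/IOException handling is
elided, and LOG stands in for any commons-logging Log instance.]

    Process p = new ProcessBuilder("bash", "-c", "echo out; echo err >&2")
        .start();
    StreamPumper out = new StreamPumper(LOG, "demo", p.getInputStream(),
        StreamPumper.StreamType.STDOUT);  // lines logged at INFO
    StreamPumper err = new StreamPumper(LOG, "demo", p.getErrorStream(),
        StreamPumper.StreamType.STDERR);  // lines logged at WARN
    out.start();
    err.start();
    p.waitFor();
    out.join();
    err.join();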

Copied: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java (from r1159756, hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java?p2=hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java&p1=hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java&r1=1159756&r2=1159782&rev=1159782&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/NamenodeProtocols.java Fri Aug 19 20:47:40 2011
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hdfs.server.protocol;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.ha.HAServiceProtocol;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
@@ -32,5 +33,6 @@ public interface NamenodeProtocols
           NamenodeProtocol,
           RefreshAuthorizationPolicyProtocol,
           RefreshUserMappingsProtocol,
-          GetUserMappingsProtocol {
+          GetUserMappingsProtocol,
+          HAServiceProtocol {
 }

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/native/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,8 @@
+/hadoop/common/trunk/hadoop-hdfs/src/main/native:1152502-1159756
+/hadoop/core/branches/branch-0.19/hdfs/src/main/native:713112
+/hadoop/core/branches/branch-0.19/mapred/src/c++/libhdfs:713112
+/hadoop/core/trunk/src/c++/libhdfs:776175-784663
+/hadoop/hdfs/branches/HDFS-1052/src/c++/libhdfs:987665-1095512
+/hadoop/hdfs/branches/HDFS-1052/src/main/native:987665-1095512
+/hadoop/hdfs/branches/HDFS-265/src/main/native:796829-820463
+/hadoop/hdfs/branches/branch-0.21/src/main/native:820487

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/webapps/datanode/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,10 @@
+/hadoop/common/trunk/hadoop-hdfs/src/main/webapps/datanode:1158072-1159756
+/hadoop/core/branches/branch-0.19/hdfs/src/main/webapps/datanode:713112
+/hadoop/core/branches/branch-0.19/hdfs/src/webapps/datanode:713112
+/hadoop/core/trunk/src/webapps/datanode:776175-784663
+/hadoop/hdfs/branches/HDFS-1052/src/main/webapps/datanode:987665-1095512
+/hadoop/hdfs/branches/HDFS-1052/src/webapps/datanode:987665-1095512
+/hadoop/hdfs/branches/HDFS-265/src/main/webapps/datanode:796829-820463
+/hadoop/hdfs/branches/HDFS-265/src/webapps/datanode:796829-820463
+/hadoop/hdfs/branches/branch-0.21/src/main/webapps/datanode:820487
+/hadoop/hdfs/branches/branch-0.21/src/webapps/datanode:820487

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/webapps/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,10 @@
+/hadoop/common/trunk/hadoop-hdfs/src/main/webapps/hdfs:1152502-1159756
+/hadoop/core/branches/branch-0.19/hdfs/src/main/webapps/hdfs:713112
+/hadoop/core/branches/branch-0.19/hdfs/src/webapps/hdfs:713112
+/hadoop/core/trunk/src/webapps/hdfs:776175-784663
+/hadoop/hdfs/branches/HDFS-1052/src/main/webapps/hdfs:987665-1095512
+/hadoop/hdfs/branches/HDFS-1052/src/webapps/hdfs:987665-1095512
+/hadoop/hdfs/branches/HDFS-265/src/main/webapps/hdfs:796829-820463
+/hadoop/hdfs/branches/HDFS-265/src/webapps/hdfs:796829-820463
+/hadoop/hdfs/branches/branch-0.21/src/main/webapps/hdfs:820487
+/hadoop/hdfs/branches/branch-0.21/src/webapps/hdfs:820487

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/webapps/secondary/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,10 @@
+/hadoop/common/trunk/hadoop-hdfs/src/main/webapps/secondary:1152502-1159756
+/hadoop/core/branches/branch-0.19/hdfs/src/main/webapps/secondary:713112
+/hadoop/core/branches/branch-0.19/hdfs/src/webapps/secondary:713112
+/hadoop/core/trunk/src/webapps/secondary:776175-784663
+/hadoop/hdfs/branches/HDFS-1052/src/main/webapps/secondary:987665-1095512
+/hadoop/hdfs/branches/HDFS-1052/src/webapps/secondary:987665-1095512
+/hadoop/hdfs/branches/HDFS-265/src/main/webapps/secondary:796829-820463
+/hadoop/hdfs/branches/HDFS-265/src/webapps/secondary:796829-820463
+/hadoop/hdfs/branches/branch-0.21/src/main/webapps/secondary:820487
+/hadoop/hdfs/branches/branch-0.21/src/webapps/secondary:820487

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/
------------------------------------------------------------------------------
--- svn:externals (added)
+++ svn:externals Fri Aug 19 20:47:40 2011
@@ -0,0 +1 @@
+bin https://svn.apache.org/repos/asf/hadoop/common/trunk/common/src/test/bin

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,6 @@
+/hadoop/common/trunk/hadoop-hdfs/src/test/hdfs:1158072-1159756
+/hadoop/core/branches/branch-0.19/hdfs/src/test/hdfs:713112
+/hadoop/core/trunk/src/test/hdfs:776175-785643
+/hadoop/hdfs/branches/HDFS-1052/src/test/hdfs:987665-1095512
+/hadoop/hdfs/branches/HDFS-265/src/test/hdfs:796829-820463
+/hadoop/hdfs/branches/branch-0.21/src/test/hdfs:820487

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestNodeFencer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestNodeFencer.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestNodeFencer.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestNodeFencer.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import static org.junit.Assert.*;
+
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class TestNodeFencer {
+
+  @Before
+  public void clearMockState() {
+    AlwaysSucceedFencer.fenceCalled = 0;
+    AlwaysSucceedFencer.callArgs.clear();
+    AlwaysFailFencer.fenceCalled = 0;
+    AlwaysFailFencer.callArgs.clear();
+  }
+
+  @Test
+  public void testSingleFencer() throws BadFencingConfigurationException {
+    NodeFencer fencer = setupFencer(
+        AlwaysSucceedFencer.class.getName() + "(foo)");
+    assertTrue(fencer.fence());
+    assertEquals(1, AlwaysSucceedFencer.fenceCalled);
+    assertEquals("foo", AlwaysSucceedFencer.callArgs.get(0));
+  }
+  
+  @Test
+  public void testMultipleFencers() throws BadFencingConfigurationException {
+    NodeFencer fencer = setupFencer(
+        AlwaysSucceedFencer.class.getName() + "(foo)\n" +
+        AlwaysSucceedFencer.class.getName() + "(bar)\n");
+    assertTrue(fencer.fence());
+    // Only one call, since the first fencer succeeds
+    assertEquals(1, AlwaysSucceedFencer.fenceCalled);
+    assertEquals("foo", AlwaysSucceedFencer.callArgs.get(0));
+  }
+  
+  @Test
+  public void testWhitespaceAndCommentsInConfig()
+      throws BadFencingConfigurationException {
+    NodeFencer fencer = setupFencer(
+        "\n" +
+        " # the next one will always fail\n" +
+        " " + AlwaysFailFencer.class.getName() + "(foo) # <- fails\n" +
+        AlwaysSucceedFencer.class.getName() + "(bar) \n");
+    assertTrue(fencer.fence());
+    // One call to each, since top fencer fails
+    assertEquals(1, AlwaysFailFencer.fenceCalled);
+    assertEquals(1, AlwaysSucceedFencer.fenceCalled);
+    assertEquals("foo", AlwaysFailFencer.callArgs.get(0));
+    assertEquals("bar", AlwaysSucceedFencer.callArgs.get(0));
+  }
+ 
+  @Test
+  public void testArglessFencer() throws BadFencingConfigurationException {
+    NodeFencer fencer = setupFencer(
+        AlwaysSucceedFencer.class.getName());
+    assertTrue(fencer.fence());
+    // One call, with null args since none were configured
+    assertEquals(1, AlwaysSucceedFencer.fenceCalled);
+    assertEquals(null, AlwaysSucceedFencer.callArgs.get(0));
+  }
+  
+  @Test
+  public void testShortName() throws BadFencingConfigurationException {
+    NodeFencer fencer = setupFencer("shell(true)");
+    assertTrue(fencer.fence());
+  }
+ 
+  private NodeFencer setupFencer(String confStr)
+      throws BadFencingConfigurationException {
+    System.err.println("Testing configuration:\n" + confStr);
+    Configuration conf = new Configuration();
+    conf.set(NodeFencer.CONF_METHODS_KEY,
+        confStr);
+    return new NodeFencer(conf);
+  }
+  
+  /**
+   * Mock fencing method that always returns true
+   */
+  public static class AlwaysSucceedFencer extends Configured
+      implements FenceMethod {
+    static int fenceCalled = 0;
+    static List<String> callArgs = Lists.newArrayList();
+
+    @Override
+    public boolean tryFence(String args) {
+      callArgs.add(args);
+      fenceCalled++;
+      return true;
+    }
+
+    @Override
+    public void checkArgs(String args) {
+    }
+  }
+  
+  /**
+   * Identical mock to above, except always returns false
+   */
+  public static class AlwaysFailFencer extends Configured
+      implements FenceMethod {
+    static int fenceCalled = 0;
+    static List<String> callArgs = Lists.newArrayList();
+
+    @Override
+    public boolean tryFence(String args) {
+      callArgs.add(args);
+      fenceCalled++;
+      return false;
+    }
+
+    @Override
+    public void checkArgs(String args) {
+    }
+  }
+}

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestShellCommandFencer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestShellCommandFencer.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestShellCommandFencer.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestShellCommandFencer.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import static org.mockito.Mockito.spy;
+
+public class TestShellCommandFencer {
+  private ShellCommandFencer fencer = createFencer();
+  
+  @BeforeClass
+  public static void setupLogSpy() {
+    ShellCommandFencer.LOG = spy(ShellCommandFencer.LOG);
+  }
+  
+  @Before
+  public void resetLogSpy() {
+    Mockito.reset(ShellCommandFencer.LOG);
+  }
+  
+  private static ShellCommandFencer createFencer() {
+    Configuration conf = new Configuration();
+    conf.set("in.fencing.tests", "yessir");
+    ShellCommandFencer fencer = new ShellCommandFencer();
+    fencer.setConf(conf);
+    return fencer;
+  }
+  
+  /**
+   * Test that the exit code of the script determines
+   * whether the fencer succeeded or failed
+   */
+  @Test
+  public void testBasicSuccessFailure() {
+    assertTrue(fencer.tryFence("exit 0"));
+    assertFalse(fencer.tryFence("exit 1"));
+    // bad path should also fail
+    assertFalse(fencer.tryFence("xxxxxxxxxxxx"));
+  }
+  
+  
+  @Test
+  public void testCheckArgs() {
+    try {
+      Configuration conf = new Configuration();
+      conf.set(NodeFencer.CONF_METHODS_KEY, "shell");
+      new NodeFencer(conf);
+      fail("Didn't throw when passing no args to shell");
+    } catch (BadFencingConfigurationException confe) {
+      GenericTestUtils.assertExceptionContains(
+          "No argument passed", confe);
+    }
+  }
+  
+  /**
+   * Test that lines on stdout get passed as INFO
+   * level messages
+   */
+  @Test
+  public void testStdoutLogging() {
+    assertTrue(fencer.tryFence("echo hello"));
+    Mockito.verify(ShellCommandFencer.LOG).info(
+        Mockito.endsWith("echo hello: hello"));
+  }
+   
+  /**
+   * Test that lines on stderr get passed as
+   * WARN level log messages
+   */
+  @Test
+  public void testStderrLogging() {
+    assertTrue(fencer.tryFence("echo hello >&2"));
+    Mockito.verify(ShellCommandFencer.LOG).warn(
+        Mockito.endsWith("echo hello >&2: hello"));
+  }
+
+  /**
+   * Verify that the Configuration gets passed as
+   * environment variables to the fencer.
+   */
+  @Test
+  public void testConfAsEnvironment() {
+    fencer.tryFence("echo $in_fencing_tests");
+    Mockito.verify(ShellCommandFencer.LOG).info(
+        Mockito.endsWith("echo $in...ing_tests: yessir"));
+  }
+
+  /**
+   * Test that we properly close off our input to the subprocess
+   * such that it knows there's no tty connected. This is important
+   * so that, if we use 'ssh', it won't try to prompt for a password
+   * and block forever, for example.
+   */
+  @Test(timeout=10000)
+  public void testSubprocessInputIsClosed() {
+    assertFalse(fencer.tryFence("read"));
+  }
+  
+  @Test
+  public void testCommandAbbreviation() {
+    assertEquals("a...f", ShellCommandFencer.abbreviate("abcdef", 5));
+    assertEquals("abcdef", ShellCommandFencer.abbreviate("abcdef", 6));
+    assertEquals("abcdef", ShellCommandFencer.abbreviate("abcdef", 7));
+
+    assertEquals("a...g", ShellCommandFencer.abbreviate("abcdefg", 5));
+    assertEquals("a...h", ShellCommandFencer.abbreviate("abcdefgh", 5));
+    assertEquals("a...gh", ShellCommandFencer.abbreviate("abcdefgh", 6));
+    assertEquals("ab...gh", ShellCommandFencer.abbreviate("abcdefgh", 7));
+  }
+}
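
Taken together, these tests pin down ShellCommandFencer's contract: the
script's exit status selects success or failure, stdout and stderr are
relayed at INFO and WARN, configuration keys are exported to the subprocess
environment with '.' replaced by '_', and stdin is closed so interactive
prompts fail fast. A minimal usage sketch, restricted to the setConf/tryFence
API exercised above (the class name, target host, and command below are
illustrative placeholders, not part of this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.server.namenode.ha.ShellCommandFencer;

    public class ShellFencingSketch {
      public static void main(String[] args) {
        ShellCommandFencer fencer = new ShellCommandFencer();
        // Configuration keys become environment variables ('.' -> '_').
        fencer.setConf(new Configuration());
        // Exit status 0 => fenced (true); non-zero or failure to exec => false.
        // Placeholder command: any shell command the operator trusts to fence.
        boolean fenced = fencer.tryFence("ssh nn2.example.com true");
        System.out.println("fenced: " + fenced);
      }
    }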

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestSshFenceByTcpPort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestSshFenceByTcpPort.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestSshFenceByTcpPort.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestSshFenceByTcpPort.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import static org.junit.Assert.*;
+
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.server.namenode.ha.SshFenceByTcpPort.Args;
+import org.apache.log4j.Level;
+import org.junit.Assume;
+import org.junit.Test;
+
+public class TestSshFenceByTcpPort {
+
+  static {
+    ((Log4JLogger)SshFenceByTcpPort.LOG).getLogger().setLevel(Level.ALL);
+  }
+  
+  private final String TEST_FENCING_ARG = System.getProperty(
+      "test.TestSshFenceByTcpPort.arg", "localhost");
+  private final String TEST_KEYFILE = System.getProperty(
+      "test.TestSshFenceByTcpPort.key");
+
+  @Test(timeout=20000)
+  public void testFence() throws BadFencingConfigurationException {
+    Assume.assumeTrue(isConfigured());
+    Configuration conf = new Configuration();
+    conf.set(SshFenceByTcpPort.CONF_IDENTITIES_KEY, TEST_KEYFILE);
+    FileSystem.setDefaultUri(conf, "localhost:8020");
+    SshFenceByTcpPort fence = new SshFenceByTcpPort();
+    fence.setConf(conf);
+    assertTrue(fence.tryFence(TEST_FENCING_ARG));
+  }
+
+  /**
+   * Test connecting to a host that will not respond on the SSH port.
+   * Make sure the attempt times out and returns false rather than
+   * throwing an exception.
+   */
+  @Test(timeout=20000)
+  public void testConnectTimeout() throws BadFencingConfigurationException {
+    Configuration conf = new Configuration();
+    conf.setInt(SshFenceByTcpPort.CONF_CONNECT_TIMEOUT_KEY, 3000);
+    SshFenceByTcpPort fence = new SshFenceByTcpPort();
+    fence.setConf(conf);
+    // Connect to Google's public DNS server, which is not running an SSH daemon.
+    assertFalse(fence.tryFence("8.8.8.8"));
+  }
+  
+  @Test
+  public void testArgsParsing() throws BadFencingConfigurationException {
+    Args args = new SshFenceByTcpPort.Args("foo@bar.com:1234");
+    assertEquals("foo", args.user);
+    assertEquals("bar.com", args.host);
+    assertEquals(1234, args.sshPort);
+    assertNull(args.targetPort);
+
+    args = new SshFenceByTcpPort.Args("foo@bar.com");
+    assertEquals("foo", args.user);
+    assertEquals("bar.com", args.host);
+    assertEquals(22, args.sshPort);
+    assertNull(args.targetPort);
+    
+    args = new SshFenceByTcpPort.Args("bar.com");
+    assertEquals(System.getProperty("user.name"), args.user);
+    assertEquals("bar.com", args.host);
+    assertEquals(22, args.sshPort);
+    assertNull(args.targetPort);
+    
+    args = new SshFenceByTcpPort.Args("bar.com:1234, 12345");
+    assertEquals(System.getProperty("user.name"), args.user);
+    assertEquals("bar.com", args.host);
+    assertEquals(1234, args.sshPort);
+    assertEquals(Integer.valueOf(12345), args.targetPort);
+    
+    args = new SshFenceByTcpPort.Args("bar, 8020");
+    assertEquals(Integer.valueOf(8020), args.targetPort);    
+  }
+  
+  @Test
+  public void testBadArgsParsing() throws BadFencingConfigurationException {
+    assertBadArgs(null);
+    assertBadArgs("");
+    assertBadArgs("bar.com:");
+    assertBadArgs("bar.com:x");
+    assertBadArgs("foo.com, x");
+  }
+  
+  private void assertBadArgs(String argStr) {
+    try {
+      new Args(argStr);
+      fail("Did not fail on bad args: " + argStr);
+    } catch (BadFencingConfigurationException e) {
+      // expected
+    }
+  }
+
+  private boolean isConfigured() {
+    return (TEST_FENCING_ARG != null && !TEST_FENCING_ARG.isEmpty()) &&
+      (TEST_KEYFILE != null && !TEST_KEYFILE.isEmpty());
+  }
+}
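
testArgsParsing above implies the sshfence argument grammar
[user@]host[:ssh-port][, target-port], with the user defaulting to the
current user and the SSH port to 22. A hedged sketch of driving the fencer
directly, using only the calls the tests exercise (the class name, host,
user, and key path are placeholders):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.server.namenode.ha.BadFencingConfigurationException;
    import org.apache.hadoop.hdfs.server.namenode.ha.SshFenceByTcpPort;

    public class SshFencingSketch {
      public static void main(String[] args) throws BadFencingConfigurationException {
        Configuration conf = new Configuration();
        // SSH private key used to authenticate (placeholder path).
        conf.set(SshFenceByTcpPort.CONF_IDENTITIES_KEY, "/home/hauser/.ssh/id_rsa");
        // Give up quickly on unreachable hosts (milliseconds).
        conf.setInt(SshFenceByTcpPort.CONF_CONNECT_TIMEOUT_KEY, 3000);
        SshFenceByTcpPort fence = new SshFenceByTcpPort();
        fence.setConf(conf);
        // Argument grammar per testArgsParsing: [user@]host[:ssh-port][, target-port]
        boolean fenced = fence.tryFence("hauser@nn2.example.com:22, 8020");
        System.out.println("fenced: " + fenced);
      }
    }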

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,10 @@
+build
+build.properties
+ivy/ivy-*.jar
+logs
+.classpath
+.externalToolBuilders
+.launches
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,2 @@
+/hadoop/common/trunk/hadoop-mapreduce:1152502-1159756
+/hadoop/core/branches/branch-0.19/mapred:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/conf/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,13 @@
+masters
+slaves
+hadoop-env.sh
+hadoop-site.xml
+core-site.xml
+mapred-site.xml
+hdfs-site.xml
+hadoop-policy.xml
+capacity-scheduler.xml
+fair-scheduler.xml
+mapred-queue-acls.xml
+mapred-queues.xml
+

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/conf/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/conf:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/conf:713112
+/hadoop/core/trunk/conf:784664-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-mr-client/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-mr-client/hadoop-mapreduce-client-app/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-mr-client/hadoop-mapreduce-client-common/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-mr-client/hadoop-mapreduce-client-core/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-mr-client/hadoop-mapreduce-client-hs/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-mr-client/hadoop-mapreduce-client-jobclient/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-mr-client/hadoop-mapreduce-client-shuffle/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-yarn/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-yarn/hadoop-yarn-api/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-yarn/hadoop-yarn-common/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-yarn/hadoop-yarn-server/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+.classpath
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/ivy/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1 @@
+ivy-*.jar

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/c++/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/c++:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/c++:713112
+/hadoop/core/trunk/src/c++:776175-784663

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/contrib:713112
+/hadoop/core/trunk/src/contrib:784664-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/block_forensics/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib/block_forensics:1158072-1159756
+/hadoop/core/branches/branch-0.19/hdfs/src/contrib/block_forensics:713112
+/hadoop/core/branches/branch-0.19/mapred/src/contrib/block_forensics:713112
+/hadoop/core/trunk/src/contrib/block_forensics:784664-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/capacity-scheduler/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib/capacity-scheduler:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/contrib/capacity-scheduler:713112
+/hadoop/core/trunk/src/contrib/capacity-scheduler:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/capacity-scheduler/src/java/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1 @@
+mapred-queues.xml

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/data_join/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib/data_join:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/contrib/data_join:713112
+/hadoop/core/trunk/src/contrib/data_join:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/dynamic-scheduler/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib/dynamic-scheduler:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/contrib/dynamic-scheduler:713112
+/hadoop/core/branches/branch-0.19/src/contrib/dynamic-scheduler:713112
+/hadoop/core/trunk/src/contrib/dynamic-scheduler:784664-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/eclipse-plugin/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib/eclipse-plugin:1158072-1159756
+/hadoop/core/branches/branch-0.19/core/src/contrib/eclipse-plugin:713112
+/hadoop/core/branches/branch-0.19/mapred/src/contrib/eclipse-plugin:713112
+/hadoop/core/trunk/src/contrib/eclipse-plugin:776175-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/fairscheduler/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib/fairscheduler:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/contrib/fairscheduler:713112
+/hadoop/core/trunk/src/contrib/fairscheduler:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/index/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib/index:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/contrib/index:713112
+/hadoop/core/trunk/src/contrib/index:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/index/conf/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1 @@
+index-config.xml

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/streaming/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib/streaming:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/contrib/streaming:713112
+/hadoop/core/trunk/src/contrib/streaming:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/contrib/vaidya/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/contrib/vaidya:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/contrib/vaidya:713112
+/hadoop/core/trunk/src/contrib/vaidya:776175-786373

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/docs/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1 @@
+build

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/examples/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/examples:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/examples:713112
+/hadoop/core/trunk/src/examples:776175-784663

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/java:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/java:713112
+/hadoop/core/trunk/src/mapred:776175-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/test/
------------------------------------------------------------------------------
--- svn:externals (added)
+++ svn:externals Fri Aug 19 20:47:40 2011
@@ -0,0 +1 @@
+bin https://svn.apache.org/repos/asf/hadoop/common/trunk/common/src/test/bin

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/test/mapred/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/test/mapred:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/test/mapred:713112
+/hadoop/core/trunk/src/test/mapred:776175-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/test/mapred/org/apache/hadoop/fs/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/test/mapred/org/apache/hadoop/fs:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/fs:713112
+/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/fs:776175-785643
+/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/fs:817878-835934

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/test/mapred/org/apache/hadoop/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/test/mapred/org/apache/hadoop/hdfs:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/hdfs:713112
+/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/hdfs:776175-785643
+/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/hdfs:817878-835934

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/test/mapred/org/apache/hadoop/ipc/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,6 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/test/mapred/org/apache/hadoop/ipc:1158072-1159756
+/hadoop/core/branches/branch-0.19/hdfs/src/test/hdfs-with-mr/org/apache/hadoop/ipc:713112
+/hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/ipc:713112
+/hadoop/core/trunk/src/test/hdfs-with-mr/org/apache/hadoop/ipc:776175-784663
+/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/ipc:776175-785643
+/hadoop/hdfs/branches/HDFS-265/src/test/hdfs-with-mr/org/apache/hadoop/ipc:796829-820463

Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce/src/webapps/job/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,3 @@
+/hadoop/common/trunk/hadoop-mapreduce/src/webapps/job:1158072-1159756
+/hadoop/core/branches/branch-0.19/mapred/src/webapps/job:713112
+/hadoop/core/trunk/src/webapps/job:776175-785643

Propchange: hadoop/common/branches/HDFS-1623/hadoop-project-distro/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,5 @@
+.git
+.project
+.settings
+target
+

Modified: hadoop/common/branches/HDFS-1623/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-project/pom.xml?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-project/pom.xml Fri Aug 19 20:47:40 2011
@@ -34,6 +34,14 @@
     </snapshotRepository>
   </distributionManagement>
 
+  <repositories>
+    <repository>
+      <id>apache.snapshots.https</id>
+      <name>${distMgmtSnapshotsName}</name>
+      <url>${distMgmtSnapshotsUrl}</url>
+    </repository>
+  </repositories>
+
   <licenses>
     <license>
       <name>The Apache Software License, Version 2.0</name>
@@ -87,11 +95,6 @@
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-common-docs</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-common</artifactId>
         <version>${project.version}</version>
       </dependency>
@@ -191,7 +194,7 @@
       <dependency>
         <groupId>commons-logging</groupId>
         <artifactId>commons-logging</artifactId>
-        <version>1.0.4</version>
+        <version>1.1.1</version>
         <exclusions>
           <exclusion>
             <groupId>avalon-framework</groupId>
@@ -210,7 +213,7 @@
       <dependency>
         <groupId>commons-logging</groupId>
         <artifactId>commons-logging-api</artifactId>
-        <version>1.0.4</version>
+        <version>1.1</version>
       </dependency>
       <dependency>
         <groupId>log4j</groupId>
@@ -292,12 +295,12 @@
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
-        <version>1.5.8</version>
+        <version>1.5.11</version>
       </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-log4j12</artifactId>
-        <version>1.5.8</version>
+        <version>1.5.11</version>
       </dependency>
       <dependency>
         <groupId>org.eclipse.jdt</groupId>
@@ -312,7 +315,12 @@
       <dependency>
         <groupId>org.codehaus.jackson</groupId>
         <artifactId>jackson-mapper-asl</artifactId>
-        <version>1.5.2</version>
+        <version>1.6.9</version>
+      </dependency>
+      <dependency>
+        <groupId>org.aspectj</groupId>
+        <artifactId>aspectjtools</artifactId>
+        <version>1.6.5</version>
       </dependency>
       <dependency>
         <groupId>org.aspectj</groupId>
@@ -325,15 +333,14 @@
         <version>1.8.5</version>
       </dependency>
       <dependency>
-        <groupId>org.apache.hadoop</groupId>
+        <groupId>org.apache.avro</groupId>
         <artifactId>avro</artifactId>
-        <version>1.3.2</version>
-        <exclusions>
-          <exclusion>
-            <groupId>org.apache.ant</groupId>
-            <artifactId>ant</artifactId>
-          </exclusion>
-        </exclusions>
+        <version>1.5.2</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-ipc</artifactId>
+        <version>1.5.2</version>
       </dependency>
       <dependency>
         <groupId>net.sf.kosmosfs</groupId>
@@ -350,6 +357,16 @@
         <artifactId>protobuf-java</artifactId>
         <version>2.4.0a</version>
       </dependency>
+      <dependency>
+        <groupId>commons-daemon</groupId>
+        <artifactId>commons-daemon</artifactId>
+        <version>1.0.1</version>
+      </dependency>
+      <dependency>
+        <groupId>com.jcraft</groupId>
+        <artifactId>jsch</artifactId>
+        <version>0.1.42</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
@@ -446,6 +463,16 @@
           <artifactId>maven-deploy-plugin</artifactId>
           <version>2.5</version>
         </plugin>
+        <plugin>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro-maven-plugin</artifactId>
+          <version>1.5.2</version>
+        </plugin>
+        <plugin>
+          <groupId>org.codehaus.mojo.jspc</groupId>
+          <artifactId>jspc-maven-plugin</artifactId>
+          <version>2.0-alpha-3</version>
+        </plugin>
       </plugins>
     </pluginManagement>
 
@@ -496,6 +523,39 @@
           <target>1.6</target>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <forkMode>always</forkMode>
+          <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
+          <argLine>-Xmx1024m</argLine>
+          <environmentVariables>
+            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib</LD_LIBRARY_PATH>
+          </environmentVariables>
+          <systemPropertyVariables>
+            <!-- TODO: all references in testcases should be updated to this default -->
+            <test.build.dir>${test.build.dir}</test.build.dir>
+            <hadoop.tmp.dir>${hadoop.tmp.dir}</hadoop.tmp.dir>
+            <test.build.data>${test.build.data}</test.build.data>
+            <test.build.webapps>${test.build.webapps}</test.build.webapps>
+            <test.cache.data>${test.cache.data}</test.cache.data>
+            <hadoop.log.dir>${hadoop.log.dir}</hadoop.log.dir>
+            <test.build.classes>${test.build.classes}</test.build.classes>
+
+            <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
+            <java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
+          </systemPropertyVariables>
+          <includes>
+            <include>**/Test*.java</include>
+          </includes>
+          <excludes>
+            <exclude>**/${test.exclude}.java</exclude>
+            <exclude>${test.exclude.pattern}</exclude>
+            <exclude>**/Test*$*.java</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
 
@@ -602,5 +664,4 @@
       </build>
     </profile>
   </profiles>
-
 </project>
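
The surefire stanza added above forks a fresh JVM per test class with a
1 GB heap cap and a 900-second timeout, and wires two unresolved properties,
test.exclude and test.exclude.pattern, into the exclude list. Assuming no
defaults are bound elsewhere in the build (none appear in this hunk),
individual tests can then be skipped from the command line, for example:

    # Skip a single test class by name (fills **/${test.exclude}.java):
    mvn test -Dtest.exclude=TestShellCommandFencer

    # Skip everything matching a pattern (fills ${test.exclude.pattern}):
    mvn test '-Dtest.exclude.pattern=**/TestSsh*.java'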

Modified: hadoop/common/branches/HDFS-1623/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/pom.xml?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/pom.xml Fri Aug 19 20:47:40 2011
@@ -35,9 +35,11 @@
 
   <modules>
     <module>hadoop-project</module>
+    <module>hadoop-project-distro</module>
     <module>hadoop-assemblies</module>
     <module>hadoop-annotations</module>
     <module>hadoop-common</module>
+    <module>hadoop-hdfs</module>
   </modules>
 
   <build>