Posted to common-commits@hadoop.apache.org by at...@apache.org on 2011/09/14 00:49:37 UTC

svn commit: r1170378 - in /hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common: ./ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/io/ src/main/java/org/apache/hadoop/io/serializer/ src/main/java/org/apache/hadoop/ipc/ sr...

Author: atm
Date: Tue Sep 13 22:49:27 2011
New Revision: 1170378

URL: http://svn.apache.org/viewvc?rev=1170378&view=rev
Log:
Merge trunk into HA branch

Added:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/capacity-scheduler.xml
      - copied unchanged from r1170371, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/capacity-scheduler.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/commons-logging.properties
      - copied unchanged from r1170371, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/commons-logging.properties
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh
      - copied unchanged from r1170371, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-policy.xml
      - copied unchanged from r1170371, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-policy.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
      - copied unchanged from r1170371, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-queue-acls.xml
      - copied unchanged from r1170371, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-queue-acls.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-site.xml
      - copied unchanged from r1170371, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/mapred-site.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/taskcontroller.cfg
      - copied unchanged from r1170371, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/taskcontroller.cfg
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java
      - copied unchanged from r1170371, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-datanode
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-jobtracker
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-namenode
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-tasktracker
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-create-user.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-hdfs.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-single-node.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-datanode
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-jobtracker
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-namenode
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-tasktracker
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/update-hadoop-env.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/   (props changed)

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt Tue Sep 13 22:49:27 2011
@@ -5,13 +5,19 @@ Trunk (unreleased changes)
   IMPROVEMENTS
 
     HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
-  HADOOP-7524 Change RPC to allow multiple protocols including multiple versions of the same protocol (sanjay Radia)
+
+    HADOOP-7524. Change RPC to allow multiple protocols including multiple
+                 versions of the same protocol (sanjay Radia)
+
+    HADOOP-7607. Simplify the RPC proxy cleanup process. (atm)
 
   BUGS
 
     HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required
                  by Jersey (Alejandro Abdelnur via atm)
 
+    HADOOP-7610. Fix for hadoop debian package (Eric Yang via gkesavan)
+
 Release 0.23.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -371,6 +377,12 @@ Release 0.23.0 - Unreleased
     HADOOP-7507. Allow ganglia metrics to include the metrics system tags
                  in the gmetric names. (Alejandro Abdelnur via todd)
 
+    HADOOP-7612. Change test-patch to run tests for all nested modules.
+    (tomwhite)
+
+    HADOOP-7599. Script improvements to setup a secure Hadoop cluster
+    (Eric Yang via ddas)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@@ -563,6 +575,14 @@ Release 0.23.0 - Unreleased
     HADOOP-7593. Fix AssertionError in TestHttpServer.testMaxThreads().
     (Uma Maheswara Rao G via szetszwo)
 
+    HADOOP-7598. Fix smart-apply-patch.sh to handle patching from a sub
+    directory correctly. (Robert Evans via acmurthy) 
+
+    HADOOP-7328. When a serializer class is missing, return null, not throw
+    an NPE. (Harsh J Chouraria via todd)
+
+    HADOOP-7626. Bugfix for a config generator (Eric Yang via ddas)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -1075,6 +1095,9 @@ Release 0.22.0 - Unreleased
     HADOOP-7390. VersionInfo not generated properly in git after unsplit. (todd
     via atm)
 
+    HADOOP-7568. SequenceFile should not print into stdout.
+    (Plamen Jeliazkov via shv)
+
 Release 0.21.1 - Unreleased
 
   IMPROVEMENTS

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 13 22:49:27 2011
@@ -1,5 +1,5 @@
 /hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1166484
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1170371
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 13 22:49:27 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1166484
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1170371
 /hadoop/core/branches/branch-0.19/src/docs:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 13 22:49:27 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1166484
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1170371
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java Tue Sep 13 22:49:27 2011
@@ -1668,7 +1668,6 @@ public class SequenceFile {
       try {
         seek(start);
         this.end = this.in.getPos() + length;
-        System.out.println("Setting end to " + end);
         // if it wrapped around, use the max
         if (end < length) {
           end = Long.MAX_VALUE;

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java Tue Sep 13 22:49:27 2011
@@ -27,10 +27,10 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization;
 import org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.util.StringUtils;
 
 /**
  * <p>
@@ -50,14 +50,15 @@ public class SerializationFactory extend
    * <p>
    * Serializations are found by reading the <code>io.serializations</code>
    * property from <code>conf</code>, which is a comma-delimited list of
-   * classnames. 
+   * classnames.
    * </p>
    */
   public SerializationFactory(Configuration conf) {
     super(conf);
-    for (String serializerName : conf.getStrings("io.serializations", 
-      new String[]{WritableSerialization.class.getName(), 
-        AvroSpecificSerialization.class.getName(), 
+    for (String serializerName : conf.getStrings(
+      CommonConfigurationKeys.IO_SERIALIZATIONS_KEY,
+      new String[]{WritableSerialization.class.getName(),
+        AvroSpecificSerialization.class.getName(),
         AvroReflectSerialization.class.getName()})) {
       add(conf, serializerName);
     }
@@ -67,27 +68,35 @@ public class SerializationFactory extend
   private void add(Configuration conf, String serializationName) {
     try {
       Class<? extends Serialization> serializionClass =
-	(Class<? extends Serialization>) conf.getClassByName(serializationName);
+        (Class<? extends Serialization>) conf.getClassByName(serializationName);
       serializations.add((Serialization)
-	  ReflectionUtils.newInstance(serializionClass, getConf()));
+      ReflectionUtils.newInstance(serializionClass, getConf()));
     } catch (ClassNotFoundException e) {
       LOG.warn("Serialization class not found: ", e);
     }
   }
 
   public <T> Serializer<T> getSerializer(Class<T> c) {
-    return getSerialization(c).getSerializer(c);
+    Serialization<T> serializer = getSerialization(c);
+    if (serializer != null) {
+      return serializer.getSerializer(c);
+    }
+    return null;
   }
 
   public <T> Deserializer<T> getDeserializer(Class<T> c) {
-    return getSerialization(c).getDeserializer(c);
+    Serialization<T> serializer = getSerialization(c);
+    if (serializer != null) {
+      return serializer.getDeserializer(c);
+    }
+    return null;
   }
 
   @SuppressWarnings("unchecked")
   public <T> Serialization<T> getSerialization(Class<T> c) {
     for (Serialization serialization : serializations) {
       if (serialization.accept(c)) {
-	return (Serialization<T>) serialization;
+        return (Serialization<T>) serialization;
       }
     }
     return null;
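
As an illustrative aside, not part of the patch itself: the SerializationFactory change above
(HADOOP-7328) makes getSerializer() and getDeserializer() return null when getSerialization()
finds no configured serialization that accepts the requested class, instead of failing later
with a NullPointerException. A minimal caller-side sketch of that contract follows; the class
UnregisteredType is a hypothetical name, assuming a plain type that none of the default
serializations (Writable, Avro specific, Avro reflect) accept.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.serializer.SerializationFactory;
    import org.apache.hadoop.io.serializer.Serializer;

    public class SerializerLookupSketch {
      // Hypothetical type that none of the configured serializations accept.
      private static class UnregisteredType {}

      public static void main(String[] args) {
        Configuration conf = new Configuration();
        SerializationFactory factory = new SerializationFactory(conf);
        // With this change a missing serialization yields null rather than an
        // NPE, so the caller can report the problem itself.
        Serializer<UnregisteredType> serializer =
            factory.getSerializer(UnregisteredType.class);
        if (serializer == null) {
          System.err.println("No serialization found for "
              + UnregisteredType.class.getName());
        }
      }
    }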

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java Tue Sep 13 22:49:27 2011
@@ -131,7 +131,7 @@ public class AvroRpcEngine implements Rp
     }
 
     public void close() throws IOException {
-      ENGINE.stopProxy(tunnel);
+      RPC.stopProxy(tunnel);
     }
   }
 
@@ -152,15 +152,6 @@ public class AvroRpcEngine implements Rp
        false);
   }
 
-  /** Stop this proxy. */
-  public void stopProxy(Object proxy) {
-    try {
-      ((Invoker)Proxy.getInvocationHandler(proxy)).close();
-    } catch (IOException e) {
-      LOG.warn("Error while stopping "+proxy, e);
-    }
-  }
-
   private class Invoker implements InvocationHandler, Closeable {
     private final ClientTransceiver tx;
     private final SpecificRequestor requestor;

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Tue Sep 13 22:49:27 2011
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.ipc;
 
+import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.Proxy;
 import java.lang.reflect.Method;
 
@@ -26,6 +27,7 @@ import java.net.InetSocketAddress;
 import java.net.NoRouteToHostException;
 import java.net.SocketTimeoutException;
 import java.io.*;
+import java.io.Closeable;
 import java.util.Map;
 import java.util.HashMap;
 
@@ -80,12 +82,8 @@ public class RPC {
   private RPC() {}                                  // no public ctor
 
   // cache of RpcEngines by protocol
-  private static final Map<Class,RpcEngine> PROTOCOL_ENGINES
-    = new HashMap<Class,RpcEngine>();
-
-  // track what RpcEngine is used by a proxy class, for stopProxy()
-  private static final Map<Class,RpcEngine> PROXY_ENGINES
-    = new HashMap<Class,RpcEngine>();
+  private static final Map<Class<?>,RpcEngine> PROTOCOL_ENGINES
+    = new HashMap<Class<?>,RpcEngine>();
 
   private static final String ENGINE_PROP = "rpc.engine";
 
@@ -96,32 +94,23 @@ public class RPC {
    * @param engine the RpcEngine impl
    */
   public static void setProtocolEngine(Configuration conf,
-                                Class protocol, Class engine) {
+                                Class<?> protocol, Class<?> engine) {
     conf.setClass(ENGINE_PROP+"."+protocol.getName(), engine, RpcEngine.class);
   }
 
   // return the RpcEngine configured to handle a protocol
-  private static synchronized RpcEngine getProtocolEngine(Class protocol,
+  private static synchronized RpcEngine getProtocolEngine(Class<?> protocol,
                                                           Configuration conf) {
     RpcEngine engine = PROTOCOL_ENGINES.get(protocol);
     if (engine == null) {
       Class<?> impl = conf.getClass(ENGINE_PROP+"."+protocol.getName(),
                                     WritableRpcEngine.class);
       engine = (RpcEngine)ReflectionUtils.newInstance(impl, conf);
-      if (protocol.isInterface())
-        PROXY_ENGINES.put(Proxy.getProxyClass(protocol.getClassLoader(),
-                                              protocol),
-                          engine);
       PROTOCOL_ENGINES.put(protocol, engine);
     }
     return engine;
   }
 
-  // return the RpcEngine that handles a proxy object
-  private static synchronized RpcEngine getProxyEngine(Object proxy) {
-    return PROXY_ENGINES.get(proxy.getClass());
-  }
-
   /**
    * A version mismatch for the RPC protocol.
    */
@@ -477,13 +466,30 @@ public class RPC {
   }
 
   /**
-   * Stop this proxy and release its invoker's resource
-   * @param proxy the proxy to be stopped
+   * Stop this proxy and release its invoker's resource by getting the
+   * invocation handler for the given proxy object and calling
+   * {@link Closeable#close} if that invocation handler implements
+   * {@link Closeable}.
+   * 
+   * @param proxy the RPC proxy object to be stopped
    */
   public static void stopProxy(Object proxy) {
-    RpcEngine rpcEngine;
-    if (proxy!=null && (rpcEngine = getProxyEngine(proxy)) != null) {
-      rpcEngine.stopProxy(proxy);
+    InvocationHandler invocationHandler = null;
+    try {
+      invocationHandler = Proxy.getInvocationHandler(proxy);
+    } catch (IllegalArgumentException e) {
+      LOG.error("Tried to call RPC.stopProxy on an object that is not a proxy.", e);
+    }
+    if (proxy != null && invocationHandler != null &&
+        invocationHandler instanceof Closeable) {
+      try {
+        ((Closeable)invocationHandler).close();
+      } catch (IOException e) {
+        LOG.error("Stopping RPC invocation handler caused exception", e);
+      }
+    } else {
+      LOG.error("Could not get invocation handler " + invocationHandler +
+          " for proxy " + proxy + ", or invocation handler is not closeable.");
     }
   }
 
@@ -532,7 +538,7 @@ public class RPC {
   }
 
   /** Construct a server for a protocol implementation instance. */
-  public static Server getServer(Class protocol,
+  public static Server getServer(Class<?> protocol,
                                  Object instance, String bindAddress,
                                  int port, Configuration conf) 
     throws IOException {
@@ -543,7 +549,7 @@ public class RPC {
    * @deprecated secretManager should be passed.
    */
   @Deprecated
-  public static Server getServer(Class protocol,
+  public static Server getServer(Class<?> protocol,
                                  Object instance, String bindAddress, int port,
                                  int numHandlers,
                                  boolean verbose, Configuration conf) 
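
As an illustrative aside, not part of the patch itself: with HADOOP-7607, the RPC.stopProxy
shown above no longer routes through the removed PROXY_ENGINES map and per-engine stopProxy
methods; it fetches the proxy's InvocationHandler and calls close() whenever that handler
implements Closeable (as WritableRpcEngine's Invoker now does, further below). A minimal
sketch of that contract, using a stand-in handler with hypothetical names (DemoProtocol,
DemoInvoker) rather than real Hadoop classes:

    import java.io.Closeable;
    import java.io.IOException;
    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.Method;
    import java.lang.reflect.Proxy;

    public class StopProxySketch {

      public interface DemoProtocol {
        String ping();
      }

      // Stand-in for an RpcEngine's Invoker: because it implements Closeable,
      // the new RPC.stopProxy can release its resources without knowing which
      // engine created the proxy.
      static class DemoInvoker implements InvocationHandler, Closeable {
        @Override
        public Object invoke(Object proxy, Method method, Object[] args) {
          return "pong";
        }
        @Override
        public void close() throws IOException {
          // A real engine would stop its IPC client or transceiver here.
          System.out.println("invoker closed");
        }
      }

      public static void main(String[] args) throws IOException {
        DemoProtocol proxy = (DemoProtocol) Proxy.newProxyInstance(
            DemoProtocol.class.getClassLoader(),
            new Class<?>[] { DemoProtocol.class },
            new DemoInvoker());
        System.out.println(proxy.ping());

        // Mirrors what RPC.stopProxy(proxy) now does internally.
        InvocationHandler handler = Proxy.getInvocationHandler(proxy);
        if (handler instanceof Closeable) {
          ((Closeable) handler).close();
        }
      }
    }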

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java Tue Sep 13 22:49:27 2011
@@ -41,9 +41,6 @@ public interface RpcEngine {
                   UserGroupInformation ticket, Configuration conf,
                   SocketFactory factory, int rpcTimeout) throws IOException;
 
-  /** Stop this proxy. */
-  void stopProxy(Object proxy);
-
   /** Expert: Make multiple, parallel calls to a set of servers. */
   Object[] call(Method method, Object[][] params, InetSocketAddress[] addrs,
                 UserGroupInformation ticket, Configuration conf)

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java Tue Sep 13 22:49:27 2011
@@ -30,6 +30,7 @@ import java.io.*;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.io.Closeable;
 import java.util.Map;
 import java.util.HashMap;
 
@@ -219,7 +220,7 @@ public class WritableRpcEngine implement
 
   private static ClientCache CLIENTS=new ClientCache();
   
-  private static class Invoker implements InvocationHandler {
+  private static class Invoker implements InvocationHandler, Closeable {
     private Client.ConnectionId remoteId;
     private Client client;
     private boolean isClosed = false;
@@ -250,7 +251,7 @@ public class WritableRpcEngine implement
     }
     
     /* close the IPC client that's responsible for this invoker's RPCs */ 
-    synchronized private void close() {
+    synchronized public void close() {
       if (!isClosed) {
         isClosed = true;
         CLIENTS.stopClient(client);
@@ -281,15 +282,6 @@ public class WritableRpcEngine implement
             factory, rpcTimeout));
     return new ProtocolProxy<T>(protocol, proxy, true);
   }
-
-  /**
-   * Stop this proxy and release its invoker's resource
-   * @param proxy the proxy to be stopped
-   */
-  public void stopProxy(Object proxy) {
-    ((Invoker)Proxy.getInvocationHandler(proxy)).close();
-  }
-
   
   /** Expert: Make multiple, parallel calls to a set of servers. */
   public Object[] call(Method method, Object[][] params,

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-datanode
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-datanode?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-datanode (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-datanode Tue Sep 13 22:49:27 2011
@@ -39,6 +39,14 @@ fi
 
 . /lib/lsb/init-functions
 
+if [ -n "$HADOOP_SECURE_DN_USER" ]; then
+  DN_USER="root"
+  IDENT_USER=${HADOOP_SECURE_DN_USER}
+else
+  DN_USER="hdfs"
+  IDENT_USER=${DN_USER}
+fi
+
 # Are we running from init?
 run_by_init() {
     ([ "$previous" ] && [ "$runlevel" ]) || [ "$runlevel" = S ]
@@ -67,13 +75,14 @@ check_privsep_dir() {
 }
 
 export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+export HADOOP_PREFIX="/usr"
 
 case "$1" in
   start)
 	check_privsep_dir
 	check_for_no_start
 	log_daemon_msg "Starting Apache Hadoop Data Node server" "hadoop-datanode"
-	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid -c ${DN_USER} -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
 	    log_end_msg 0
 	else
 	    log_end_msg 1
@@ -81,7 +90,7 @@ case "$1" in
 	;;
   stop)
 	log_daemon_msg "Stopping Apache Hadoop Data Node server" "hadoop-datanode"
-	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid; then
+	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid; then
 	    log_end_msg 0
 	else
 	    log_end_msg 1
@@ -91,9 +100,9 @@ case "$1" in
   restart)
 	check_privsep_dir
 	log_daemon_msg "Restarting Apache Hadoop Data Node server" "hadoop-datanode"
-	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid
+	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid
 	check_for_no_start log_end_msg
-	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid -c ${DN_USER} -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
 	    log_end_msg 0
 	else
 	    log_end_msg 1
@@ -104,14 +113,14 @@ case "$1" in
 	check_privsep_dir
 	log_daemon_msg "Restarting Apache Hadoop Data Node server" "hadoop-datanode"
 	set +e
-	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid
+	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid
 	RET="$?"
 	set -e
 	case $RET in
 	    0)
 		# old daemon stopped
 		check_for_no_start log_end_msg
-		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
+		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid -c ${DN_USER} -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
 		    log_end_msg 0
 		else
 		    log_end_msg 1
@@ -131,7 +140,7 @@ case "$1" in
 	;;
 
   status)
-	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid ${JAVA_HOME}/bin/java hadoop-datanode && exit 0 || exit $?
+	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-${IDENT_USER}-datanode.pid ${JAVA_HOME}/bin/java hadoop-datanode && exit 0 || exit $?
 	;;
 
   *)

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-jobtracker
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-jobtracker?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-jobtracker (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-jobtracker Tue Sep 13 22:49:27 2011
@@ -67,6 +67,7 @@ check_privsep_dir() {
 }
 
 export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+export HADOOP_PREFIX="/usr"
 
 case "$1" in
   start)

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-namenode
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-namenode?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-namenode (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-namenode Tue Sep 13 22:49:27 2011
@@ -67,10 +67,11 @@ check_privsep_dir() {
 }
 
 format() {
-    su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} namenode -format' hdfs
+    sudo -u hdfs ${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} namenode -format
 }
 
 export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+export HADOOP_PREFIX="/usr"
 
 case "$1" in
   start)

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-tasktracker
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-tasktracker?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-tasktracker (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/init.d/hadoop-tasktracker Tue Sep 13 22:49:27 2011
@@ -67,6 +67,7 @@ check_privsep_dir() {
 }
 
 export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+export HADOOP_PREFIX="/usr"
 
 case "$1" in
   start)

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-create-user.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-create-user.sh?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-create-user.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-create-user.sh Tue Sep 13 22:49:27 2011
@@ -14,9 +14,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+this="${BASH_SOURCE-$0}"
+bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
+script="$(basename -- "$this")"
+this="$bin/$script"
 
 if [ "$HADOOP_HOME" != "" ]; then
   echo "Warning: \$HADOOP_HOME is deprecated."
@@ -29,30 +30,86 @@ usage() {
   echo "
 usage: $0 <parameters>
   Require parameter:
-     -u <username>                                 Create user on HDFS
+     --config /etc/hadoop                                  Location of Hadoop configuration file
+     -u <username>                                         Create user on HDFS
   Optional parameters:
-     -h                                            Display this message
+     -h                                                    Display this message
+     --kerberos-realm=KERBEROS.EXAMPLE.COM                 Set Kerberos realm
+     --super-user=hdfs                                     Set super user id
+     --super-user-keytab=/etc/security/keytabs/hdfs.keytab Set super user keytab location
   "
   exit 1
 }
 
-# Parse script parameters
-if [ $# != 2 ] ; then
+OPTS=$(getopt \
+  -n $0 \
+  -o '' \
+  -l 'kerberos-realm:' \
+  -l 'super-user:' \
+  -l 'super-user-keytab:' \
+  -o 'h' \
+  -o 'u' \
+  -- "$@")
+
+if [ $? != 0 ] ; then
     usage
     exit 1
 fi
 
-while getopts "hu:" OPTION
-do
-  case $OPTION in
-    u)
-      SETUP_USER=$2; shift 2
-      ;; 
-    h)
+create_user() {
+  if [ "${SETUP_USER}" = "" ]; then
+    break
+  fi
+  HADOOP_HDFS_USER=${HADOOP_HDFS_USER:-hdfs}
+  export HADOOP_PREFIX
+  export HADOOP_CONF_DIR
+  export JAVA_HOME
+  export SETUP_USER=${SETUP_USER}
+  export SETUP_PATH=/user/${SETUP_USER}
+
+  if [ ! "${KERBEROS_REALM}" = "" ]; then
+    # locate kinit cmd
+    if [ -e /etc/lsb-release ]; then
+      KINIT_CMD="/usr/bin/kinit -kt ${HDFS_USER_KEYTAB} ${HADOOP_HDFS_USER}"
+    else
+      KINIT_CMD="/usr/kerberos/bin/kinit -kt ${HDFS_USER_KEYTAB} ${HADOOP_HDFS_USER}"
+    fi
+    su -c "${KINIT_CMD}" ${HADOOP_HDFS_USER} 
+  fi
+
+  su -c "${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir ${SETUP_PATH}" ${HADOOP_HDFS_USER}
+  su -c "${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown ${SETUP_USER}:${SETUP_USER} ${SETUP_PATH}" ${HADOOP_HDFS_USER}
+  su -c "${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chmod 711 ${SETUP_PATH}" ${HADOOP_HDFS_USER}
+
+  if [ "$?" == "0" ]; then
+    echo "User directory has been setup: ${SETUP_PATH}"
+  fi
+}
+
+eval set -- "${OPTS}"
+while true; do
+  case "$1" in
+    -u)
+      shift
+      ;;
+    --kerberos-realm)
+      KERBEROS_REALM=$2; shift 2
+      ;;
+    --super-user)
+      HADOOP_HDFS_USER=$2; shift 2
+      ;;
+    --super-user-keytab)
+      HDFS_USER_KEYTAB=$2; shift 2
+      ;;
+    -h)
       usage
       ;; 
     --)
-      shift ; break
+      while shift; do
+        SETUP_USER=$1
+        create_user
+      done
+      break
       ;;
     *)
       echo "Unknown option: $1"
@@ -62,15 +119,3 @@ do
   esac
 done 
 
-# Create user directory on HDFS
-export SETUP_USER
-export SETUP_PATH=/user/${SETUP_USER}
-export HADOOP_PREFIX
-export HADOOP_CONF_DIR
-
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir ${SETUP_PATH}' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown ${SETUP_USER}:${SETUP_USER} ${SETUP_PATH}' hdfs
-
-if [ "$?" == "0" ]; then
-  echo "User directory has been setup: ${SETUP_PATH}"
-fi

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh Tue Sep 13 22:49:27 2011
@@ -18,35 +18,60 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-if [ "$HADOOP_HOME" != "" ]; then
-  echo "Warning: \$HADOOP_HOME is deprecated."
-  echo
-fi
-
-. "$bin"/../libexec/hadoop-config.sh
+this="${BASH_SOURCE-$0}"
+export HADOOP_PREFIX=`dirname "$this"`/..
 
 usage() {
   echo "
 usage: $0 <parameters>
 
   Optional parameters:
-     --auto                                                          Setup automatically
-     --default                                                       Generate default config
-     --conf-dir=/etc/hadoop                                          Set config directory
+     --auto                                                          Setup path and configuration automatically
+     --default                                                       Setup configuration as default
+     --conf-dir=/etc/hadoop                                          Set configuration directory
      --datanode-dir=/var/lib/hadoop/hdfs/datanode                    Set datanode directory
+     --group=hadoop                                                  Set Hadoop group name
      -h                                                              Display this message
-     --jobtracker-url=hostname:9001                                  Set jobtracker url
+     --hdfs-user=hdfs                                                Set HDFS user
+     --jobtracker-host=hostname                                      Set jobtracker host
+     --namenode-host=hostname                                        Set namenode host
+     --secondarynamenode-host=hostname                               Set secondary namenode host
+     --kerberos-realm=KERBEROS.EXAMPLE.COM                           Set Kerberos realm
+     --kinit-location=/usr/kerberos/bin/kinit                        Set kinit location
+     --keytab-dir=/etc/security/keytabs                              Set keytab directory
      --log-dir=/var/log/hadoop                                       Set log directory
-     --hdfs-dir=/var/lib/hadoop/hdfs                                 Set hdfs directory
+     --pid-dir=/var/run/hadoop                                       Set pid directory
+     --hdfs-dir=/var/lib/hadoop/hdfs                                 Set HDFS directory
+     --hdfs-user-keytab=/home/hdfs/hdfs.keytab                       Set HDFS user key tab
      --mapred-dir=/var/lib/hadoop/mapred                             Set mapreduce directory
+     --mapreduce-user=mr                                             Set mapreduce user
+     --mapreduce-user-keytab=/home/mr/hdfs.keytab                    Set mapreduce user key tab
      --namenode-dir=/var/lib/hadoop/hdfs/namenode                    Set namenode directory
-     --namenode-url=hdfs://hostname:9000/                            Set namenode url
      --replication=3                                                 Set replication factor
      --taskscheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler  Set task scheduler
+     --datanodes=hostname1,hostname2,...                             Set the datanodes
+     --tasktrackers=hostname1,hostname2,...                          Set the tasktrackers
   "
   exit 1
 }
 
+check_permission() {
+  TARGET=$1
+  OWNER="0"
+  RESULT=0
+  while [ "$TARGET" != "/" ]; do
+    PARENT=`dirname $TARGET`
+    NAME=`basename $TARGET`
+    OWNER=`ls -ln $PARENT | grep $NAME| awk '{print $3}'`
+    if [ "$OWNER" != "0" ]; then
+      RESULT=1
+      break
+    fi
+    TARGET=`dirname $TARGET`
+  done
+  return $RESULT
+}
+
 template_generator() {
   REGEX='(\$\{[a-zA-Z_][a-zA-Z_0-9]*\})'
   cat $1 |
@@ -64,17 +89,30 @@ OPTS=$(getopt \
   -n $0 \
   -o '' \
   -l 'auto' \
+  -l 'java-home:' \
   -l 'conf-dir:' \
   -l 'default' \
+  -l 'group:' \
   -l 'hdfs-dir:' \
   -l 'namenode-dir:' \
   -l 'datanode-dir:' \
   -l 'mapred-dir:' \
-  -l 'namenode-url:' \
-  -l 'jobtracker-url:' \
+  -l 'namenode-host:' \
+  -l 'secondarynamenode-host:' \
+  -l 'jobtracker-host:' \
   -l 'log-dir:' \
+  -l 'pid-dir:' \
   -l 'replication:' \
   -l 'taskscheduler:' \
+  -l 'hdfs-user:' \
+  -l 'hdfs-user-keytab:' \
+  -l 'mapreduce-user:' \
+  -l 'mapreduce-user-keytab:' \
+  -l 'keytab-dir:' \
+  -l 'kerberos-realm:' \
+  -l 'kinit-location:' \
+  -l 'datanodes:' \
+  -l 'tasktrackers:' \
   -o 'h' \
   -- "$@") 
   
@@ -84,8 +122,7 @@ fi
 
 # Make sure the HADOOP_LOG_DIR is not picked up from user environment.
 unset HADOOP_LOG_DIR
-
-# Parse script parameters  
+  
 eval set -- "${OPTS}"
 while true ; do
   case "$1" in
@@ -94,6 +131,10 @@ while true ; do
       AUTOMATED=1
       shift
       ;; 
+    --java-home)
+      JAVA_HOME=$2; shift 2
+      AUTOMATED=1
+      ;; 
     --conf-dir)
       HADOOP_CONF_DIR=$2; shift 2
       AUTOMATED=1
@@ -101,6 +142,10 @@ while true ; do
     --default)
       AUTOMATED=1; shift
       ;;
+    --group)
+      HADOOP_GROUP=$2; shift 2
+      AUTOMATED=1
+      ;;
     -h)
       usage
       ;; 
@@ -120,11 +165,15 @@ while true ; do
       HADOOP_MAPRED_DIR=$2; shift 2
       AUTOMATED=1
       ;; 
-    --namenode-url)
+    --namenode-host)
       HADOOP_NN_HOST=$2; shift 2
       AUTOMATED=1
       ;; 
-    --jobtracker-url)
+    --secondarynamenode-host)
+      HADOOP_SNN_HOST=$2; shift 2
+      AUTOMATED=1
+      ;; 
+    --jobtracker-host)
       HADOOP_JT_HOST=$2; shift 2
       AUTOMATED=1
       ;; 
@@ -132,6 +181,10 @@ while true ; do
       HADOOP_LOG_DIR=$2; shift 2
       AUTOMATED=1
       ;; 
+    --pid-dir)
+      HADOOP_PID_DIR=$2; shift 2
+      AUTOMATED=1
+      ;; 
     --replication)
       HADOOP_REPLICATION=$2; shift 2
       AUTOMATED=1
@@ -139,7 +192,46 @@ while true ; do
     --taskscheduler)
       HADOOP_TASK_SCHEDULER=$2; shift 2
       AUTOMATED=1
-      ;; 
+      ;;
+    --hdfs-user)
+      HADOOP_HDFS_USER=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --mapreduce-user)
+      HADOOP_MR_USER=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --keytab-dir)
+      KEYTAB_DIR=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --hdfs-user-keytab)
+      HDFS_KEYTAB=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --mapreduce-user-keytab)
+      MR_KEYTAB=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --kerberos-realm)
+      KERBEROS_REALM=$2; shift 2
+      SECURITY_TYPE="kerberos"
+      AUTOMATED=1
+      ;;
+    --kinit-location)
+      KINIT=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --datanodes)
+      DATANODES=$2; shift 2
+      AUTOMATED=1
+      DATANODES=$(echo $DATANODES | tr ',' ' ')
+      ;;
+    --tasktrackers)
+      TASKTRACKERS=$2; shift 2
+      AUTOMATED=1
+      TASKTRACKERS=$(echo $TASKTRACKERS | tr ',' ' ')
+      ;;
     --)
       shift ; break
       ;;
@@ -151,21 +243,40 @@ while true ; do
   esac
 done 
 
-# Fill in default values, if parameters have not been defined.
 AUTOSETUP=${AUTOSETUP:-1}
 JAVA_HOME=${JAVA_HOME:-/usr/java/default}
-HADOOP_NN_HOST=${HADOOP_NN_HOST:-hdfs://`hostname`:9000/}
+HADOOP_GROUP=${HADOOP_GROUP:-hadoop}
+HADOOP_NN_HOST=${HADOOP_NN_HOST:-`hostname`}
 HADOOP_NN_DIR=${HADOOP_NN_DIR:-/var/lib/hadoop/hdfs/namenode}
 HADOOP_DN_DIR=${HADOOP_DN_DIR:-/var/lib/hadoop/hdfs/datanode}
-HADOOP_JT_HOST=${HADOOP_JT_HOST:-`hostname`:9001}
+HADOOP_JT_HOST=${HADOOP_JT_HOST:-`hostname`}
 HADOOP_HDFS_DIR=${HADOOP_HDFS_DIR:-/var/lib/hadoop/hdfs}
 HADOOP_MAPRED_DIR=${HADOOP_MAPRED_DIR:-/var/lib/hadoop/mapred}
 HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-/var/log/hadoop}
+HADOOP_PID_DIR=${HADOOP_PID_DIR:-/var/log/hadoop}
 HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop}
 HADOOP_REPLICATION=${HADOOP_REPLICATION:-3}
 HADOOP_TASK_SCHEDULER=${HADOOP_TASK_SCHEDULER:-org.apache.hadoop.mapred.JobQueueTaskScheduler}
+HADOOP_HDFS_USER=${HADOOP_HDFS_USER:-hdfs}
+HADOOP_MR_USER=${HADOOP_MR_USER:-mr}
+KEYTAB_DIR=${KEYTAB_DIR:-/etc/security/keytabs}
+HDFS_KEYTAB=${HDFS_KEYTAB:-/home/hdfs/hdfs.keytab}
+MR_KEYTAB=${MR_KEYTAB:-/home/mr/mr.keytab}
+KERBEROS_REALM=${KERBEROS_REALM:-KERBEROS.EXAMPLE.COM}
+SECURITY_TYPE=${SECURITY_TYPE:-simple}
+KINIT=${KINIT:-/usr/kerberos/bin/kinit}
+if [ "${SECURITY_TYPE}" = "kerberos" ]; then
+  TASK_CONTROLLER="org.apache.hadoop.mapred.LinuxTaskController"
+  HADOOP_DN_ADDR="0.0.0.0:1019"
+  HADOOP_DN_HTTP_ADDR="0.0.0.0:1022"
+  SECURITY="true"
+else
+  TASK_CONTROLLER="org.apache.hadoop.mapred.DefaultTaskController"
+  HADOOP_DN_ADDR="0.0.0.0:50010"
+  HADOOP_DN_HTTP_ADDR="0.0.0.0:50075"
+  SECURITY="false"
+fi
 
-# Interactive setup wizard
 if [ "${AUTOMATED}" != "1" ]; then
   echo "Setup Hadoop Configuration"
   echo
@@ -173,18 +284,18 @@ if [ "${AUTOMATED}" != "1" ]; then
   read USER_HADOOP_CONF_DIR
   echo -n "Where would you like to put log directory? (${HADOOP_LOG_DIR}) "
   read USER_HADOOP_LOG_DIR
-  echo -n "What is the url of the namenode? (${HADOOP_NN_HOST}) "
+  echo -n "Where would you like to put pid directory? (${HADOOP_PID_DIR}) "
+  read USER_HADOOP_PID_DIR
+  echo -n "What is the host of the namenode? (${HADOOP_NN_HOST}) "
   read USER_HADOOP_NN_HOST
   echo -n "Where would you like to put namenode data directory? (${HADOOP_NN_DIR}) "
   read USER_HADOOP_NN_DIR
   echo -n "Where would you like to put datanode data directory? (${HADOOP_DN_DIR}) "
   read USER_HADOOP_DN_DIR
-  echo -n "What is the url of the jobtracker? (${HADOOP_JT_HOST}) "
+  echo -n "What is the host of the jobtracker? (${HADOOP_JT_HOST}) "
   read USER_HADOOP_JT_HOST
   echo -n "Where would you like to put jobtracker/tasktracker data directory? (${HADOOP_MAPRED_DIR}) "
   read USER_HADOOP_MAPRED_DIR
-  echo -n "Which taskscheduler would you like? (${HADOOP_TASK_SCHEDULER}) "
-  read USER_HADOOP_TASK_SCHEDULER
   echo -n "Where is JAVA_HOME directory? (${JAVA_HOME}) "
   read USER_JAVA_HOME
   echo -n "Would you like to create directories/copy conf files to localhost? (Y/n) "
@@ -199,16 +310,18 @@ if [ "${AUTOMATED}" != "1" ]; then
   HADOOP_MAPRED_DIR=${USER_HADOOP_MAPRED_DIR:-$HADOOP_MAPRED_DIR}
   HADOOP_TASK_SCHEDULER=${HADOOP_TASK_SCHEDULER:-org.apache.hadoop.mapred.JobQueueTaskScheduler}
   HADOOP_LOG_DIR=${USER_HADOOP_LOG_DIR:-$HADOOP_LOG_DIR}
+  HADOOP_PID_DIR=${USER_HADOOP_PID_DIR:-$HADOOP_PID_DIR}
   HADOOP_CONF_DIR=${USER_HADOOP_CONF_DIR:-$HADOOP_CONF_DIR}
   AUTOSETUP=${USER_AUTOSETUP:-y}
   echo "Review your choices:"
   echo
   echo "Config directory            : ${HADOOP_CONF_DIR}"
   echo "Log directory               : ${HADOOP_LOG_DIR}"
-  echo "Namenode url                : ${HADOOP_NN_HOST}"
+  echo "PID directory               : ${HADOOP_PID_DIR}"
+  echo "Namenode host               : ${HADOOP_NN_HOST}"
   echo "Namenode directory          : ${HADOOP_NN_DIR}"
   echo "Datanode directory          : ${HADOOP_DN_DIR}"
-  echo "Jobtracker url              : ${HADOOP_JT_HOST}"
+  echo "Jobtracker host             : ${HADOOP_JT_HOST}"
   echo "Mapreduce directory         : ${HADOOP_MAPRED_DIR}"
   echo "Task scheduler              : ${HADOOP_TASK_SCHEDULER}"
   echo "JAVA_HOME directory         : ${JAVA_HOME}"
@@ -222,61 +335,180 @@ if [ "${AUTOMATED}" != "1" ]; then
   fi
 fi
 
-if [ "${AUTOSETUP}" == "1" ]; then
-  # If user wants to setup local system automatically,
-  # set config file generation location to HADOOP_CONF_DIR.
-  DEST=${HADOOP_CONF_DIR}
-else
-  # If user is only interested to generate config file locally,
-  # place config files in the current working directory.
-  DEST=`pwd`
-fi
-
-# remove existing config file, they are existed in current directory.
-rm -f ${DEST}/core-site.xml >/dev/null
-rm -f ${DEST}/hdfs-site.xml >/dev/null
-rm -f ${DEST}/mapred-site.xml >/dev/null
-rm -f ${DEST}/hadoop-env.sh >/dev/null
-
-# Generate config file with specified parameters.
-template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/core-site.xml ${DEST}/core-site.xml
-template_generator ${HADOOP_PREFIX}/share/hadoop/hdfs/templates/hdfs-site.xml ${DEST}/hdfs-site.xml
-template_generator ${HADOOP_PREFIX}/share/hadoop/mapreduce/templates/mapred-site.xml ${DEST}/mapred-site.xml
-template_generator ${HADOOP_CONF_DIR}/hadoop-env.sh.template ${DEST}/hadoop-env.sh
-
-chown root:hadoop ${DEST}/hadoop-env.sh
-chmod 755 ${DEST}/hadoop-env.sh
-
-# Setup directory path and copy config files, if AUTOSETUP is chosen.
 if [ "${AUTOSETUP}" == "1" -o "${AUTOSETUP}" == "y" ]; then
-  mkdir -p ${HADOOP_HDFS_DIR}
-  mkdir -p ${HADOOP_NN_DIR}
-  mkdir -p ${HADOOP_DN_DIR}
-  mkdir -p ${HADOOP_MAPRED_DIR}
+  if [ -d ${KEYTAB_DIR} ]; then
+    chmod 700 ${KEYTAB_DIR}/*
+    chown ${HADOOP_MR_USER}:${HADOOP_GROUP} ${KEYTAB_DIR}/[jt]t.service.keytab
+    chown ${HADOOP_HDFS_USER}:${HADOOP_GROUP} ${KEYTAB_DIR}/[dns]n.service.keytab
+  fi
+  chmod 755 -R ${HADOOP_PREFIX}/sbin/*hadoop*
+  chmod 755 -R ${HADOOP_PREFIX}/bin/hadoop
+  chmod 755 -R ${HADOOP_PREFIX}/libexec/hadoop-config.sh
+  mkdir -p /home/${HADOOP_MR_USER}
+  chown ${HADOOP_MR_USER}:${HADOOP_GROUP} /home/${HADOOP_MR_USER}
+  HDFS_DIR=`echo ${HADOOP_HDFS_DIR} | sed -e 's/,/ /g'`
+  mkdir -p ${HDFS_DIR}
+  if [ -e ${HADOOP_NN_DIR} ]; then
+    rm -rf ${HADOOP_NN_DIR}
+  fi
+  DATANODE_DIR=`echo ${HADOOP_DN_DIR} | sed -e 's/,/ /g'`
+  mkdir -p ${DATANODE_DIR}
+  MAPRED_DIR=`echo ${HADOOP_MAPRED_DIR} | sed -e 's/,/ /g'`
+  mkdir -p ${MAPRED_DIR}
   mkdir -p ${HADOOP_CONF_DIR}
+  check_permission ${HADOOP_CONF_DIR}
+  if [ $? == 1 ]; then
+    echo "Full path to ${HADOOP_CONF_DIR} should be owned by root."
+    exit 1
+  fi
+
   mkdir -p ${HADOOP_LOG_DIR}
-  mkdir -p ${HADOOP_LOG_DIR}/hdfs
-  mkdir -p ${HADOOP_LOG_DIR}/mapred
-  chown hdfs:hadoop ${HADOOP_HDFS_DIR}
-  chown hdfs:hadoop ${HADOOP_NN_DIR}
-  chown hdfs:hadoop ${HADOOP_DN_DIR}
-  chown mapred:hadoop ${HADOOP_MAPRED_DIR}
-  chown root:hadoop ${HADOOP_LOG_DIR}
+  #create the log sub dir for diff users
+  mkdir -p ${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}
+  mkdir -p ${HADOOP_LOG_DIR}/${HADOOP_MR_USER}
+
+  mkdir -p ${HADOOP_PID_DIR}
+  chown ${HADOOP_HDFS_USER}:${HADOOP_GROUP} ${HDFS_DIR}
+  chown ${HADOOP_HDFS_USER}:${HADOOP_GROUP} ${DATANODE_DIR}
+  chmod 700 -R ${DATANODE_DIR}
+  chown ${HADOOP_MR_USER}:${HADOOP_GROUP} ${MAPRED_DIR}
+  chown ${HADOOP_HDFS_USER}:${HADOOP_GROUP} ${HADOOP_LOG_DIR}
   chmod 775 ${HADOOP_LOG_DIR}
-  chown hdfs:hadoop ${HADOOP_LOG_DIR}/hdfs
-  chown mapred:hadoop ${HADOOP_LOG_DIR}/mapred
+  chmod 775 ${HADOOP_PID_DIR}
+  chown root:${HADOOP_GROUP} ${HADOOP_PID_DIR}
+
+  #change the permission and the owner
+  chmod 755 ${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}
+  chown ${HADOOP_HDFS_USER}:${HADOOP_GROUP} ${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}
+  chmod 755 ${HADOOP_LOG_DIR}/${HADOOP_MR_USER}
+  chown ${HADOOP_MR_USER}:${HADOOP_GROUP} ${HADOOP_LOG_DIR}/${HADOOP_MR_USER}
+
+  if [ -e ${HADOOP_CONF_DIR}/core-site.xml ]; then
+    mv -f ${HADOOP_CONF_DIR}/core-site.xml ${HADOOP_CONF_DIR}/core-site.xml.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/hdfs-site.xml ]; then
+    mv -f ${HADOOP_CONF_DIR}/hdfs-site.xml ${HADOOP_CONF_DIR}/hdfs-site.xml.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/mapred-site.xml ]; then
+    mv -f ${HADOOP_CONF_DIR}/mapred-site.xml ${HADOOP_CONF_DIR}/mapred-site.xml.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/hadoop-env.sh ]; then
+    mv -f ${HADOOP_CONF_DIR}/hadoop-env.sh ${HADOOP_CONF_DIR}/hadoop-env.sh.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/hadoop-policy.xml ]; then
+    mv -f ${HADOOP_CONF_DIR}/hadoop-policy.xml ${HADOOP_CONF_DIR}/hadoop-policy.xml.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/mapred-queue-acls.xml ]; then
+    mv -f ${HADOOP_CONF_DIR}/mapred-queue-acls.xml ${HADOOP_CONF_DIR}/mapred-queue-acls.xml.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/commons-logging.properties ]; then
+    mv -f ${HADOOP_CONF_DIR}/commons-logging.properties ${HADOOP_CONF_DIR}/commons-logging.properties.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/taskcontroller.cfg  ]; then
+    mv -f ${HADOOP_CONF_DIR}/taskcontroller.cfg  ${HADOOP_CONF_DIR}/taskcontroller.cfg.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/slaves  ]; then
+    mv -f ${HADOOP_CONF_DIR}/slaves  ${HADOOP_CONF_DIR}/slaves.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/dfs.include  ]; then
+    mv -f ${HADOOP_CONF_DIR}/dfs.include  ${HADOOP_CONF_DIR}/dfs.include.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/dfs.exclude  ]; then
+    mv -f ${HADOOP_CONF_DIR}/dfs.exclude  ${HADOOP_CONF_DIR}/dfs.exclude.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/mapred.include  ]; then
+    mv -f ${HADOOP_CONF_DIR}/mapred.include  ${HADOOP_CONF_DIR}/mapred.include.bak
+  fi
+  if [ -e ${HADOOP_CONF_DIR}/mapred.exclude  ]; then
+    mv -f ${HADOOP_CONF_DIR}/mapred.exclude  ${HADOOP_CONF_DIR}/mapred.exclude.bak
+  fi
+
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/core-site.xml ${HADOOP_CONF_DIR}/core-site.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hdfs-site.xml ${HADOOP_CONF_DIR}/hdfs-site.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/mapred-site.xml ${HADOOP_CONF_DIR}/mapred-site.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-env.sh ${HADOOP_CONF_DIR}/hadoop-env.sh
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-policy.xml ${HADOOP_CONF_DIR}/hadoop-policy.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/commons-logging.properties ${HADOOP_CONF_DIR}/commons-logging.properties
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/mapred-queue-acls.xml ${HADOOP_CONF_DIR}/mapred-queue-acls.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/taskcontroller.cfg ${HADOOP_CONF_DIR}/taskcontroller.cfg
+
+  #set the owner of the hadoop dir to root
+  chown root ${HADOOP_PREFIX}
+  chown root:${HADOOP_GROUP} ${HADOOP_CONF_DIR}/hadoop-env.sh
+  chmod 755 ${HADOOP_CONF_DIR}/hadoop-env.sh
+  
+  #set taskcontroller
+  chown root:${HADOOP_GROUP} ${HADOOP_CONF_DIR}/taskcontroller.cfg
+  chmod 400 ${HADOOP_CONF_DIR}/taskcontroller.cfg
+  chown root:${HADOOP_GROUP} ${HADOOP_PREFIX}/bin/task-controller
+  chmod 6050 ${HADOOP_PREFIX}/bin/task-controller
+
+
+  #generate the slaves file and include and exclude files for hdfs and mapred
+  echo '' > ${HADOOP_CONF_DIR}/slaves
+  echo '' > ${HADOOP_CONF_DIR}/dfs.include
+  echo '' > ${HADOOP_CONF_DIR}/dfs.exclude
+  echo '' > ${HADOOP_CONF_DIR}/mapred.include
+  echo '' > ${HADOOP_CONF_DIR}/mapred.exclude
+  for dn in $DATANODES
+  do
+    echo $dn >> ${HADOOP_CONF_DIR}/slaves
+    echo $dn >> ${HADOOP_CONF_DIR}/dfs.include
+  done
+  for tt in $TASKTRACKERS
+  do
+    echo $tt >> ${HADOOP_CONF_DIR}/mapred.include
+  done
+
   echo "Configuration setup is completed."
   if [[ "$HADOOP_NN_HOST" =~ "`hostname`" ]]; then
     echo "Proceed to run hadoop-setup-hdfs.sh on namenode."
   fi
 else
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/core-site.xml ${HADOOP_CONF_DIR}/core-site.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hdfs-site.xml ${HADOOP_CONF_DIR}/hdfs-site.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/mapred-site.xml ${HADOOP_CONF_DIR}/mapred-site.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-env.sh ${HADOOP_CONF_DIR}/hadoop-env.sh
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-policy.xml ${HADOOP_CONF_DIR}/hadoop-policy.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/commons-logging.properties ${HADOOP_CONF_DIR}/commons-logging.properties
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/mapred-queue-acls.xml ${HADOOP_CONF_DIR}/mapred-queue-acls.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/taskcontroller.cfg ${HADOOP_CONF_DIR}/taskcontroller.cfg
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-metrics2.properties ${HADOOP_CONF_DIR}/hadoop-metrics2.properties
+  if [ ! -e ${HADOOP_CONF_DIR}/capacity-scheduler.xml ]; then
+    template_generator ${HADOOP_PREFIX}/share/hadoop/templates/conf/capacity-scheduler.xml ${HADOOP_CONF_DIR}/capacity-scheduler.xml
+  fi
+  
+  chown root:${HADOOP_GROUP} ${HADOOP_CONF_DIR}/hadoop-env.sh
+  chmod 755 ${HADOOP_CONF_DIR}/hadoop-env.sh
+  #set taskcontroller
+  chown root:${HADOOP_GROUP} ${HADOOP_CONF_DIR}/taskcontroller.cfg
+  chmod 400 ${HADOOP_CONF_DIR}/taskcontroller.cfg
+  chown root:${HADOOP_GROUP} ${HADOOP_PREFIX}/bin/task-controller
+  chmod 6050 ${HADOOP_PREFIX}/bin/task-controller
+  
+  #generate the slaves file and include and exclude files for hdfs and mapred
+  echo '' > ${HADOOP_CONF_DIR}/slaves
+  echo '' > ${HADOOP_CONF_DIR}/dfs.include
+  echo '' > ${HADOOP_CONF_DIR}/dfs.exclude
+  echo '' > ${HADOOP_CONF_DIR}/mapred.include
+  echo '' > ${HADOOP_CONF_DIR}/mapred.exclude
+  for dn in $DATANODES
+  do
+    echo $dn >> ${HADOOP_CONF_DIR}/slaves
+    echo $dn >> ${HADOOP_CONF_DIR}/dfs.include
+  done
+  for tt in $TASKTRACKERS
+  do
+    echo $tt >> ${HADOOP_CONF_DIR}/mapred.include
+  done
+  
   echo
-  echo "Configuration file has been generated, please copy:"
+  echo "Configuration file has been generated in:"
   echo
-  echo "core-site.xml"
-  echo "hdfs-site.xml"
-  echo "mapred-site.xml"
-  echo "hadoop-env.sh"
+  echo "${HADOOP_CONF_DIR}/core-site.xml"
+  echo "${HADOOP_CONF_DIR}/hdfs-site.xml"
+  echo "${HADOOP_CONF_DIR}/mapred-site.xml"
+  echo "${HADOOP_CONF_DIR}/hadoop-env.sh"
   echo
   echo " to ${HADOOP_CONF_DIR} on all nodes, and proceed to run hadoop-setup-hdfs.sh on namenode."
 fi
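
Note on the permission scheme above: mode 6050 on task-controller is setuid+setgid with execute granted only to ${HADOOP_GROUP}, so the binary can be launched only by members of that group yet is meant to run with root privileges to switch tasks to the submitting user, and the 400 mode keeps taskcontroller.cfg readable by root alone. A post-install sanity check might look roughly like this (a sketch only; paths come from the variables above, group shown as the hadoop default):

    ls -l ${HADOOP_PREFIX}/bin/task-controller
    # expected: ---Sr-s--- 1 root hadoop ... task-controller
    ls -l ${HADOOP_CONF_DIR}/taskcontroller.cfg
    # expected: -r-------- 1 root hadoop ... taskcontroller.cfg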

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-hdfs.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-hdfs.sh?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-hdfs.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-hdfs.sh Tue Sep 13 22:49:27 2011
@@ -18,37 +18,65 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-if [ "$HADOOP_HOME" != "" ]; then
-  echo "Warning: \$HADOOP_HOME is deprecated."
-  echo
-fi
-
 . "$bin"/../libexec/hadoop-config.sh
 
 usage() {
   echo "
 usage: $0 <parameters>
-  Require parameter:
-     -c <clusterid>                                Set cluster identifier for HDFS
+
   Optional parameters:
-     -h                                            Display this message
+     --format                                                        Force namenode format
+     --group=hadoop                                                  Set Hadoop group
+     -h                                                              Display this message
+     --hdfs-user=hdfs                                                Set HDFS user
+     --kerberos-realm=KERBEROS.EXAMPLE.COM                           Set Kerberos realm
+     --hdfs-user-keytab=/home/hdfs/hdfs.keytab                       Set HDFS user keytab
+     --mapreduce-user=mr                                             Set mapreduce user
   "
   exit 1
 }
 
-if [ $# != 2 ] ; then
+OPTS=$(getopt \
+  -n $0 \
+  -o '' \
+  -l 'format' -l 'group:' \
+  -l 'hdfs-user:' \
+  -l 'hdfs-user-keytab:' \
+  -l 'mapreduce-user:' \
+  -l 'kerberos-realm:' \
+  -o 'h' \
+  -- "$@")
+
+if [ $? != 0 ] ; then
     usage
-    exit 1
 fi
 
-while getopts "hc:" OPTION
-do
-  case $OPTION in
-    c)
-      SETUP_CLUSTER=$2; shift 2
+eval set -- "${OPTS}"
+while true ; do
+  case "$1" in
+    --format)
+      FORMAT_NAMENODE=1; shift
+      AUTOMATED=1
       ;;
-    h)
-      usage
+    --group)
+      HADOOP_GROUP=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --hdfs-user)
+      HADOOP_HDFS_USER=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --mapreduce-user)
+      HADOOP_MR_USER=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --hdfs-user-keytab)
+      HDFS_KEYTAB=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --kerberos-realm)
+      KERBEROS_REALM=$2; shift 2
+      AUTOMATED=1
       ;;
     --)
       shift ; break
@@ -61,30 +89,56 @@ do
   esac
 done
 
-export HADOOP_PREFIX
-export HADOOP_CONF_DIR
-export SETUP_CLUSTER
+HADOOP_GROUP=${HADOOP_GROUP:-hadoop}
+HADOOP_HDFS_USER=${HADOOP_HDFS_USER:-hdfs}
+HADOOP_MAPREDUCE_USER=${HADOOP_MR_USER:-mapred}
+
+if [ "${KERBEROS_REALM}" != "" ]; then
+  # Determine Kerberos location based on the Linux distro.
+  if [ -e /etc/lsb-release ]; then
+    KERBEROS_BIN=/usr/bin
+  else
+    KERBEROS_BIN=/usr/kerberos/bin
+  fi
+  kinit_cmd="${KERBEROS_BIN}/kinit -k -t ${HDFS_KEYTAB} ${HADOOP_HDFS_USER}"
+  su -c "${kinit_cmd}" ${HADOOP_HDFS_USER}
+fi
 
-# Start namenode and initialize file system structure
 echo "Setup Hadoop Distributed File System"
 echo
-echo "Formatting namenode"
-echo
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} namenode -format -clusterid ${SETUP_CLUSTER}' hdfs
-echo
+
+# Format namenode
+if [ "${FORMAT_NAMENODE}" == "1" ]; then
+  echo "Formatting namenode"
+  echo
+  su -c "echo Y | ${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} namenode -format" ${HADOOP_HDFS_USER}
+  echo
+fi
+
+# Start namenode process
 echo "Starting namenode process"
 echo
-/etc/init.d/hadoop-namenode start
+if [ -e ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh ]; then
+  DAEMON_PATH=${HADOOP_PREFIX}/sbin
+else
+  DAEMON_PATH=${HADOOP_PREFIX}/bin
+fi
+su -c "${DAEMON_PATH}/hadoop-daemon.sh --config ${HADOOP_CONF_DIR} start namenode" ${HADOOP_HDFS_USER}
 echo
 echo "Initialize HDFS file system: "
 echo
 
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /jobtracker' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown mapred:mapred /jobtracker' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /user/mapred' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown mapred:mapred /user/mapred' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /tmp' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chmod 777 /tmp' hdfs
+#create the /user dir 
+su -c "${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -mkdir /user" ${HADOOP_HDFS_USER}
+
+#create /tmp and give it 777
+su -c "${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -mkdir /tmp" ${HADOOP_HDFS_USER}
+su -c "${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -chmod 777 /tmp" ${HADOOP_HDFS_USER}
+
+#create /mapred
+su -c "${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -mkdir /mapred" ${HADOOP_HDFS_USER}
+su -c "${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -chmod 700 /mapred" ${HADOOP_HDFS_USER}
+su -c "${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -chown ${HADOOP_MAPREDUCE_USER}:system /mapred" ${HADOOP_HDFS_USER}
 
 if [ $? -eq 0 ]; then
   echo "Completed."

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-single-node.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-single-node.sh?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-single-node.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-single-node.sh Tue Sep 13 22:49:27 2011
@@ -17,16 +17,16 @@
 
 # Script for setting up the HDFS file system for a single-node deployment
 
-bin=`which $0`
-bin=`dirname ${bin}`
+bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-export HADOOP_PREFIX=${bin}/..
-
-if [ -e /etc/hadoop/hadoop-env.sh ]; then
-  . /etc/hadoop/hadoop-env.sh
+if [ "$HADOOP_HOME" != "" ]; then
+  echo "Warning: \$HADOOP_HOME is deprecated."
+  echo
 fi
 
+. "$bin"/../libexec/hadoop-config.sh
+
 usage() {
   echo "
 usage: $0 <parameters>
@@ -38,7 +38,19 @@ usage: $0 <parameters>
   exit 1
 }
 
-# Parse script parameters
+template_generator() {
+  REGEX='(\$\{[a-zA-Z_][a-zA-Z_0-9]*\})'
+  cat $1 |
+  while IFS= read -r line ; do
+    while [[ "$line" =~ $REGEX ]] ; do
+      LHS=${BASH_REMATCH[1]}
+      RHS="$(eval echo "\"$LHS\"")"
+      line=${line//$LHS/$RHS}
+    done
+    echo "$line" >> $2
+  done
+}
+
 OPTS=$(getopt \
   -n $0 \
   -o '' \
@@ -49,6 +61,10 @@ if [ $? != 0 ] ; then
     usage
 fi
 
+if [ -e /etc/hadoop/hadoop-env.sh ]; then
+  . /etc/hadoop/hadoop-env.sh
+fi
+
 eval set -- "${OPTS}"
 while true ; do
   case "$1" in
@@ -69,7 +85,6 @@ while true ; do
   esac
 done
 
-# Interactive setup wizard
 if [ "${AUTOMATED}" != "1" ]; then
   echo "Welcome to Hadoop single node setup wizard"
   echo
@@ -119,68 +134,59 @@ SET_REBOOT=${SET_REBOOT:-y}
 /etc/init.d/hadoop-jobtracker stop 2>/dev/null >/dev/null
 /etc/init.d/hadoop-tasktracker stop 2>/dev/null >/dev/null
 
-# Default settings
-JAVA_HOME=${JAVA_HOME:-/usr/java/default}
-HADOOP_NN_HOST=${HADOOP_NN_HOST:-hdfs://localhost:9000/}
-HADOOP_NN_DIR=${HADOOP_NN_DIR:-/var/lib/hadoop/hdfs/namenode}
-HADOOP_DN_DIR=${HADOOP_DN_DIR:-/var/lib/hadoop/hdfs/datanode}
-HADOOP_JT_HOST=${HADOOP_JT_HOST:-localhost:9001}
-HADOOP_HDFS_DIR=${HADOOP_MAPRED_DIR:-/var/lib/hadoop/hdfs}
-HADOOP_MAPRED_DIR=${HADOOP_MAPRED_DIR:-/var/lib/hadoop/mapred}
-HADOOP_LOG_DIR="/var/log/hadoop"
-HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop}
-HADOOP_REPLICATION=${HADOOP_RELICATION:-1}
-HADOOP_TASK_SCHEDULER=${HADOOP_TASK_SCHEDULER:-org.apache.hadoop.mapred.JobQueueTaskScheduler}
-
-# Setup config files
 if [ "${SET_CONFIG}" == "y" ]; then
+  JAVA_HOME=${JAVA_HOME:-/usr/java/default}
+  HADOOP_NN_HOST=${HADOOP_NN_HOST:-localhost}
+  HADOOP_NN_DIR=${HADOOP_NN_DIR:-/var/lib/hadoop/hdfs/namenode}
+  HADOOP_DN_DIR=${HADOOP_DN_DIR:-/var/lib/hadoop/hdfs/datanode}
+  HADOOP_JT_HOST=${HADOOP_JT_HOST:-localhost}
+  HADOOP_HDFS_DIR=${HADOOP_HDFS_DIR:-/var/lib/hadoop/hdfs}
+  HADOOP_MAPRED_DIR=${HADOOP_MAPRED_DIR:-/var/lib/hadoop/mapred}
+  HADOOP_PID_DIR=${HADOOP_PID_DIR:-/var/run/hadoop}
+  HADOOP_LOG_DIR="/var/log/hadoop"
+  HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop}
+  HADOOP_REPLICATION=${HADOOP_REPLICATION:-1}
   ${HADOOP_PREFIX}/sbin/hadoop-setup-conf.sh --auto \
+    --hdfs-user=hdfs \
+    --mapreduce-user=mapred \
     --conf-dir=${HADOOP_CONF_DIR} \
     --datanode-dir=${HADOOP_DN_DIR} \
     --hdfs-dir=${HADOOP_HDFS_DIR} \
-    --jobtracker-url=${HADOOP_JT_HOST} \
+    --jobtracker-host=${HADOOP_JT_HOST} \
     --log-dir=${HADOOP_LOG_DIR} \
+    --pid-dir=${HADOOP_PID_DIR} \
     --mapred-dir=${HADOOP_MAPRED_DIR} \
     --namenode-dir=${HADOOP_NN_DIR} \
-    --namenode-url=${HADOOP_NN_HOST} \
+    --namenode-host=${HADOOP_NN_HOST} \
     --replication=${HADOOP_REPLICATION}
 fi
 
-export HADOOP_CONF_DIR
-
-# Format namenode
 if [ ! -e ${HADOOP_NN_DIR} ]; then
   rm -rf ${HADOOP_HDFS_DIR} 2>/dev/null >/dev/null
   mkdir -p ${HADOOP_HDFS_DIR}
   chmod 755 ${HADOOP_HDFS_DIR}
   chown hdfs:hadoop ${HADOOP_HDFS_DIR}
-  su -c '${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} namenode -format -clusterid hadoop' hdfs
+  /etc/init.d/hadoop-namenode format
 elif [ "${SET_FORMAT}" == "y" ]; then
   rm -rf ${HADOOP_HDFS_DIR} 2>/dev/null >/dev/null
   mkdir -p ${HADOOP_HDFS_DIR}
   chmod 755 ${HADOOP_HDFS_DIR}
   chown hdfs:hadoop ${HADOOP_HDFS_DIR}
-  rm -rf /var/lib/hadoop/hdfs/namenode
-  su -c '${HADOOP_PREFIX}/bin/hdfs --config ${HADOOP_CONF_DIR} namenode -format -clusterid hadoop' hdfs
+  rm -rf ${HADOOP_NN_DIR}
+  /etc/init.d/hadoop-namenode format
 fi
 
-# Start hdfs service
 /etc/init.d/hadoop-namenode start
 /etc/init.d/hadoop-datanode start
 
-# Initialize file system structure
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /user/mapred' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown mapred:mapred /user/mapred' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /tmp' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chmod 777 /tmp' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -mkdir /jobtracker' hdfs
-su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} fs -chown mapred:mapred /jobtracker' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -mkdir /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -chown mapred:mapred /user/mapred' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -mkdir /tmp' hdfs
+su -c '${HADOOP_PREFIX}/bin/hadoop --config ${HADOOP_CONF_DIR} dfs -chmod 777 /tmp' hdfs
 
-# Start mapreduce service
 /etc/init.d/hadoop-jobtracker start
 /etc/init.d/hadoop-tasktracker start
 
-# Toggle service startup on reboot
 if [ "${SET_REBOOT}" == "y" ]; then
   if [ -e /etc/debian_version ]; then
     ln -sf ../init.d/hadoop-namenode /etc/rc2.d/S90hadoop-namenode
@@ -203,7 +209,6 @@ if [ "${SET_REBOOT}" == "y" ]; then
   fi
 fi
 
-# Shutdown service, if user choose to stop services after setup
 if [ "${STARTUP}" != "y" ]; then
   /etc/init.d/hadoop-namenode stop
   /etc/init.d/hadoop-datanode stop
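
Note: template_generator above (the same helper the setup-conf script relies on) is a plain-bash substitute for sed/ant-style filtering: it reads the source file line by line, finds ${NAME} placeholders, and replaces each with the value of the like-named shell variable, appending the result to the destination (so callers expect the destination not to exist yet). A minimal sketch of how it behaves, with invented file names:

    HADOOP_NN_HOST=namenode.example.com
    echo '<value>hdfs://${HADOOP_NN_HOST}:8020</value>' > core-site.tmpl
    template_generator core-site.tmpl core-site.xml
    cat core-site.xml
    # <value>hdfs://namenode.example.com:8020</value>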

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-datanode
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-datanode?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-datanode (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-datanode Tue Sep 13 22:49:27 2011
@@ -27,10 +27,15 @@ source /etc/default/hadoop-env.sh
 RETVAL=0
 PIDFILE="${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid"
 desc="Hadoop datanode daemon"
+export HADOOP_PREFIX="/usr"
 
 start() {
   echo -n $"Starting $desc (hadoop-datanode): "
-  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start datanode
+  if [ -n "$HADOOP_SECURE_DN_USER" ]; then
+    daemon ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start datanode
+  else
+    daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" start datanode
+  fi
   RETVAL=$?
   echo
   [ $RETVAL -eq 0 ] && touch /var/lock/subsys/hadoop-datanode
@@ -39,7 +44,11 @@ start() {
 
 stop() {
   echo -n $"Stopping $desc (hadoop-datanode): "
-  daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop datanode
+  if [ -n "$HADOOP_SECURE_DN_USER" ]; then
+    daemon ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop datanode
+  else
+    daemon --user hdfs ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh --config "${HADOOP_CONF_DIR}" stop datanode
+  fi
   RETVAL=$?
   sleep 5
   echo
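
Note: the HADOOP_SECURE_DN_USER branch follows the secure-datanode convention: when that variable is set (typically in /etc/default/hadoop-env.sh), the daemon has to be started as root so it can bind the privileged ports, and the startup scripts then drop privileges to that user, which is why the explicit --user hdfs is only passed in the non-secure case. Roughly, the corresponding hadoop-env.sh fragment would be (values illustrative):

    export HADOOP_SECURE_DN_USER=hdfs
    export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop
    export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop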

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-jobtracker
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-jobtracker?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-jobtracker (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-jobtracker Tue Sep 13 22:49:27 2011
@@ -27,6 +27,7 @@ source /etc/default/hadoop-env.sh
 RETVAL=0
 PIDFILE="${HADOOP_PID_DIR}/hadoop-mapred-jobtracker.pid"
 desc="Hadoop jobtracker daemon"
+export HADOOP_PREFIX="/usr"
 
 start() {
   echo -n $"Starting $desc (hadoop-jobtracker): "

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-namenode
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-namenode?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-namenode (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-namenode Tue Sep 13 22:49:27 2011
@@ -27,6 +27,7 @@ source /etc/default/hadoop-env.sh
 RETVAL=0
 PIDFILE="${HADOOP_PID_DIR}/hadoop-hdfs-namenode.pid"
 desc="Hadoop namenode daemon"
+export HADOOP_PREFIX="/usr"
 
 start() {
   echo -n $"Starting $desc (hadoop-namenode): "

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-tasktracker
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-tasktracker?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-tasktracker (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/init.d/hadoop-tasktracker Tue Sep 13 22:49:27 2011
@@ -27,6 +27,7 @@ source /etc/default/hadoop-env.sh
 RETVAL=0
 PIDFILE="${HADOOP_PID_DIR}/hadoop-mapred-tasktracker.pid"
 desc="Hadoop tasktracker daemon"
+export HADOOP_PREFIX="/usr"
 
 start() {
   echo -n $"Starting $desc (hadoop-tasktracker): "

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml Tue Sep 13 22:49:27 2011
@@ -1,27 +1,78 @@
 <?xml version="1.0"?>
 <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
 
 <!-- Put site-specific property overrides in this file. -->
 
 <configuration>
+
+  <property>
+    <name>local.realm</name>
+    <value>${KERBEROS_REALM}</value>
+  </property>
+
+  <!-- file system properties -->
+
   <property>
     <name>fs.default.name</name>
-    <value>${HADOOP_NN_HOST}</value>
+    <value>hdfs://${HADOOP_NN_HOST}:8020</value>
+    <description>The name of the default file system.  Either the
+      literal string "local" or a host:port for NDFS.
+    </description>
+    <final>true</final>
+  </property>
+
+  <property>
+    <name>fs.trash.interval</name>
+    <value>360</value>
+    <description>Number of minutes between trash checkpoints.
+      If zero, the trash feature is disabled.
+    </description>
+  </property>
+  
+  <property>
+    <name>hadoop.security.auth_to_local</name>
+    <value>
+        RULE:[2:$1@$0]([jt]t@.*${KERBEROS_REALM})s/.*/${HADOOP_MR_USER}/
+        RULE:[2:$1@$0]([nd]n@.*${KERBEROS_REALM})s/.*/${HADOOP_HDFS_USER}/
+        RULE:[2:$1@$0](mapred@.*${KERBEROS_REALM})s/.*/${HADOOP_MR_USER}/
+        RULE:[2:$1@$0](hdfs@.*${KERBEROS_REALM})s/.*/${HADOOP_HDFS_USER}/
+        RULE:[2:$1@$0](mapredqa@.*${KERBEROS_REALM})s/.*/${HADOOP_MR_USER}/
+        RULE:[2:$1@$0](hdfsqa@.*${KERBEROS_REALM})s/.*/${HADOOP_HDFS_USER}/
+        DEFAULT
+    </value>
+    <description></description>
+  </property>
+
+  <property>
+    <name>hadoop.security.authentication</name>
+    <value>${SECURITY_TYPE}</value>
+    <description>
+      Set the authentication for the cluster. Valid values are: simple or
+      kerberos.
+    </description>
   </property>
+
+  <property>
+    <name>hadoop.security.authorization</name>
+    <value>${SECURITY}</value>
+    <description>
+      Enable authorization for different protocols.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.security.groups.cache.secs</name>
+    <value>14400</value>
+  </property>
+
+  <property>
+    <name>hadoop.kerberos.kinit.command</name>
+    <value>${KINIT}</value>
+  </property>
+
+  <property>
+    <name>hadoop.http.filter.initializers</name>
+    <value>org.apache.hadoop.http.lib.StaticUserWebFilter</value>
+  </property>
+
 </configuration>
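
Note: each hadoop.security.auth_to_local RULE above rewrites a two-component service principal (for example nn/host@${KERBEROS_REALM}) to the local HDFS or MapReduce account, and anything unmatched falls through to DEFAULT. A mapping can be spot-checked against the generated core-site.xml with the HadoopKerberosName helper (principal and realm below are placeholders):

    hadoop org.apache.hadoop.security.HadoopKerberosName nn/nn01.example.com@KERBEROS.EXAMPLE.COM
    # prints something like: Name: nn/nn01.example.com@KERBEROS.EXAMPLE.COM to hdfs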

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/update-hadoop-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/update-hadoop-env.sh?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/update-hadoop-env.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/update-hadoop-env.sh Tue Sep 13 22:49:27 2011
@@ -134,7 +134,9 @@ if [ "${UNINSTALL}" -eq "1" ]; then
     rm -rf ${HADOOP_PREFIX}/etc/hadoop
   fi
   rm -f /etc/default/hadoop-env.sh
-  rm -f /etc/profile.d/hadoop-env.sh
+  if [ -d /etc/profile.d ]; then
+    rm -f /etc/profile.d/hadoop-env.sh
+  fi
 else
   # Create symlinks
   if [ "${HADOOP_CONF_DIR}" != "${HADOOP_PREFIX}/etc/hadoop" ]; then
@@ -142,7 +144,9 @@ else
     ln -sf ${HADOOP_CONF_DIR} ${HADOOP_PREFIX}/etc/hadoop
   fi
   ln -sf ${HADOOP_CONF_DIR}/hadoop-env.sh /etc/default/hadoop-env.sh
-  ln -sf ${HADOOP_CONF_DIR}/hadoop-env.sh /etc/profile.d/hadoop-env.sh
+  if [ -d /etc/profile.d ]; then
+    ln -sf ${HADOOP_CONF_DIR}/hadoop-env.sh /etc/profile.d/hadoop-env.sh
+  fi
 
   mkdir -p ${HADOOP_LOG_DIR}
   chown root:hadoop ${HADOOP_LOG_DIR}
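
Note: guarding the /etc/profile.d symlink with a directory test keeps install and uninstall from failing on distributions that do not ship that directory, while ln -sf and rm -f keep the steps idempotent. Assuming HADOOP_PREFIX=/usr and HADOOP_CONF_DIR=/etc/hadoop (illustrative packaged defaults), the layout roughly ends up as:

    /usr/etc/hadoop              -> /etc/hadoop
    /etc/default/hadoop-env.sh   -> /etc/hadoop/hadoop-env.sh
    /etc/profile.d/hadoop-env.sh -> /etc/hadoop/hadoop-env.sh   (only if /etc/profile.d exists)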

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 13 22:49:27 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1166484
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1170371
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278