Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:17:44 UTC

svn commit: r1077464 - in /hadoop/common/branches/branch-0.20-security-patches: ./ .eclipse.templates/ bin/ ivy/ src/c++/jsvc/ src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ src/core/org/apache/hadoop/http/ src/hdfs/org/apache/hadoop/hdfs/...

Author: omalley
Date: Fri Mar  4 04:17:43 2011
New Revision: 1077464

URL: http://svn.apache.org/viewvc?rev=1077464&view=rev
Log:
commit d9ec286d47568d5a6b4baedfad148ee3b31c3fe1
Author: Jakob Homan <jh...@yahoo-inc.com>
Date:   Fri May 14 16:37:07 2010 -0700

    HDFS:1150 from https://issues.apache.org/jira/secure/attachment/12444541/HDFS-1150-Y20S-ready-8.patch
    
    +++ b/YAHOO-CHANGES.txt
    +    HDFS-1150. Verify datanodes' identities to clients in secure clusters.
    +    (jghoman)
    +

Added:
    hadoop/common/branches/branch-0.20-security-patches/src/c++/jsvc/
    hadoop/common/branches/branch-0.20-security-patches/src/c++/jsvc/build.sh
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
Modified:
    hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath
    hadoop/common/branches/branch-0.20-security-patches/bin/hadoop
    hadoop/common/branches/branch-0.20-security-patches/build.xml
    hadoop/common/branches/branch-0.20-security-patches/ivy.xml
    hadoop/common/branches/branch-0.20-security-patches/ivy/hadoop-core.pom
    hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties
    hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java

Modified: hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath?rev=1077464&r1=1077463&r2=1077464&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath (original)
+++ hadoop/common/branches/branch-0.20-security-patches/.eclipse.templates/.classpath Fri Mar  4 04:17:43 2011
@@ -14,12 +14,13 @@
 	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
 	<classpathentry kind="var" path="ANT_HOME/lib/ant.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-cli-1.2.jar"/>
-	<classpathentry kind="lib" path="lib/hsqldb-1.8.0.10.jar"/>
+  <classpathentry kind="lib" path="lib/hsqldb-1.8.0.10.jar"/>
 	<classpathentry kind="lib" path="lib/kfs-0.2.2.jar"/>
   	<classpathentry kind="lib" path="lib/jsp-2.1/jsp-2.1.jar"/>
   	<classpathentry kind="lib" path="lib/jsp-2.1/jsp-api-2.1.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-codec-1.4.jar"/>
-	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-httpclient-3.0.1.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-daemon-1.0.1.jar" />
+  <classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-httpclient-3.0.1.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/commons-el-1.0.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/jasper-compiler-5.5.12.jar"/>
 	<classpathentry kind="lib" path="build/ivy/lib/Hadoop/common/jasper-runtime-5.5.12.jar"/>

Modified: hadoop/common/branches/branch-0.20-security-patches/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/bin/hadoop?rev=1077464&r1=1077463&r2=1077464&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/bin/hadoop (original)
+++ hadoop/common/branches/branch-0.20-security-patches/bin/hadoop Fri Mar  4 04:17:43 2011
@@ -199,7 +199,11 @@ elif [ "$COMMAND" = "secondarynamenode" 
   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
 elif [ "$COMMAND" = "datanode" ] ; then
   CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
+  if [[ $EUID -eq 0 ]]; then
+    HADOOP_OPTS="$HADOOP_OPTS -jvm server $HADOOP_DATANODE_OPTS"
+  else
+    HADOOP_OPTS="$HADOOP_OPTS -server $HADOOP_DATANODE_OPTS"
+  fi
 elif [ "$COMMAND" = "fs" ] ; then
   CLASS=org.apache.hadoop.fs.FsShell
   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
@@ -299,5 +303,12 @@ if [ "x$JAVA_LIBRARY_PATH" != "x" ]; the
 fi  
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
 
-# run it
-exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
+# Check to see if we should start a secure datanode
+if [ "$COMMAND" = "datanode" ]; then
+  if [[ $EUID -eq 0 ]]; then
+    exec "jsvc" -outfile /dev/stdout -errfile /dev/stderr -cp "$CLASSPATH" -nodetach -user hdfs -cp "$CLASSPATH" $JAVA_HEAP_MAX $HADOOP_OPTS org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
+  fi
+else
+  # run it
+  exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
+fi

Modified: hadoop/common/branches/branch-0.20-security-patches/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/build.xml?rev=1077464&r1=1077463&r2=1077464&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/build.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/build.xml Fri Mar  4 04:17:43 2011
@@ -175,6 +175,13 @@
    -Dtask-controller.install.dir=$HADOOP_HOME/bin -->
   <property name="task-controller.install.dir" value="${dist.dir}/bin" />
   <!-- end of task-controller properties -->
+
+  <!-- jsvc properties set here -->
+  <property name="c++.jsvc.src" 
+    value="${basedir}/src/c++/jsvc" />
+  <property name="build.c++.jsvc"
+    value="${build.c++}/jsvc" />
+  <property name="jsvc.install.dir" value="${dist.dir}/bin" />
 	
   <!-- IVY properteis set here -->
   <property name="ivy.dir" location="ivy" />
@@ -1714,7 +1721,7 @@
   </target>
 
   <target name="compile-c++" 
-          depends="compile-c++-pipes"/>
+          depends="compile-c++-pipes, jsvc"/>
 
   <target name="create-c++-examples-pipes-makefile" 
           depends="check-c++-makefiles" 
@@ -2251,4 +2258,16 @@
   </target>
 
   <!-- end of task-controller targets -->
+
+  <!-- jsvc targets -->
+  <target name="jsvc">
+    <mkdir dir="${build.c++.jsvc}" />
+    <exec executable="sh" dir="${build.c++.jsvc}"
+        failonerror="yes">
+        <arg value="${c++.jsvc.src}/build.sh" />
+    </exec>
+    <copy file="${build.c++.jsvc}/jsvc" todir="${jsvc.install.dir}" 
+        verbose="true" />
+  </target>
+
 </project>

Modified: hadoop/common/branches/branch-0.20-security-patches/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/ivy.xml?rev=1077464&r1=1077463&r2=1077464&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/ivy.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/ivy.xml Fri Mar  4 04:17:43 2011
@@ -159,7 +159,12 @@
       rev="${commons-el.version}"
       conf="jetty->master"/>
 
-
+    <!--Configuration: secure datanode -->
+    <dependency org="commons-daemon" 
+      name="commons-daemon" 
+      rev="${commons-daemon.version}"
+      conf="server->default" /> 
+    
     <!--Configuration: commons-logging -->
 
     <!--it is essential that only the master JAR of commons logging
@@ -275,5 +280,5 @@
       rev="${aspectj.version}"
       conf="common->default">
     </dependency>
-  </dependencies>
+ </dependencies>
 </ivy-module>

Modified: hadoop/common/branches/branch-0.20-security-patches/ivy/hadoop-core.pom
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/ivy/hadoop-core.pom?rev=1077464&r1=1077463&r2=1077464&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/ivy/hadoop-core.pom (original)
+++ hadoop/common/branches/branch-0.20-security-patches/ivy/hadoop-core.pom Fri Mar  4 04:17:43 2011
@@ -253,5 +253,11 @@
       <version>0.52</version>
       <scope>optional</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.directory.daemon</groupId>
+      <artifactId>daemon-plugin</artifactId>
+      <version>${commons-daemon.version}</version>
+    </dependency>
+
   </dependencies>
 </project>

Modified: hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties?rev=1077464&r1=1077463&r2=1077464&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties (original)
+++ hadoop/common/branches/branch-0.20-security-patches/ivy/libraries.properties Fri Mar  4 04:17:43 2011
@@ -27,6 +27,7 @@ checkstyle.version=4.2
 commons-cli.version=1.2
 commons-codec.version=1.4
 commons-collections.version=3.1
+commons-daemon.version=1.0.1
 commons-httpclient.version=3.0.1
 commons-lang.version=2.4
 commons-logging.version=1.0.4

Added: hadoop/common/branches/branch-0.20-security-patches/src/c++/jsvc/build.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/c%2B%2B/jsvc/build.sh?rev=1077464&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/c++/jsvc/build.sh (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/c++/jsvc/build.sh Fri Mar  4 04:17:43 2011
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+###Define variables###
+
+#This variable defines the name of the jsvc tar file.
+#It should be modified if a different version of jsvc is needed.
+JSVC_SRC_DIR=commons-daemon-1.0.2-src
+JSVC_SRC_TAR_FILE=${JSVC_SRC_DIR}.tar.gz
+
+#This variable defines the link where the jsvc source tar is located.
+JSVC_SRC_TAR_LOCATION=http://www.apache.org/dist/commons/daemon/source/${JSVC_SRC_TAR_FILE}
+
+JSVC_SRC_CODE_DIR=src/native/unix
+JSVC_EXECUTABLE=jsvc
+
+###Download and untar###
+
+wget --no-check-certificate $JSVC_SRC_TAR_LOCATION
+tar zxf $JSVC_SRC_TAR_FILE
+
+###Now build###
+
+cd $JSVC_SRC_DIR/$JSVC_SRC_CODE_DIR
+sh support/buildconf.sh
+./configure 
+make clean
+make
+cd -
+cp $JSVC_SRC_DIR/$JSVC_SRC_CODE_DIR/$JSVC_EXECUTABLE .

Modified: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java?rev=1077464&r1=1077463&r2=1077464&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyHttpServer.java Fri Mar  4 04:17:43 2011
@@ -46,7 +46,7 @@ public class ProxyHttpServer extends Htt
   }
 
   /** {@inheritDoc} */
-  protected Connector createBaseListener(Configuration conf)
+  public Connector createBaseListener(Configuration conf)
       throws IOException {
     final String sAddr;
     if (null == (sAddr = conf.get("proxy.http.test.listener.addr"))) {
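
Note on the signature change: createBaseListener becomes public here because the base-class method in HttpServer (further down in this commit) is widened to public, and Java forbids an override from narrowing access. A tiny illustration of that rule, with placeholder names:

    class Base {
      public Object createListener() { return new Object(); }
    }

    class Sub extends Base {
      // protected Object createListener() { ... }  // would not compile:
      // "attempting to assign weaker access privileges; was public"

      @Override
      public Object createListener() { return new Object(); } // must stay public
    }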

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java?rev=1077464&r1=1077463&r2=1077464&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java Fri Mar  4 04:17:43 2011
@@ -98,6 +98,8 @@ public class HttpServer implements Filte
   private static final int MAX_RETRIES = 10;
   private final Configuration conf;
 
+  private boolean listenerStartedExternally = false;
+
   /** Same as this(name, bindAddress, port, findPort, null); */
   public HttpServer(String name, String bindAddress, int port, boolean findPort
       ) throws IOException {
@@ -106,9 +108,14 @@ public class HttpServer implements Filte
 
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf) throws IOException {
-    this(name, bindAddress, port, findPort, conf, null);
+    this(name, bindAddress, port, findPort, conf, null, null);
   }
 
+  public HttpServer(String name, String bindAddress, int port,
+      boolean findPort, Configuration conf, Connector connector) throws IOException {
+    this(name, bindAddress, port, findPort, conf, null, connector);
+  }
+  
   /**
    * Create a status server on the given port.
    * The jsp scripts are taken from src/webapps/<name>.
@@ -122,14 +129,27 @@ public class HttpServer implements Filte
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf, AccessControlList adminsAcl)
       throws IOException {
+    this(name, bindAddress, port, findPort, conf, adminsAcl, null);
+  }
+
+  public HttpServer(String name, String bindAddress, int port,
+      boolean findPort, Configuration conf, AccessControlList adminsAcl, 
+      Connector connector) throws IOException{
     webServer = new Server();
     this.findPort = findPort;
     this.conf = conf;
     this.adminsAcl = adminsAcl;
 
-    listener = createBaseListener(conf);
-    listener.setHost(bindAddress);
-    listener.setPort(port);
+    if(connector == null) {
+      listenerStartedExternally = false;
+      listener = createBaseListener(conf);
+      listener.setHost(bindAddress);
+      listener.setPort(port);
+    } else {
+      listenerStartedExternally = true;
+      listener = connector;
+    }
+    
     webServer.addConnector(listener);
 
     webServer.setThreadPool(new QueuedThreadPool());
@@ -167,16 +187,21 @@ public class HttpServer implements Filte
    * provided. This wrapper and all subclasses must create at least one
    * listener.
    */
-  protected Connector createBaseListener(Configuration conf)
+  public Connector createBaseListener(Configuration conf)
       throws IOException {
+    return HttpServer.createDefaultChannelConnector();
+  }
+  
+  // LimitedPrivate for creating secure datanodes
+  public static Connector createDefaultChannelConnector() {
     SelectChannelConnector ret = new SelectChannelConnector();
     ret.setLowResourceMaxIdleTime(10000);
     ret.setAcceptQueueSize(128);
     ret.setResolveNames(false);
     ret.setUseDirectBuffers(false);
-    return ret;
+    return ret;   
   }
-
+  
   /** Get an array of FilterConfiguration specified in the conf */
   private static FilterInitializer[] getFilterInitializers(Configuration conf) {
     if (conf == null) {
@@ -508,68 +533,76 @@ public class HttpServer implements Filte
    */
   public void start() throws IOException {
     try {
-      int port = 0;
-      int oriPort = listener.getPort(); // The original requested port
-      while (true) {
-        try {
-          port = webServer.getConnectors()[0].getLocalPort();
-          LOG.info("Port returned by webServer.getConnectors()[0]." +
-          		"getLocalPort() before open() is "+ port + 
-          		". Opening the listener on " + oriPort);
-          listener.open();
-          port = listener.getLocalPort();
-          LOG.info("listener.getLocalPort() returned " + listener.getLocalPort() + 
-                " webServer.getConnectors()[0].getLocalPort() returned " +
-                webServer.getConnectors()[0].getLocalPort());
-          //Workaround to handle the problem reported in HADOOP-4744
-          if (port < 0) {
-            Thread.sleep(100);
-            int numRetries = 1;
-            while (port < 0) {
-              LOG.warn("listener.getLocalPort returned " + port);
-              if (numRetries++ > MAX_RETRIES) {
-                throw new Exception(" listener.getLocalPort is returning " +
-                		"less than 0 even after " +numRetries+" resets");
-              }
-              for (int i = 0; i < 2; i++) {
-                LOG.info("Retrying listener.getLocalPort()");
-                port = listener.getLocalPort();
+      if(listenerStartedExternally) { // Expect that listener was started securely
+        if(listener.getLocalPort() == -1) // ... and verify
+          throw new Exception("Exepected webserver's listener to be started" +
+          		"previously but wasn't");
+        // And skip all the port rolling issues.
+        webServer.start();
+      } else {
+        int port = 0;
+        int oriPort = listener.getPort(); // The original requested port
+        while (true) {
+          try {
+            port = webServer.getConnectors()[0].getLocalPort();
+            LOG.info("Port returned by webServer.getConnectors()[0]." +
+            		"getLocalPort() before open() is "+ port + 
+            		". Opening the listener on " + oriPort);
+            listener.open();
+            port = listener.getLocalPort();
+            LOG.info("listener.getLocalPort() returned " + listener.getLocalPort() + 
+                  " webServer.getConnectors()[0].getLocalPort() returned " +
+                  webServer.getConnectors()[0].getLocalPort());
+            //Workaround to handle the problem reported in HADOOP-4744
+            if (port < 0) {
+              Thread.sleep(100);
+              int numRetries = 1;
+              while (port < 0) {
+                LOG.warn("listener.getLocalPort returned " + port);
+                if (numRetries++ > MAX_RETRIES) {
+                  throw new Exception(" listener.getLocalPort is returning " +
+                  		"less than 0 even after " +numRetries+" resets");
+                }
+                for (int i = 0; i < 2; i++) {
+                  LOG.info("Retrying listener.getLocalPort()");
+                  port = listener.getLocalPort();
+                  if (port > 0) {
+                    break;
+                  }
+                  Thread.sleep(200);
+                }
                 if (port > 0) {
                   break;
                 }
-                Thread.sleep(200);
+                LOG.info("Bouncing the listener");
+                listener.close();
+                Thread.sleep(1000);
+                listener.setPort(oriPort == 0 ? 0 : (oriPort += 1));
+                listener.open();
+                Thread.sleep(100);
+                port = listener.getLocalPort();
               }
-              if (port > 0) {
-                break;
+            } //Workaround end
+            LOG.info("Jetty bound to port " + port);
+            webServer.start();
+            break;
+          } catch (IOException ex) {
+            // if this is a bind exception,
+            // then try the next port number.
+            if (ex instanceof BindException) {
+              if (!findPort) {
+                throw (BindException) ex;
               }
-              LOG.info("Bouncing the listener");
-              listener.close();
-              Thread.sleep(1000);
-              listener.setPort(oriPort == 0 ? 0 : (oriPort += 1));
-              listener.open();
-              Thread.sleep(100);
-              port = listener.getLocalPort();
-            }
-          } //Workaround end
-          LOG.info("Jetty bound to port " + port);
-          webServer.start();
-          break;
-        } catch (IOException ex) {
-          // if this is a bind exception,
-          // then try the next port number.
-          if (ex instanceof BindException) {
-            if (!findPort) {
-              throw (BindException) ex;
-            }
-          } else {
-            LOG.info("HttpServer.start() threw a non Bind IOException"); 
+            } else {
+              LOG.info("HttpServer.start() threw a non Bind IOException"); 
+              throw ex;
+           }
+          } catch (MultiException ex) {
+            LOG.info("HttpServer.start() threw a MultiException"); 
             throw ex;
           }
-        } catch (MultiException ex) {
-          LOG.info("HttpServer.start() threw a MultiException"); 
-          throw ex;
+          listener.setPort((oriPort += 1));
         }
-        listener.setPort((oriPort += 1));
       }
     } catch (IOException e) {
       throw e;
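
Net effect of the HttpServer changes: a caller may now pass in a Connector that was created and opened elsewhere (in the secure case, while the process still ran as root), and start() then only verifies the listener is live instead of running the HADOOP-4744 port-rolling workaround. A hedged usage sketch against the constructors added above (host and port values are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.HttpServer;
    import org.mortbay.jetty.Connector;

    public class ExternalListenerSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Create and bind the connector up front; in the secure case this
        // happens while the process still runs as root.
        Connector listener = HttpServer.createDefaultChannelConnector();
        listener.setHost("0.0.0.0");
        listener.setPort(1006);   // illustrative privileged port
        listener.open();          // bind now, before privileges are dropped

        // Passing a non-null connector sets listenerStartedExternally, so
        // start() skips the port-rolling logic and just starts Jetty.
        HttpServer server =
            new HttpServer("datanode", "0.0.0.0", 1006, false, conf, listener);
        server.start();
      }
    }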

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1077464&r1=1077463&r2=1077464&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java Fri Mar  4 04:17:43 2011
@@ -71,6 +71,7 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.common.IncorrectVersionException;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
+import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResources;
 import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.FileChecksumServlets;
@@ -220,6 +221,8 @@ public class DataNode extends Configured
   // For InterDataNodeProtocol
   public Server ipcServer;
 
+  private SecureResources secureResources = null;
+  
   /**
    * Current system time.
    * @return current time in msec.
@@ -234,6 +237,16 @@ public class DataNode extends Configured
    */
   DataNode(final Configuration conf, 
            final AbstractList<File> dataDirs) throws IOException {
+    this(conf, dataDirs, null);
+  }
+  
+  /**
+   * Start a DataNode with the specified server sockets, for secure
+   * environments where privileged ports must be bound by a process with
+   * higher capability and then injected into the datanode.
+   */
+  DataNode(final Configuration conf,
+           final AbstractList<File> dataDirs, SecureResources resources) throws IOException {
     super(conf);
     UserGroupInformation.setConfiguration(conf);
     SecurityUtil.login(conf, DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, 
@@ -242,11 +255,11 @@ public class DataNode extends Configured
     datanodeObject = this;
 
     try {
-      startDataNode(conf, dataDirs);
+      startDataNode(conf, dataDirs, resources);
     } catch (IOException ie) {
       shutdown();
       throw ie;
-    }
+    }   
   }
     
   
@@ -261,8 +274,13 @@ public class DataNode extends Configured
    * @throws IOException
    */
   void startDataNode(Configuration conf, 
-                     AbstractList<File> dataDirs
+                     AbstractList<File> dataDirs, SecureResources resources
                      ) throws IOException {
+    if(UserGroupInformation.isSecurityEnabled() && resources == null)
+      throw new RuntimeException("Cannot start secure cluster without " +
+      		"privileged resources.");
+    
+    this.secureResources = resources;
     // use configured nameserver & interface to get local hostname
     if (conf.get("slave.host.name") != null) {
       machineName = conf.get("slave.host.name");   
@@ -283,12 +301,8 @@ public class DataNode extends Configured
     this.transferToAllowed = conf.getBoolean("dfs.datanode.transferTo.allowed", 
                                              true);
     this.writePacketSize = conf.getInt("dfs.write.packet.size", 64*1024);
-    String address = 
-      NetUtils.getServerAddress(conf,
-                                "dfs.datanode.bindAddress", 
-                                "dfs.datanode.port",
-                                "dfs.datanode.address");
-    InetSocketAddress socAddr = NetUtils.createSocketAddr(address);
+
+    InetSocketAddress socAddr = DataNode.getStreamingAddr(conf);
     int tmpPort = socAddr.getPort();
     storage = new DataStorage();
     // construct registration
@@ -332,10 +346,15 @@ public class DataNode extends Configured
     }
 
       
-    // find free port
-    ServerSocket ss = (socketWriteTimeout > 0) ? 
-          ServerSocketChannel.open().socket() : new ServerSocket();
-    Server.bind(ss, socAddr, 0);
+    // find free port or use the privileged port provided
+    ServerSocket ss;
+    if(secureResources == null) {
+      ss = (socketWriteTimeout > 0) ? 
+        ServerSocketChannel.open().socket() : new ServerSocket();
+      Server.bind(ss, socAddr, 0);
+    } else {
+      ss = resources.getStreamingSocket();
+    }
     ss.setReceiveBufferSize(DEFAULT_DATA_SOCKET_SIZE); 
     // adjust machine name with the actual port
     tmpPort = ss.getLocalPort();
@@ -376,16 +395,13 @@ public class DataNode extends Configured
     }
 
     //create a servlet to serve full-file content
-    String infoAddr = 
-      NetUtils.getServerAddress(conf, 
-                              "dfs.datanode.info.bindAddress", 
-                              "dfs.datanode.info.port",
-                              "dfs.datanode.http.address");
-    InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
+    InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
     String infoHost = infoSocAddr.getHostName();
     int tmpInfoPort = infoSocAddr.getPort();
-    this.infoServer = new HttpServer("datanode", infoHost, tmpInfoPort,
-        tmpInfoPort == 0, conf);
+    this.infoServer = (secureResources == null) 
+       ? new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0, conf)
+       : new HttpServer("datanode", infoHost, tmpInfoPort, tmpInfoPort == 0, conf, 
+           secureResources.getListener());
     if (conf.getBoolean("dfs.https.enable", false)) {
       boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
       InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
@@ -429,6 +445,18 @@ public class DataNode extends Configured
   }
 
   /**
+   * Determine the HTTP server's effective address.
+   */
+  public static InetSocketAddress getInfoAddr(Configuration conf) {
+    String infoAddr = NetUtils.getServerAddress(conf, 
+        "dfs.datanode.info.bindAddress", 
+        "dfs.datanode.info.port",
+        "dfs.datanode.http.address");
+    
+    return NetUtils.createSocketAddr(infoAddr); 
+  }
+
+  /**
    * Creates either NIO or regular depending on socketWriteTimeout.
    */
   protected Socket newSocket() throws IOException {
@@ -1322,6 +1350,16 @@ public class DataNode extends Configured
    */
   public static DataNode instantiateDataNode(String args[],
                                       Configuration conf) throws IOException {
+    return instantiateDataNode(args, conf, null);
+  }
+  
+  /** Instantiate a single datanode object. This must be run by invoking
+   *  {@link DataNode#runDatanodeDaemon(DataNode)} subsequently. 
+   * @param resources Secure resources needed to run under Kerberos
+   */
+  public static DataNode instantiateDataNode(String args[],
+                                      Configuration conf, 
+                                      SecureResources resources) throws IOException {
     if (conf == null)
       conf = new Configuration();
     if (!parseArguments(args, conf)) {
@@ -1336,7 +1374,7 @@ public class DataNode extends Configured
     String[] dataDirs = conf.getStrings(DATA_DIR_KEY);
     dnThreadName = "DataNode: [" +
                         StringUtils.arrayToString(dataDirs) + "]";
-    return makeInstance(dataDirs, conf);
+    return makeInstance(dataDirs, conf, resources);
   }
 
   /** Instantiate & Start a single datanode daemon and wait for it to finish.
@@ -1344,7 +1382,17 @@ public class DataNode extends Configured
    */
   public static DataNode createDataNode(String args[],
                                  Configuration conf) throws IOException {
-    DataNode dn = instantiateDataNode(args, conf);
+    return createDataNode(args, conf, null);
+  }
+  
+  
+  /** Instantiate & Start a single datanode daemon and wait for it to finish.
+   *  If this thread is specifically interrupted, it will stop waiting.
+   *  LimitedPrivate for creating secure datanodes
+   */
+  public static DataNode createDataNode(String args[],
+            Configuration conf, SecureResources resources) throws IOException {
+    DataNode dn = instantiateDataNode(args, conf, resources);
     runDatanodeDaemon(dn);
     return dn;
   }
@@ -1364,12 +1412,13 @@ public class DataNode extends Configured
    * @param dataDirs List of directories, where the new DataNode instance should
    * keep its files.
    * @param conf Configuration instance to use.
+   * @param resources Secure resources needed to run under Kerberos
    * @return DataNode instance for given list of data dirs and conf, or null if
    * no directory from this directory list can be created.
    * @throws IOException
    */
-  public static DataNode makeInstance(String[] dataDirs, Configuration conf)
-    throws IOException {
+  public static DataNode makeInstance(String[] dataDirs, Configuration conf, 
+      SecureResources resources) throws IOException {
     LocalFileSystem localFS = FileSystem.getLocal(conf);
     ArrayList<File> dirs = new ArrayList<File>();
     FsPermission dataDirPermission = 
@@ -1385,7 +1434,7 @@ public class DataNode extends Configured
       }
     }
     if (dirs.size() > 0) 
-      return new DataNode(conf, dirs);
+      return new DataNode(conf, dirs, resources);
     LOG.error("All directories in " + DATA_DIR_KEY + " are invalid.");
     return null;
   }
@@ -1465,18 +1514,20 @@ public class DataNode extends Configured
     return data;
   }
 
-  /**
-   */
-  public static void main(String args[]) {
+  public static void secureMain(String [] args, SecureResources resources) {
     try {
       StringUtils.startupShutdownMessage(DataNode.class, args, LOG);
-      DataNode datanode = createDataNode(args, null);
+      DataNode datanode = createDataNode(args, null, resources);
       if (datanode != null)
         datanode.join();
     } catch (Throwable e) {
       LOG.error(StringUtils.stringifyException(e));
       System.exit(-1);
-    }
+    }   
+  }
+  
+  public static void main(String args[]) {
+    secureMain(args, null);
   }
 
   // InterDataNodeProtocol implementation
@@ -1735,4 +1786,13 @@ public class DataNode extends Configured
     LOG.info(who + " calls recoverBlock(block=" + block
         + ", targets=[" + msg + "])");
   }
+  
+  public static InetSocketAddress getStreamingAddr(Configuration conf) {
+    String address = 
+      NetUtils.getServerAddress(conf,
+                                "dfs.datanode.bindAddress", 
+                                "dfs.datanode.port",
+                                "dfs.datanode.address");
+    return NetUtils.createSocketAddr(address);
+  }
 }
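
With these changes DataNode exposes two public address helpers and a secureMain() entry point; main() now simply delegates to secureMain(args, null). A short sketch of the call paths (running it would start a real datanode, so it is illustrative only):

    import java.net.InetSocketAddress;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.server.datanode.DataNode;

    public class EntryPointSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();

        // The new public helpers resolve the datanode's two endpoints from
        // configuration (dfs.datanode.address and dfs.datanode.http.address,
        // falling back to the older bindAddress/port keys).
        InetSocketAddress streaming = DataNode.getStreamingAddr(conf);
        InetSocketAddress info = DataNode.getInfoAddr(conf);
        System.out.println("streaming=" + streaming + " http=" + info);

        // Non-secure path, identical to the old main(); the secure path
        // passes a SecureResources built by SecureDataNodeStarter instead.
        DataNode.secureMain(args, null);
      }
    }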

Added: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java?rev=1077464&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java Fri Mar  4 04:17:43 2011
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hdfs.server.datanode;
+
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
+
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.nio.channels.ServerSocketChannel;
+
+import org.apache.commons.daemon.Daemon;
+import org.apache.commons.daemon.DaemonContext;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.server.common.HdfsConstants;
+import org.apache.hadoop.http.HttpServer;
+import org.mortbay.jetty.nio.SelectChannelConnector;
+
+/**
+ * Utility class to start a datanode in a secure cluster, first obtaining 
+ * privileged resources before main startup and handing them to the datanode.
+ */
+public class SecureDataNodeStarter implements Daemon {
+  /**
+   * Stash the resources needed for datanode operation in a secure environment.
+   */
+  public static class SecureResources {
+    private final ServerSocket streamingSocket;
+    private final SelectChannelConnector listener;
+    public SecureResources(ServerSocket streamingSocket,
+        SelectChannelConnector listener) {
+
+      this.streamingSocket = streamingSocket;
+      this.listener = listener;
+    }
+
+    public ServerSocket getStreamingSocket() { return streamingSocket; }
+
+    public SelectChannelConnector getListener() { return listener; }
+  }
+  
+  private String [] args;
+  private SecureResources resources;
+  
+  @Override
+  public void init(DaemonContext context) throws Exception {
+    System.err.println("Initializing secure datanode resources");
+    // We should only start up a secure datanode in a Kerberos-secured cluster
+    Configuration conf = new Configuration(); // Skip UGI method to not log in
+    if(!conf.get(HADOOP_SECURITY_AUTHENTICATION).equals("kerberos"))
+      throw new RuntimeException("Cannot start secure datanode in unsecure cluster");
+    
+    // Stash command-line arguments for regular datanode
+    args = context.getArguments();
+    
+    // Obtain secure port for data streaming to datanode
+    InetSocketAddress socAddr = DataNode.getStreamingAddr(conf);
+    int socketWriteTimeout = conf.getInt("dfs.datanode.socket.write.timeout",
+        HdfsConstants.WRITE_TIMEOUT);
+    
+    ServerSocket ss = (socketWriteTimeout > 0) ? 
+        ServerSocketChannel.open().socket() : new ServerSocket();
+    ss.bind(socAddr, 0);
+    
+    // Check that we got the port we need
+    if(ss.getLocalPort() != socAddr.getPort())
+      throw new RuntimeException("Unable to bind on specified streaming port in secure " +
+      		"context. Needed " + socAddr.getPort() + ", got " + ss.getLocalPort());
+
+    // Obtain secure listener for web server
+    SelectChannelConnector listener = 
+                   (SelectChannelConnector)HttpServer.createDefaultChannelConnector();
+    InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
+    listener.setHost(infoSocAddr.getHostName());
+    listener.setPort(infoSocAddr.getPort());
+    // Open listener here in order to bind to port as root
+    listener.open(); 
+    if(listener.getPort() != infoSocAddr.getPort())
+      throw new RuntimeException("Unable to bind on specified info port in secure " +
+          "context. Needed " + socAddr.getPort() + ", got " + ss.getLocalPort());
+    System.err.println("Successfully obtained privileged resources (streaming port = "
+        + ss + " ) (http listener port = " + listener.getConnection() +")");
+    
+    if(ss.getLocalPort() >= 1023 || listener.getPort() >= 1023)
+      System.err.println("Warning: Starting secure datanode with unprivileged ports");
+    
+    resources = new SecureResources(ss, listener);
+  }
+
+  @Override
+  public void start() throws Exception {
+    System.err.println("Starting regular datanode initialization");
+    DataNode.secureMain(args, resources);
+  }
+  
+  @Override public void destroy() { /* Nothing to do */ }
+  @Override public void stop() throws Exception { /* Nothing to do */ }
+}
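
For reference, jsvc drives the class above through the commons-daemon lifecycle. A rough, hypothetical harness approximating that call order (in production jsvc itself performs the fork and the root-to-hdfs privilege drop between init() and start(), which a plain JVM cannot reproduce):

    import org.apache.commons.daemon.DaemonContext;
    import org.apache.commons.daemon.DaemonController;
    import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter;

    public class JsvcCallOrderSketch {
      public static void main(String[] cmdline) throws Exception {
        final String[] args = cmdline;

        // Minimal DaemonContext handing the command line through, as jsvc does.
        DaemonContext context = new DaemonContext() {
          @Override public DaemonController getController() { return null; }
          @Override public String[] getArguments() { return args; }
        };

        SecureDataNodeStarter starter = new SecureDataNodeStarter();
        starter.init(context);  // under jsvc: runs as root, binds privileged ports
        starter.start();        // under jsvc: runs as hdfs, calls DataNode.secureMain
      }
    }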