Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:11:55 UTC

svn commit: r1077407 [2/2] - in /hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy: ./ conf/ src/java/org/apache/hadoop/hdfsproxy/ src/test/org/apache/hadoop/hdfsproxy/

Added: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java?rev=1077407&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyForwardServlet.java Fri Mar  4 04:11:54 2011
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.IOException;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * A servlet that forwards an incoming request to the servlet context
+ * serving the HDFS proxy version mapped to the requested host name.
+ */
+public class ProxyForwardServlet extends HttpServlet {
+  private static final long serialVersionUID = 1L;
+  private static Configuration configuration = null;
+  public static final Log LOG = LogFactory.getLog(ProxyForwardServlet.class);
+
+  /** {@inheritDoc} */
+  @Override
+  public void init() throws ServletException {
+    ServletContext context = getServletContext();
+    configuration = (Configuration) context
+        .getAttribute("org.apache.hadoop.hdfsproxy.conf");
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws IOException, ServletException {
+    String hostname = request.getServerName();
+
+    String version = configuration.get(hostname);
+    if (version == null) {
+      // extract from hostname directly
+      String[] strs = hostname.split("[-\\.]");
+      version = "/" + strs[0];
+    }
+
+    ServletContext curContext = getServletContext();
+    ServletContext dstContext = curContext.getContext(version);
+
+    // avoid infinite forwarding.
+    if (dstContext == null
+        || "HDFS Proxy Forward".equals(dstContext.getServletContextName())) {
+      LOG.error("Context (" + version
+          + ".war) non-exist or restricted from access");
+      response.sendError(HttpServletResponse.SC_NOT_FOUND);
+      return;
+    }
+    LOG.debug("Request to " + hostname + " is forwarded to version " + version);
+    forwardRequest(request, response, dstContext, request.getServletPath());
+
+  }
+
+  /** Forwards the request to the given servlet context. */
+  public void forwardRequest(HttpServletRequest request,
+      HttpServletResponse response, ServletContext context, String pathInfo)
+      throws IOException, ServletException {
+    String path = buildForwardPath(request, pathInfo);
+    RequestDispatcher dispatcher = context.getRequestDispatcher(path);
+    if (dispatcher == null) {
+      LOG.info("There was no such dispatcher: " + path);
+      response.sendError(HttpServletResponse.SC_NO_CONTENT);
+      return;
+    }
+    dispatcher.forward(request, response);
+  }
+
+  /** Builds the forward path from the servlet path, path info and query string. */
+  protected String buildForwardPath(HttpServletRequest request, String pathInfo) {
+    String path = pathInfo;
+    if (request.getPathInfo() != null) {
+      path += request.getPathInfo();
+    }
+    if (request.getQueryString() != null) {
+      path += "?" + request.getQueryString();
+    }
+    return path;
+  }
+}
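
For illustration: the version lookup in doGet() above first consults the
proxy configuration for a key equal to the request's host name, and only
falls back to deriving the context name from the first host-name token.
A minimal, self-contained sketch of both paths; the host name
"proxy-v3.example.com" and the "/v3" context are hypothetical:

    import org.apache.hadoop.conf.Configuration;

    public class ForwardMappingExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // hypothetical explicit mapping: host name -> web-app context
        conf.set("proxy-v3.example.com", "/v3");

        String hostname = "proxy-v3.example.com";
        String version = conf.get(hostname);            // "/v3"
        if (version == null) {
          // fallback used by doGet(): first token of the host name,
          // so an unmapped "proxy-v3.example.com" would yield "/proxy"
          version = "/" + hostname.split("[-\\.]")[0];
        }
        System.out.println(version);
      }
    }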

Modified: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java?rev=1077407&r1=1077406&r2=1077407&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java Fri Mar  4 04:11:54 2011
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hdfsproxy;
 
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 
 import org.apache.hadoop.conf.Configuration;
@@ -32,6 +34,8 @@ public class ProxyListPathsServlet exten
   @Override
   protected UserGroupInformation getUGI(HttpServletRequest request,
                                         Configuration conf) {
-    return (UserGroupInformation) request.getAttribute("authorized.ugi");
+    String userID = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    return ProxyUtil.getProxyUGIFor(userID);
   }
 }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java?rev=1077407&r1=1077406&r2=1077407&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java Fri Mar  4 04:11:54 2011
@@ -38,6 +38,8 @@ public class ProxyStreamFile extends Str
   @Override
   protected UserGroupInformation getUGI(HttpServletRequest request,
                                         Configuration conf) {
-    return (UserGroupInformation) request.getAttribute("authorized.ugi");
+    String userID = (String) request
+        .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
+    return ProxyUtil.getProxyUGIFor(userID);
   }
 }
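
Both getUGI() overrides now resolve the authenticated user ID (stored on
the request by the proxy's filter chain) into a proxy UGI via
ProxyUtil.getProxyUGIFor(), added below. A minimal sketch of the
underlying impersonation pattern using the stock UserGroupInformation
API; the user name "alice" is hypothetical, and the proxy's login user
must be authorized to impersonate her:

    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.UserGroupInformation;

    public class ProxyUgiExample {
      public static void main(String[] args) throws Exception {
        // impersonate the end user on top of the service's login identity
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(
            "alice", UserGroupInformation.getLoginUser());
        // perform filesystem access as the proxied user
        FileSystem fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
          public FileSystem run() throws Exception {
            return FileSystem.get(new Configuration());
          }
        });
        System.out.println(fs.getUri());
      }
    }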

Added: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java?rev=1077407&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java Fri Mar  4 04:11:54 2011
@@ -0,0 +1,353 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.security.KeyStore;
+import java.security.cert.X509Certificate;
+import java.util.Date;
+import java.util.Set;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSession;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.TrustManagerFactory;
+import javax.net.ssl.X509TrustManager;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSInputStream;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.HostsFileReader;
+
+/**
+ * Utility methods and a small command-line tool for the HDFS proxy.
+ */
+public class ProxyUtil {
+  public static final Log LOG = LogFactory.getLog(ProxyUtil.class);
+  private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
+  // warn when a server certificate expires within this many days
+  private static final int CERT_EXPIRATION_WARNING_THRESHOLD = 30;
+
+  private static enum UtilityOption {
+    RELOAD("-reloadPermFiles"), GET("-get"), CHECKCERTS("-checkcerts");
+
+    private String name = null;
+
+    private UtilityOption(String arg) {
+      this.name = arg;
+    }
+
+    public String getName() {
+      return name;
+    }
+  }
+
+  /**
+   * Dummy hostname verifier, used to bypass hostname checking.
+   */
+  private static class DummyHostnameVerifier implements HostnameVerifier {
+    public boolean verify(String hostname, SSLSession session) {
+      return true;
+    }
+  }
+
+  /**
+   * Dummy trust manager, used to bypass server certificate checking.
+   */
+  private static class DummyTrustManager implements X509TrustManager {
+    public void checkClientTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public void checkServerTrusted(X509Certificate[] chain, String authType) {
+    }
+
+    public X509Certificate[] getAcceptedIssuers() {
+      return null;
+    }
+  }
+
+  private static HttpsURLConnection openConnection(String hostname, int port,
+      String path) throws IOException {
+    try {
+      final URL url = new URI("https", null, hostname, port, path, null, null)
+          .toURL();
+      HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
+      // bypass hostname verification
+      conn.setHostnameVerifier(new DummyHostnameVerifier());
+      conn.setRequestMethod("GET");
+      return conn;
+    } catch (URISyntaxException e) {
+      throw (IOException) new IOException().initCause(e);
+    }
+  }
+
+  private static void setupSslProps(Configuration conf) throws IOException {
+    FileInputStream fis = null;
+    try {
+      SSLContext sc = SSLContext.getInstance("SSL");
+      KeyManager[] kms = null;
+      TrustManager[] tms = null;
+      if (conf.get("ssl.client.keystore.location") != null) {
+        // initialize default key manager with keystore file and pass
+        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
+        KeyStore ks = KeyStore.getInstance(conf.get("ssl.client.keystore.type",
+            "JKS"));
+        char[] ksPass = conf.get("ssl.client.keystore.password", "changeit")
+            .toCharArray();
+        fis = new FileInputStream(conf.get("ssl.client.keystore.location",
+            "keystore.jks"));
+        ks.load(fis, ksPass);
+        kmf.init(ks, conf.get("ssl.client.keystore.keypassword", "changeit")
+            .toCharArray());
+        kms = kmf.getKeyManagers();
+        fis.close();
+        fis = null;
+      }
+      // initialize default trust manager with keystore file and pass
+      if (conf.getBoolean("ssl.client.do.not.authenticate.server", false)) {
+        // by pass trustmanager validation
+        tms = new DummyTrustManager[] { new DummyTrustManager() };
+      } else {
+        TrustManagerFactory tmf = TrustManagerFactory.getInstance("PKIX");
+        KeyStore ts = KeyStore.getInstance(conf.get(
+            "ssl.client.truststore.type", "JKS"));
+        char[] tsPass = conf.get("ssl.client.truststore.password", "changeit")
+            .toCharArray();
+        fis = new FileInputStream(conf.get("ssl.client.truststore.location",
+            "truststore.jks"));
+        ts.load(fis, tsPass);
+        tmf.init(ts);
+        tms = tmf.getTrustManagers();
+      }
+      sc.init(kms, tms, new java.security.SecureRandom());
+      HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
+    } catch (Exception e) {
+      throw new IOException("Could not initialize SSLContext", e);
+    } finally {
+      if (fis != null) {
+        fis.close();
+      }
+    }
+  }
+
+  static InetSocketAddress getSslAddr(Configuration conf) throws IOException {
+    String addr = conf.get("hdfsproxy.https.address");
+    if (addr == null)
+      throw new IOException("HdfsProxy address is not specified");
+    return NetUtils.createSocketAddr(addr);
+  }
+
+  static boolean sendCommand(Configuration conf, String path)
+      throws IOException {
+    setupSslProps(conf);
+    int sslPort = getSslAddr(conf).getPort();
+    int err = 0;
+    StringBuilder b = new StringBuilder();
+
+    HostsFileReader hostsReader = new HostsFileReader(conf.get(
+        "hdfsproxy.hosts", "hdfsproxy-hosts"), "");
+    Set<String> hostsList = hostsReader.getHosts();
+    for (String hostname : hostsList) {
+      HttpsURLConnection connection = null;
+      try {
+        connection = openConnection(hostname, sslPort, path);
+        connection.connect();
+        if (LOG.isDebugEnabled()) {
+          StringBuilder sb = new StringBuilder();
+          X509Certificate[] clientCerts = (X509Certificate[]) connection
+              .getLocalCertificates();
+          if (clientCerts != null) {
+            for (X509Certificate cert : clientCerts)
+              sb.append("\n Client certificate Subject Name is "
+                  + cert.getSubjectX500Principal().getName());
+          } else {
+            sb.append("\n No client certificates were found");
+          }
+          X509Certificate[] serverCerts = (X509Certificate[]) connection
+              .getServerCertificates();
+          if (serverCerts != null) {
+            for (X509Certificate cert : serverCerts)
+              sb.append("\n Server certificate Subject Name is "
+                  + cert.getSubjectX500Principal().getName());
+          } else {
+            sb.append("\n No server certificates were found");
+          }
+          LOG.debug(sb.toString());
+        }
+        if (connection.getResponseCode() != HttpServletResponse.SC_OK) {
+          b.append("\n\t" + hostname + ": " + connection.getResponseCode()
+              + " " + connection.getResponseMessage());
+          err++;
+        }
+      } catch (IOException e) {
+        b.append("\n\t" + hostname + ": " + e.getLocalizedMessage());
+        if (LOG.isDebugEnabled())
+          LOG.debug("Exception happend for host " + hostname, e);
+        err++;
+      } finally {
+        if (connection != null)
+          connection.disconnect();
+      }
+    }
+    if (err > 0) {
+      System.err.print("Command failed on the following " + err + " host"
+          + (err == 1 ? ":" : "s:") + b.toString() + "\n");
+      return false;
+    }
+    return true;
+  }
+
+  static FSDataInputStream open(Configuration conf, String hostname, int port,
+      String path) throws IOException {
+    setupSslProps(conf);
+    HttpURLConnection connection = openConnection(hostname, port, path);
+    connection.connect();
+    final InputStream in = connection.getInputStream();
+    return new FSDataInputStream(new FSInputStream() {
+      public int read() throws IOException {
+        return in.read();
+      }
+
+      public int read(byte[] b, int off, int len) throws IOException {
+        return in.read(b, off, len);
+      }
+
+      public void close() throws IOException {
+        in.close();
+      }
+
+      public void seek(long pos) throws IOException {
+        throw new IOException("Can't seek!");
+      }
+
+      public long getPos() throws IOException {
+        throw new IOException("Position unknown!");
+      }
+
+      public boolean seekToNewSource(long targetPos) throws IOException {
+        return false;
+      }
+    });
+  }
+
+  static void checkServerCertsExpirationDays(Configuration conf,
+      String hostname, int port) throws IOException {
+    setupSslProps(conf);
+    HttpsURLConnection connection = openConnection(hostname, port, null);
+    connection.connect();
+    X509Certificate[] serverCerts = (X509Certificate[]) connection
+        .getServerCertificates();
+    Date curDate = new Date();
+    long curTime = curDate.getTime();
+    if (serverCerts != null) {
+      for (X509Certificate cert : serverCerts) {
+        StringBuilder sb = new StringBuilder();
+        sb.append("\n Server certificate Subject Name: "
+            + cert.getSubjectX500Principal().getName());
+        Date expDate = cert.getNotAfter();
+        long expTime = expDate.getTime();
+        int dayOffSet = (int) ((expTime - curTime) / MM_SECONDS_PER_DAY);
+        sb.append(" have " + dayOffSet + " days to expire");
+        if (dayOffSet < CERT_EXPIRATION_WARNING_THRESHOLD)
+          LOG.warn(sb.toString());
+        else
+          LOG.info(sb.toString());
+      }
+    } else {
+      LOG.info("\n No Server certs was found");
+    }
+
+    if (connection != null) {
+      connection.disconnect();
+    }
+  }
+
+  public static UserGroupInformation getProxyUGIFor(String userID) {
+    LOG.debug("Proxying as " + userID);
+    try {
+      return UserGroupInformation.createProxyUser(userID, UserGroupInformation.getLoginUser());
+    } catch (IOException e) {
+      throw new RuntimeException("Unable get current logged in user", e);
+    }
+  }
+
+  public static String getNamenode(Configuration conf) throws ServletException {
+    String nn = conf.get("fs.default.name");
+    if (nn == null) {
+      throw new ServletException(
+          "Proxy source cluster name node address not specified");
+    }
+    return nn;
+  }
+
+  public static void main(String[] args) throws Exception {
+    if (args.length < 1
+        || (!UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])
+            && !UtilityOption.GET.getName().equalsIgnoreCase(args[0])
+            && !UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0]))
+        || (UtilityOption.GET.getName().equalsIgnoreCase(args[0])
+            && args.length != 4)
+        || (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0])
+            && args.length != 3)) {
+      System.err.println("Usage: ProxyUtil [" + UtilityOption.RELOAD.getName()
+          + "] | [" + UtilityOption.GET.getName() + " <hostname> <#port> <path>] | ["
+          + UtilityOption.CHECKCERTS.getName() + " <hostname> <#port>]");
+      System.exit(1);
+    }
+    Configuration conf = new Configuration(false);
+    conf.addResource("ssl-client.xml");
+    conf.addResource("hdfsproxy-default.xml");
+
+    if (UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])) {
+      // reload user-certs.xml and user-permissions.xml files
+      sendCommand(conf, "/reloadPermFiles");
+    } else if (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0])) {
+      checkServerCertsExpirationDays(conf, args[1], Integer.parseInt(args[2]));
+    } else {
+      String hostname = args[1];
+      int port = Integer.parseInt(args[2]);
+      String path = args[3];
+      InputStream in = open(conf, hostname, port, path);
+      IOUtils.copyBytes(in, System.out, conf, false);
+      in.close();
+    }
+  }
+
+}
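
The main() method above doubles as a small command-line tool; it reads
ssl-client.xml and hdfsproxy-default.xml from the classpath, so the
ssl.client.keystore.* and ssl.client.truststore.* keys consumed by
setupSslProps() must be resolvable there. A usage sketch via the stock
"hadoop CLASSNAME" runner; the host name, port, and file path below are
placeholders:

    hadoop org.apache.hadoop.hdfsproxy.ProxyUtil -reloadPermFiles
    hadoop org.apache.hadoop.hdfsproxy.ProxyUtil -get proxy.example.com 8443 /user/alice/part-00000
    hadoop org.apache.hadoop.hdfsproxy.ProxyUtil -checkcerts proxy.example.com 8443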

Added: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java?rev=1077407&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/DummyLdapContext.java Fri Mar  4 04:11:54 2011
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.util.ArrayList;
+import java.util.Hashtable;
+
+import javax.naming.NamingEnumeration;
+import javax.naming.NamingException;
+import javax.naming.directory.Attribute;
+import javax.naming.directory.Attributes;
+import javax.naming.directory.BasicAttribute;
+import javax.naming.directory.BasicAttributes;
+import javax.naming.directory.SearchResult;
+import javax.naming.ldap.Control;
+import javax.naming.ldap.InitialLdapContext;
+
+class DummyLdapContext extends InitialLdapContext {
+  class ResultEnum<T> implements NamingEnumeration<T> {
+    private ArrayList<T> rl;
+
+    public ResultEnum() {
+      rl = new ArrayList<T>();
+    }
+
+    public ResultEnum(ArrayList<T> al) {
+      rl = al;
+    }
+
+    public boolean hasMoreElements() {
+      return !rl.isEmpty();
+    }
+
+    public T nextElement() {
+      T t = rl.get(0);
+      rl.remove(0);
+      return t;
+    }
+
+    public boolean hasMore() throws NamingException {
+      return !rl.isEmpty();
+    }
+
+    public T next() throws NamingException {
+      T t = rl.get(0);
+      rl.remove(0);
+      return t;
+    }
+
+    public void close() throws NamingException {
+    }
+  }
+
+  public DummyLdapContext() throws NamingException {
+  }
+
+  public DummyLdapContext(Hashtable<?, ?> environment, Control[] connCtls)
+      throws NamingException {
+  }
+
+  public NamingEnumeration<SearchResult> search(String name,
+      Attributes matchingAttributes, String[] attributesToReturn)
+      throws NamingException {
+    System.out.println("Searching Dummy LDAP Server Results:");
+    if (!"ou=proxyroles,dc=mycompany,dc=com".equalsIgnoreCase(name)) {
+      System.out.println("baseName mismatch");
+      return new ResultEnum<SearchResult>();
+    }
+    if (!"cn=127.0.0.1".equals((String) matchingAttributes.get("uniqueMember")
+        .get())) {
+      System.out.println("Ip address mismatch");
+      return new ResultEnum<SearchResult>();
+    }
+    BasicAttributes attrs = new BasicAttributes();
+    BasicAttribute uidAttr = new BasicAttribute("uid", "testuser");
+    attrs.put(uidAttr);
+    BasicAttribute groupAttr = new BasicAttribute("userClass", "testgroup");
+    attrs.put(groupAttr);
+    BasicAttribute locAttr = new BasicAttribute("documentLocation", "/testdir");
+    attrs.put(locAttr);
+    SearchResult sr = new SearchResult(null, null, attrs);
+    ArrayList<SearchResult> al = new ArrayList<SearchResult>();
+    al.add(sr);
+    return new ResultEnum<SearchResult>(al);
+  }
+
+  @SuppressWarnings("unchecked")
+  public static void main(String[] args) throws Exception {
+    DummyLdapContext dlc = new DummyLdapContext();
+    String baseName = "ou=proxyroles,dc=mycompany,dc=com";
+    Attributes matchAttrs = new BasicAttributes(true);
+    String[] attrIDs = { "uid", "documentLocation" };
+    NamingEnumeration<SearchResult> results = dlc.search(baseName, matchAttrs,
+        attrIDs);
+    if (results.hasMore()) {
+      SearchResult sr = results.next();
+      Attributes attrs = sr.getAttributes();
+      for (NamingEnumeration ne = attrs.getAll(); ne.hasMore();) {
+        Attribute attr = (Attribute) ne.next();
+        if ("uid".equalsIgnoreCase(attr.getID())) {
+          System.out.println("User ID = " + attr.get());
+        } else if ("documentLocation".equalsIgnoreCase(attr.getID())) {
+          System.out.println("Document Location = ");
+          for (NamingEnumeration e = attr.getAll(); e.hasMore();) {
+            System.out.println(e.next());
+          }
+        }
+      }
+    }
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/FindFreePort.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/FindFreePort.java?rev=1077407&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/FindFreePort.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/FindFreePort.java Fri Mar  4 04:11:54 2011
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import java.net.ServerSocket;
+import java.io.IOException;
+import java.net.BindException;
+import java.util.Random;
+
+public class FindFreePort {
+  private static final int MIN_AVAILABLE_PORT = 10000;
+  private static final int MAX_AVAILABLE_PORT = 65535;
+  private static Random random = new Random();
+  /**
+   * Finds a free port.
+   *
+   * @param port if negative, pick any free port; otherwise return the first
+   *             free port greater than {@code port}
+   * @return the free port found
+   * @throws IOException if no free port in the available range can be bound
+   */
+  public static int findFreePort(int port) throws IOException {
+    ServerSocket server;
+    if (port < 0) {
+      server = new ServerSocket(0);
+    } else {
+      int freePort = port + 1;
+      while (true) {
+        try {
+          server = new ServerSocket(freePort);
+          break;
+        } catch (BindException e) {
+          if (freePort >= MAX_AVAILABLE_PORT ||
+              freePort < MIN_AVAILABLE_PORT) {
+            throw e;
+          }
+          freePort++;
+        }
+      }
+    }
+    int fport = server.getLocalPort();
+    server.close();
+    return fport;
+  }
+  /**
+   * Finds a free port, starting from a random port in the available range.
+   *
+   * @return the free port found
+   * @throws IOException if no free port can be bound
+   */
+  public static int findFreePortRandom() throws IOException {
+    // exclude MAX_AVAILABLE_PORT itself, since findFreePort starts probing
+    // at port + 1 and rejects ports above the maximum
+    return findFreePort(MIN_AVAILABLE_PORT
+        + random.nextInt(MAX_AVAILABLE_PORT - MIN_AVAILABLE_PORT));
+  }
+
+  public static void main(String[] args) throws Exception {
+    if (args.length < 1) {
+      System.err.println("Usage: FindFreePort <-random | #port>");
+      System.exit(1);
+    }
+    int j = 0;
+    String cmd = args[j++];
+    if ("-random".equals(cmd)) {
+      System.out.println(findFreePortRandom());
+    } else {
+      System.out.println(findFreePort(Integer.parseInt(cmd)));
+    }
+  }
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java?rev=1077407&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/SimpleServlet.java Fri Mar  4 04:11:54 2011
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import java.io.PrintWriter;
+import java.io.IOException;
+
+
+/**
+ * Simple servlet used as a forwarding target in tests.
+ */
+public class SimpleServlet extends HttpServlet {
+
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  public void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws IOException {
+    response.setContentType("text/html");
+    PrintWriter out = response.getWriter();
+    out.print("<html><head/><body>");
+    out.print("A GET request");
+    out.print("</body></html>");
+    out.close();
+  }
+
+}

Modified: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java?rev=1077407&r1=1077406&r2=1077407&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestHdfsProxy.java Fri Mar  4 04:11:54 2011
@@ -32,6 +32,7 @@ import org.apache.log4j.Level;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -39,8 +40,6 @@ import org.apache.hadoop.hdfs.MiniDFSClu
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.tools.DistCp;
-import org.apache.hadoop.util.ToolRunner;
 
 /**
  * A JUnit test for HdfsProxy
@@ -51,7 +50,6 @@ public class TestHdfsProxy extends TestC
         .getLogger().setLevel(Level.OFF);
     ((Log4JLogger) DataNode.LOG).getLogger().setLevel(Level.OFF);
     ((Log4JLogger) FSNamesystem.LOG).getLogger().setLevel(Level.OFF);
-    ((Log4JLogger) DistCp.LOG).getLogger().setLevel(Level.ALL);
   }
 
   static final URI LOCAL_FS = URI.create("file:///");
@@ -209,7 +207,6 @@ public class TestHdfsProxy extends TestC
       cluster = new MiniDFSCluster(dfsConf, 2, true, null);
       cluster.waitActive();
 
-      final DistCp distcp = new DistCp(dfsConf);
       final FileSystem localfs = FileSystem.get(LOCAL_FS, dfsConf);
       final FileSystem hdfs = cluster.getFileSystem();
       final Configuration proxyConf = new Configuration(false);
@@ -219,12 +216,11 @@ public class TestHdfsProxy extends TestC
       final String namenode = hdfs.getUri().toString();
       if (namenode.startsWith("hdfs://")) {
         MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR + "/srcdat");
-        ToolRunner.run(distcp, new String[] { "-log", namenode + "/logs",
-            "file:///" + TEST_ROOT_DIR + "/srcdat", namenode + "/destdat" });
+        hdfs.copyFromLocalFile(
+            new Path("file:///" + TEST_ROOT_DIR + "/srcdat"),
+            new Path(namenode + "/destdat"));
         assertTrue("Source and destination directories do not match.",
             checkFiles(hdfs, "/destdat", files));
-        assertTrue("Log directory does not exist.", hdfs.exists(new Path(
-            namenode + "/logs")));
 
         proxyConf.set("proxy.http.test.listener.addr", "localhost:0");
         proxy = new HdfsProxy(proxyConf);
@@ -232,15 +228,18 @@ public class TestHdfsProxy extends TestC
         InetSocketAddress proxyAddr = NetUtils.createSocketAddr("localhost:0");
         final String realProxyAddr = proxyAddr.getHostName() + ":"
             + proxy.getPort();
-
-        ToolRunner.run(distcp, new String[] {
-            "hftp://" + realProxyAddr + "/destdat", namenode + "/copied1" });
+        final Path proxyUrl = new Path("hftp://" + realProxyAddr);
+	final FileSystem hftp = proxyUrl.getFileSystem(dfsConf);
+        FileUtil.copy(hftp, new Path(proxyUrl, "/destdat"),
+                      hdfs, new Path(namenode + "/copied1"),
+                      false, true, proxyConf);
+        
         assertTrue("Source and copied directories do not match.", checkFiles(
             hdfs, "/copied1", files));
 
-        ToolRunner.run(distcp, new String[] {
-            "hftp://" + realProxyAddr + "/destdat",
-            "file:///" + TEST_ROOT_DIR + "/copied2" });
+        FileUtil.copy(hftp, new Path(proxyUrl, "/destdat"),
+                      localfs, new Path(TEST_ROOT_DIR + "/copied2"),
+                      false, true, proxyConf);
         assertTrue("Source and copied directories do not match.", checkFiles(
             localfs, TEST_ROOT_DIR + "/copied2", files));
 

Added: hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java?rev=1077407&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/contrib/hdfsproxy/src/test/org/apache/hadoop/hdfsproxy/TestProxyUtil.java Fri Mar  4 04:11:54 2011
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfsproxy;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+
+/** Unit tests for ProxyUtil */
+public class TestProxyUtil extends TestCase {
+
+  private static final String TEST_PROXY_CONF_DIR =
+      System.getProperty("test.proxy.conf.dir", "./conf");
+  private static final String TEST_PROXY_HTTPS_PORT =
+      System.getProperty("test.proxy.https.port", "8443");
+
+  public void testSendCommand() throws Exception {
+    Configuration conf = new Configuration(false);
+    conf.addResource("ssl-client.xml");
+    conf.addResource("hdfsproxy-default.xml");
+    String address = "localhost:" + TEST_PROXY_HTTPS_PORT;
+    conf.set("hdfsproxy.https.address", address);
+    String hostFname = TEST_PROXY_CONF_DIR + "/hdfsproxy-hosts";
+    conf.set("hdfsproxy.hosts", hostFname);
+
+    assertTrue(ProxyUtil.sendCommand(conf, "/test/reloadPermFiles"));
+    assertTrue(ProxyUtil.sendCommand(conf, "/test/clearUgiCache"));
+
+    conf.set("hdfsproxy.https.address", "localhost:0");
+    assertFalse(ProxyUtil.sendCommand(conf, "/test/reloadPermFiles"));
+    assertFalse(ProxyUtil.sendCommand(conf, "/test/reloadPermFiles"));
+  }
+}