Posted to common-commits@hadoop.apache.org by cd...@apache.org on 2008/10/21 07:04:50 UTC

svn commit: r706518 - in /hadoop/core/trunk: CHANGES.txt src/core/org/apache/hadoop/http/FilterContainer.java src/core/org/apache/hadoop/http/HttpServer.java src/test/org/apache/hadoop/http/TestGlobalFilter.java

Author: cdouglas
Date: Mon Oct 20 22:04:50 2008
New Revision: 706518

URL: http://svn.apache.org/viewvc?rev=706518&view=rev
Log:
HADOOP-4284. Support filters that apply to all requests, or global filters,
to HttpServer. Contributed by Kan Zhang.
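
For context, the new addGlobalFilter call is wired in through a FilterInitializer, the same way the test added below does it. The following is a minimal sketch only: MyGlobalFilter and its Initializer are hypothetical names, and the sketch assumes it lives in the org.apache.hadoop.http package (FilterInitializer.initFilter has package access):

    // Sketch, not part of the patch: a filter applied to every request,
    // registered through the new FilterContainer.addGlobalFilter().
    package org.apache.hadoop.http;

    public class MyGlobalFilter implements javax.servlet.Filter {
      public void init(javax.servlet.FilterConfig conf) {}
      public void destroy() {}

      public void doFilter(javax.servlet.ServletRequest req,
          javax.servlet.ServletResponse res, javax.servlet.FilterChain chain)
          throws java.io.IOException, javax.servlet.ServletException {
        // Runs for every URI, including /logs/* and /static/*,
        // not just the user-facing *.html and *.jsp mappings.
        chain.doFilter(req, res);
      }

      /** Registered via HttpServer.FILTER_INITIALIZER_PROPERTY, as in the new test. */
      public static class Initializer extends FilterInitializer {
        void initFilter(FilterContainer container) {
          container.addGlobalFilter("myGlobal", MyGlobalFilter.class.getName(), null);
        }
      }
    }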

Added:
    hadoop/core/trunk/src/test/org/apache/hadoop/http/TestGlobalFilter.java
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java
    hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=706518&r1=706517&r2=706518&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Mon Oct 20 22:04:50 2008
@@ -43,6 +43,9 @@
     HADOOP-4238. When listing jobs, if scheduling information isn't available 
     print NA instead of empty output. (Sreekanth Ramakrishnan via johan)
 
+    HADOOP-4284. Support filters that apply to all requests, or global filters,
+    to HttpServer. (Kan Zhang via cdouglas)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java?rev=706518&r1=706517&r2=706518&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/http/FilterContainer.java Mon Oct 20 22:04:50 2008
@@ -30,4 +30,11 @@
    * @param parameters a map from parameter names to initial values
    */
   void addFilter(String name, String classname, Map<String, String> parameters);
+  /**
+   * Add a global filter to the container.
+   * @param name filter name
+   * @param classname filter class name
+   * @param parameters a map from parameter names to initial values
+   */
+  void addGlobalFilter(String name, String classname, Map<String, String> parameters);
 }
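
To illustrate the extended contract, here is a purely hypothetical FilterContainer implementation that only records registrations (useful, for example, when unit-testing a FilterInitializer). It assumes addFilter and addGlobalFilter are the interface's only methods, as the hunk above suggests; HttpServer, modified below, is the real implementation.

    // Illustrative only, not part of this change.
    package org.apache.hadoop.http;

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    public class RecordingFilterContainer implements FilterContainer {
      public final List<String> filters = new ArrayList<String>();
      public final List<String> globalFilters = new ArrayList<String>();

      public void addFilter(String name, String classname,
          Map<String, String> parameters) {
        filters.add(name + "=" + classname);        // user-facing URL filter
      }

      public void addGlobalFilter(String name, String classname,
          Map<String, String> parameters) {
        globalFilters.add(name + "=" + classname);  // applies to all requests
      }
    }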

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java?rev=706518&r1=706517&r2=706518&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/http/HttpServer.java Mon Oct 20 22:04:50 2008
@@ -24,6 +24,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.HashMap;
 
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
@@ -35,10 +36,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.log.LogLevel;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.mortbay.http.HttpContext;
 import org.mortbay.http.SocketListener;
 import org.mortbay.http.SslListener;
-import org.mortbay.http.handler.ResourceHandler;
 import org.mortbay.jetty.servlet.Dispatcher;
 import org.mortbay.jetty.servlet.FilterHolder;
 import org.mortbay.jetty.servlet.WebApplicationContext;
@@ -60,6 +59,8 @@
 
   protected final org.mortbay.jetty.Server webServer;
   protected final WebApplicationContext webAppContext;
+  protected final Map<WebApplicationContext, Boolean> defaultContexts = 
+      new HashMap<WebApplicationContext, Boolean>();
   protected final boolean findPort;
   protected final SocketListener listener;
   private SslListener sslListener;
@@ -91,6 +92,7 @@
 
     final String appDir = getWebAppsPath();
     webAppContext = webServer.addWebApplication("/", appDir + "/" + name);
+    addDefaultApps(appDir);
 
     final FilterInitializer[] initializers = getFilterInitializers(conf); 
     if (initializers != null) {
@@ -98,7 +100,7 @@
         c.initFilter(this);
       }
     }
-    addWebapps(appDir);
+    addDefaultServlets();
   }
 
   /** Get an array of FilterConfiguration specified in the conf */
@@ -121,11 +123,11 @@
   }
 
   /**
-   * Add webapps and servlets.
+   * Add default apps.
    * @param appDir The application directory
    * @throws IOException
    */
-  protected void addWebapps(final String appDir) throws IOException {
+  protected void addDefaultApps(final String appDir) throws IOException {
     // set up the context for "/logs/" if "hadoop.log.dir" property is defined. 
     String logDir = System.getProperty("hadoop.log.dir");
     if (logDir != null) {
@@ -134,7 +136,12 @@
 
     // set up the context for "/static/*"
     addContext("/static/*", appDir + "/static", true);
-
+  }
+  
+  /**
+   * Add default servlets.
+   */
+  protected void addDefaultServlets() {
     // set up default servlets
     addServlet("stacks", "/stacks", StackServlet.class);
     addServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
@@ -145,16 +152,11 @@
    * @param pathSpec The path spec for the context
    * @param dir The directory containing the context
    * @param isFiltered if true, the servlet is added to the filter path mapping 
+   * @throws IOException
    */
-  protected void addContext(String pathSpec, String dir, boolean isFiltered) {
-    HttpContext context = new HttpContext();
-    context.setContextPath(pathSpec);
-    context.setResourceBase(dir);
-    context.addHandler(new ResourceHandler());
-    webServer.addContext(context);
-    if (isFiltered) {
-      addFilterPathMapping(pathSpec);
-    }
+  protected void addContext(String pathSpec, String dir, boolean isFiltered) throws IOException {
+    WebApplicationContext webAppCtx = webServer.addWebApplication(pathSpec, dir);
+    defaultContexts.put(webAppCtx, isFiltered);
   }
 
   /**
@@ -176,7 +178,7 @@
   public void addServlet(String name, String pathSpec,
       Class<? extends HttpServlet> clazz) {
     addInternalServlet(name, pathSpec, clazz);
-    addFilterPathMapping(pathSpec);
+    addFilterPathMapping(pathSpec, webAppContext);
   }
 
   /**
@@ -207,11 +209,42 @@
   /** {@inheritDoc} */
   public void addFilter(String name, String classname,
       Map<String, String> parameters) {
-    WebApplicationHandler handler = webAppContext.getWebApplicationHandler();
 
-    LOG.info("adding " + name + " (class=" + classname + ")");
+    final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
+    defineFilter(webAppContext, name, classname, parameters, USER_FACING_URLS);
+    for (Map.Entry<WebApplicationContext, Boolean> e : defaultContexts
+        .entrySet()) {
+      if (e.getValue()) {
+        WebApplicationContext ctx = e.getKey();
+        defineFilter(ctx, name, classname, parameters, USER_FACING_URLS);
+        WebApplicationHandler handler = ctx.getWebApplicationHandler();
+        handler.addFilterPathMapping(ctx.getContextPath() + "/*", name,
+            Dispatcher.__ALL);
+        LOG.info("Added filter " + name + " (class=" + classname
+            + ") to context path " + ctx.getContextPath() + "/*");
+      }
+    }
     filterNames.add(name);
+  }
 
+  /** {@inheritDoc} */
+  public void addGlobalFilter(String name, String classname,
+      Map<String, String> parameters) {
+    final String[] ALL_URLS = { "/*" };
+    defineFilter(webAppContext, name, classname, parameters, ALL_URLS);
+    for (WebApplicationContext ctx : defaultContexts.keySet()) {
+      defineFilter(ctx, name, classname, parameters, ALL_URLS);
+    }
+    LOG.info("Added global filter" + name + " (class=" + classname + ")");
+  }
+
+  /**
+   * Define a filter for a context and set up default url mappings.
+   */
+  protected void defineFilter(WebApplicationContext ctx, String name,
+      String classname, Map<String, String> parameters, String[] urls) {
+
+    WebApplicationHandler handler = ctx.getWebApplicationHandler();
     FilterHolder holder = handler.defineFilter(name, classname);
     if (parameters != null) {
       for(Map.Entry<String, String> e : parameters.entrySet()) {
@@ -219,8 +252,7 @@
       }
     }
 
-    final String[] USER_FACING_URLS = {"*.html", "*.jsp"};
-    for(String url : USER_FACING_URLS) {
+    for (String url : urls) {
       handler.addFilterPathMapping(url, name, Dispatcher.__ALL);
     }
   }
@@ -228,14 +260,15 @@
   /**
    * Add the path spec to the filter path mapping.
    * @param pathSpec The path spec
+   * @param webAppCtx The WebApplicationContext to add to
    */
-  protected void addFilterPathMapping(String pathSpec) {
-    WebApplicationHandler handler = webAppContext.getWebApplicationHandler();
+  protected void addFilterPathMapping(String pathSpec, WebApplicationContext webAppCtx) {
+    WebApplicationHandler handler = webAppCtx.getWebApplicationHandler();
     for(String name : filterNames) {
       handler.addFilterPathMapping(pathSpec, name, Dispatcher.__ALL);
     }
   }
-
+  
   /**
    * Get the value in the webapp context.
    * @param name The name of the attribute

Added: hadoop/core/trunk/src/test/org/apache/hadoop/http/TestGlobalFilter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/http/TestGlobalFilter.java?rev=706518&view=auto
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/http/TestGlobalFilter.java (added)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/http/TestGlobalFilter.java Mon Oct 20 22:04:50 2008
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Map;
+import java.util.Random;
+import java.util.TreeMap;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+public class TestGlobalFilter extends junit.framework.TestCase {
+  static final Log LOG = LogFactory.getLog(HttpServer.class);
+  static final Map<String, Integer> COUNTS = new TreeMap<String, Integer>(); 
+
+  /** A very simple filter which counts number of accesses for each uri */
+  static public class CountingFilter implements Filter {
+    private FilterConfig filterConfig = null;
+
+    public void init(FilterConfig filterConfig) {
+      this.filterConfig = filterConfig;
+    }
+
+    public void destroy() {
+      this.filterConfig = null;
+    }
+
+    public void doFilter(ServletRequest request, ServletResponse response,
+        FilterChain chain) throws IOException, ServletException {
+      if (filterConfig == null)
+         return;
+
+      String uri = ((HttpServletRequest)request).getRequestURI();
+      LOG.info("filtering " + uri);
+      Integer value = COUNTS.get(uri);
+      value = value == null? 1: value + 1;
+      COUNTS.put(uri, value);
+      chain.doFilter(request, response);
+    }
+
+    /** Configuration for CountingFilter */
+    static public class Initializer extends FilterInitializer {
+      public Initializer() {}
+
+      void initFilter(FilterContainer container) {
+        container.addGlobalFilter("counting", CountingFilter.class.getName(), null);
+      }
+    }
+  }
+  
+  
+  /** Access a URL, ignoring some IOExceptions, such as when the page does not exist. */
+  static void access(String urlstring) throws IOException {
+    LOG.warn("access " + urlstring);
+    URL url = new URL(urlstring);
+    URLConnection connection = url.openConnection();
+    connection.connect();
+    
+    try {
+      BufferedReader in = new BufferedReader(new InputStreamReader(
+          connection.getInputStream()));
+      try {
+        for(; in.readLine() != null; );
+      } finally {
+        in.close();
+      }
+    } catch(IOException ioe) {
+      LOG.warn("urlstring=" + urlstring, ioe);
+    }
+  }
+
+  public void testServletFilter() throws Exception {
+    Configuration conf = new Configuration();
+    
+    //start a http server with CountingFilter
+    conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
+        CountingFilter.Initializer.class.getName());
+    HttpServer http = new HttpServer("datanode", "localhost", 0, true, conf);
+    http.start();
+
+    final String fsckURL = "/fsck";
+    final String stacksURL = "/stacks";
+    final String ajspURL = "/a.jsp";
+    final String listPathsURL = "/listPaths";
+    final String dataURL = "/data";
+    final String streamFile = "/streamFile";
+    final String rootURL = "/";
+    final String allURL = "/*";
+    final String outURL = "/static/a.out";
+    final String logURL = "/logs/a.log";
+
+    final String[] urls = {fsckURL, stacksURL, ajspURL, listPathsURL, 
+        dataURL, streamFile, rootURL, allURL, outURL, logURL};
+    final Random ran = new Random();
+    final int[] sequence = new int[100];
+    final int[] counts = new int[urls.length]; 
+
+    //generate a random sequence and update counts 
+    for(int i = 0; i < sequence.length; i++) {
+      sequence[i] = ran.nextInt(urls.length);
+      counts[sequence[i]]++;
+    }
+
+    //access the urls as the sequence
+    final String prefix = "http://localhost:" + http.getPort();
+    try {
+      for(int i = 0; i < sequence.length; i++) {
+        access(prefix + urls[sequence[i]]);
+      }
+    } finally {
+      http.stop();
+    }
+
+    LOG.info("COUNTS = " + COUNTS);
+    
+    //verify counts
+    for(int i = 0; i < urls.length; i++) {
+      if (counts[i] == 0) {
+        assertFalse(COUNTS.containsKey(urls[i]));
+      } else {
+        assertEquals(counts[i], COUNTS.remove(urls[i]).intValue());
+      }
+    }
+    assertTrue(COUNTS.isEmpty());
+  }
+}
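
To run the new test on its own, the usual trunk convention should apply (the test target's testcase property is an assumption about the build, not part of this patch):

    ant test -Dtestcase=TestGlobalFilter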