Posted to hdfs-commits@hadoop.apache.org by tu...@apache.org on 2011/12/08 20:25:33 UTC

svn commit: r1212060 [8/8] - in /hadoop/common/trunk/hadoop-hdfs-project: ./ hadoop-hdfs-httpfs/ hadoop-hdfs-httpfs/src/ hadoop-hdfs-httpfs/src/main/ hadoop-hdfs-httpfs/src/main/conf/ hadoop-hdfs-httpfs/src/main/java/ hadoop-hdfs-httpfs/src/main/java/o...

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestHdfsHelper.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.Test;
+import org.junit.runners.model.FrameworkMethod;
+import org.junit.runners.model.Statement;
+
+import java.io.File;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class TestHdfsHelper extends TestDirHelper {
+
+  @Test
+  public void dummy() {
+  }
+
+  public static final String HADOOP_MINI_HDFS = "test.hadoop.hdfs";
+
+  private static ThreadLocal<Configuration> HDFS_CONF_TL = new InheritableThreadLocal<Configuration>();
+
+  private static ThreadLocal<Path> HDFS_TEST_DIR_TL = new InheritableThreadLocal<Path>();
+
+  @Override
+  public Statement apply(Statement statement, FrameworkMethod frameworkMethod, Object o) {
+    TestHdfs testHdfsAnnotation = frameworkMethod.getAnnotation(TestHdfs.class);
+    if (testHdfsAnnotation != null) {
+      statement = new HdfsStatement(statement, frameworkMethod.getName());
+    }
+    return super.apply(statement, frameworkMethod, o);
+  }
+
+  private static class HdfsStatement extends Statement {
+    private Statement statement;
+    private String testName;
+
+    public HdfsStatement(Statement statement, String testName) {
+      this.statement = statement;
+      this.testName = testName;
+    }
+
+    @Override
+    public void evaluate() throws Throwable {
+      MiniDFSCluster miniHdfs = null;
+      Configuration conf = HadoopUsersConfTestHelper.getBaseConf();
+      if (Boolean.parseBoolean(System.getProperty(HADOOP_MINI_HDFS, "true"))) {
+        miniHdfs = startMiniHdfs(conf);
+        conf = miniHdfs.getConfiguration(0);
+      }
+      try {
+        HDFS_CONF_TL.set(conf);
+        HDFS_TEST_DIR_TL.set(resetHdfsTestDir(conf));
+        statement.evaluate();
+      } finally {
+        HDFS_CONF_TL.remove();
+        HDFS_TEST_DIR_TL.remove();
+      }
+    }
+
+    private static AtomicInteger counter = new AtomicInteger();
+
+    private Path resetHdfsTestDir(Configuration conf) {
+
+      Path testDir = new Path("./" + TEST_DIR_ROOT, testName + "-" + counter.getAndIncrement());
+      try {
+        // currentUser
+        FileSystem fs = FileSystem.get(conf);
+        fs.delete(testDir, true);
+        fs.mkdirs(testDir);
+      } catch (Exception ex) {
+        throw new RuntimeException(ex);
+      }
+      return testDir;
+    }
+  }
+
+  /**
+   * Returns the HDFS test directory for the current test, only available when the
+   * test method has been annotated with {@link TestHdfs}.
+   *
+   * @return the HDFS test directory for the current test. It is a full/absolute
+   *         <code>Path</code>.
+   */
+  public static Path getHdfsTestDir() {
+    Path testDir = HDFS_TEST_DIR_TL.get();
+    if (testDir == null) {
+      throw new IllegalStateException("This test does not use @TestHdfs");
+    }
+    return testDir;
+  }
+
+  /**
+   * Returns a <code>Configuration</code> preconfigured with the HDFS test cluster
+   * settings for testing. This configuration is only available when the test
+   * method has been annotated with {@link TestHdfs}. Refer to the {@link HTestCase}
+   * header for details.
+   *
+   * @return a <code>Configuration</code> preconfigured with the HDFS test cluster
+   *         settings for testing.
+   */
+  public static Configuration getHdfsConf() {
+    Configuration conf = HDFS_CONF_TL.get();
+    if (conf == null) {
+      throw new IllegalStateException("This test does not use @TestHdfs");
+    }
+    return new Configuration(conf);
+  }
+
+  private static MiniDFSCluster MINI_DFS = null;
+
+  private static synchronized MiniDFSCluster startMiniHdfs(Configuration conf) throws Exception {
+    if (MINI_DFS == null) {
+      if (System.getProperty("hadoop.log.dir") == null) {
+        System.setProperty("hadoop.log.dir", new File(TEST_DIR_ROOT, "hadoop-log").getAbsolutePath());
+      }
+      if (System.getProperty("test.build.data") == null) {
+        System.setProperty("test.build.data", new File(TEST_DIR_ROOT, "hadoop-data").getAbsolutePath());
+      }
+
+      conf = new Configuration(conf);
+      HadoopUsersConfTestHelper.addUserConf(conf);
+      conf.set("fs.hdfs.impl.disable.cache", "true");
+      conf.set("dfs.block.access.token.enable", "false");
+      conf.set("dfs.permissions", "true");
+      conf.set("hadoop.security.authentication", "simple");
+      MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+      builder.numDataNodes(2);
+      MiniDFSCluster miniHdfs = builder.build();
+      FileSystem fileSystem = miniHdfs.getFileSystem();
+      fileSystem.mkdirs(new Path("/tmp"));
+      fileSystem.mkdirs(new Path("/user"));
+      fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
+      fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
+      MINI_DFS = miniHdfs;
+    }
+    return MINI_DFS;
+  }
+
+}
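
Taken together, TestHdfsHelper is meant to be registered as a JUnit method rule, with
@TestHdfs on individual test methods switching on the mini HDFS wiring. The sketch below
is only an illustration of that contract: the ExampleHdfsTest class name is hypothetical,
and in this module the rule is expected to be declared in a shared base test class rather
than inline as shown here.

  package org.apache.hadoop.test;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.junit.Rule;
  import org.junit.Test;
  import org.junit.rules.MethodRule;

  // Hypothetical test class, not part of this commit: it shows how a method
  // annotated with @TestHdfs obtains the per-test HDFS directory and a copy of
  // the MiniDFSCluster configuration from TestHdfsHelper.
  public class ExampleHdfsTest {

    // TestHdfsHelper overrides MethodRule#apply (via TestDirHelper), so it is
    // assumed here that it can be registered directly as a JUnit rule.
    @Rule
    public MethodRule hdfsHelper = new TestHdfsHelper();

    @Test
    @TestHdfs
    public void writesIntoHdfsTestDir() throws Exception {
      Configuration conf = TestHdfsHelper.getHdfsConf();  // copy of the cluster conf
      Path testDir = TestHdfsHelper.getHdfsTestDir();     // recreated for every @TestHdfs method
      FileSystem fs = FileSystem.get(conf);
      fs.create(new Path(testDir, "foo.txt")).close();
    }
  }

Because of the HADOOP_MINI_HDFS check in HdfsStatement.evaluate(), running with
-Dtest.hadoop.hdfs=false skips starting the MiniDFSCluster and the test runs against
whatever HDFS the base configuration from HadoopUsersConfTestHelper points to.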

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJetty.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJetty.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJetty.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJetty.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+/**
+ * Annotation for tests using {@link TestJettyHelper} to indicate that the test method
+ * requires a Jetty servlet container.
+ * <p/>
+ * {@link TestJettyHelper#getJettyServer()} returns a ready-to-configure Jetty
+ * servlet container. After registering contexts, servlets and filters, the Jetty
+ * server must be started (<code>getJettyServer().start()</code>). The Jetty server
+ * is automatically stopped at the end of the test method invocation.
+ * <p/>
+ * Use the {@link TestJettyHelper#getJettyURL()} to obtain the base URL
+ * (schema://host:port) of the Jetty server.
+ * <p/>
+ * Refer to the {@link HTestCase} class for more details.
+ */
+@Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
+@Target(java.lang.annotation.ElementType.METHOD)
+public @interface TestJetty {
+}

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java Thu Dec  8 19:25:28 2011
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.junit.Test;
+import org.junit.rules.MethodRule;
+import org.junit.runners.model.FrameworkMethod;
+import org.junit.runners.model.Statement;
+import org.mortbay.jetty.Server;
+
+import java.net.InetAddress;
+import java.net.MalformedURLException;
+import java.net.ServerSocket;
+import java.net.URL;
+
+public class TestJettyHelper implements MethodRule {
+
+  @Test
+  public void dummy() {
+  }
+
+  private static ThreadLocal<Server> TEST_SERVLET_TL = new InheritableThreadLocal<Server>();
+
+  @Override
+  public Statement apply(final Statement statement, final FrameworkMethod frameworkMethod, final Object o) {
+    return new Statement() {
+      @Override
+      public void evaluate() throws Throwable {
+        Server server = null;
+        TestJetty testJetty = frameworkMethod.getAnnotation(TestJetty.class);
+        if (testJetty != null) {
+          server = createJettyServer();
+        }
+        try {
+          TEST_SERVLET_TL.set(server);
+          statement.evaluate();
+        } finally {
+          TEST_SERVLET_TL.remove();
+          if (server != null && server.isRunning()) {
+            try {
+              server.stop();
+            } catch (Exception ex) {
+              throw new RuntimeException("Could not stop embedded servlet container, " + ex.getMessage(), ex);
+            }
+          }
+        }
+      }
+    };
+  }
+
+  private Server createJettyServer() {
+    try {
+
+      String host = InetAddress.getLocalHost().getHostName();
+      ServerSocket ss = new ServerSocket(0);
+      int port = ss.getLocalPort();
+      ss.close();
+      Server server = new Server(0);
+      server.getConnectors()[0].setHost(host);
+      server.getConnectors()[0].setPort(port);
+      return server;
+    } catch (Exception ex) {
+      throw new RuntimeException("Could not stop embedded servlet container, " + ex.getMessage(), ex);
+    }
+  }
+
+  /**
+   * Returns a Jetty server ready to be configured and then started. This server
+   * is only available when the test method has been annotated with
+   * {@link TestJetty}. Refer to {@link HTestCase} header for details.
+   * <p/>
+   * Once configured, the Jetty server should be started. The server will be
+   * automatically stopped when the test method ends.
+   *
+   * @return a Jetty server ready to be configured and then started.
+   */
+  public static Server getJettyServer() {
+    Server server = TEST_SERVLET_TL.get();
+    if (server == null) {
+      throw new IllegalStateException("This test does not use @TestJetty");
+    }
+    return server;
+  }
+
+  /**
+   * Returns the base URL (SCHEMA://HOST:PORT) of the test Jetty server
+   * (see {@link #getJettyServer()}) once started.
+   *
+   * @return the base URL (SCHEMA://HOST:PORT) of the test Jetty server.
+   */
+  public static URL getJettyURL() {
+    Server server = TEST_SERVLET_TL.get();
+    if (server == null) {
+      throw new IllegalStateException("This test does not use @TestJetty");
+    }
+    try {
+      return new URL("http://" + server.getConnectors()[0].getHost() + ":" + server.getConnectors()[0].getPort());
+    } catch (MalformedURLException ex) {
+      throw new RuntimeException("It should never happen, " + ex.getMessage(), ex);
+    }
+  }
+
+}
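
TestJettyHelper follows the same method-rule pattern for Jetty: the rule hands the test a
pre-wired Server, the test registers its contexts and servlets and starts it, and the rule
stops the server when the method returns, while getJettyURL() provides the base URL for
building requests. The sketch below is only an illustration: ExampleJettyTest and
PingServlet are hypothetical names, and it assumes the Jetty 6 (org.mortbay) servlet API
used elsewhere in this module.

  package org.apache.hadoop.test;

  import org.junit.Assert;
  import org.junit.Rule;
  import org.junit.Test;
  import org.junit.rules.MethodRule;
  import org.mortbay.jetty.Server;
  import org.mortbay.jetty.servlet.Context;

  import javax.servlet.http.HttpServlet;
  import javax.servlet.http.HttpServletRequest;
  import javax.servlet.http.HttpServletResponse;
  import java.io.IOException;
  import java.net.HttpURLConnection;
  import java.net.URL;

  // Hypothetical test class, not part of this commit: it walks through the
  // lifecycle described in the TestJetty annotation javadoc.
  public class ExampleJettyTest {

    @Rule
    public MethodRule jettyHelper = new TestJettyHelper();

    // Trivial servlet used only by this example.
    public static class PingServlet extends HttpServlet {
      @Override
      protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        resp.getWriter().write("pong");
      }
    }

    @Test
    @TestJetty
    public void servesRegisteredServlet() throws Exception {
      Server server = TestJettyHelper.getJettyServer();
      Context context = new Context();
      context.setContextPath("/");
      context.addServlet(PingServlet.class, "/*");
      server.addHandler(context);
      server.start();  // stopped automatically by TestJettyHelper after the test

      URL url = new URL(TestJettyHelper.getJettyURL(), "/ping");
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    }
  }

Since createJettyServer() binds to a free port, tests should derive request URLs from
getJettyURL() rather than hard-coding a host or port.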

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp1.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp1.properties?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp1.properties (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp1.properties Thu Dec  8 19:25:28 2011
@@ -0,0 +1,13 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp2.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp2.properties?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp2.properties (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/TestServerWebApp2.properties Thu Dec  8 19:25:28 2011
@@ -0,0 +1,15 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+# 
+#

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/classutils.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/classutils.txt?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/classutils.txt (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/classutils.txt Thu Dec  8 19:25:28 2011
@@ -0,0 +1 @@
+dummy

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/default-log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/default-log4j.properties?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/default-log4j.properties (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/default-log4j.properties Thu Dec  8 19:25:28 2011
@@ -0,0 +1,22 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#log4j.appender.test=org.apache.log4j.varia.NullAppender
+#log4j.appender.test=org.apache.log4j.ConsoleAppender
+log4j.appender.test=org.apache.log4j.FileAppender
+log4j.appender.test.File=${test.dir}/test.log
+log4j.appender.test.Append=true
+log4j.appender.test.layout=org.apache.log4j.PatternLayout
+log4j.appender.test.layout.ConversionPattern=%d{ISO8601} %5p %20c{1}: %4L - %m%n
+log4j.rootLogger=ALL, test
+

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/server.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/server.properties?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/server.properties (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/server.properties Thu Dec  8 19:25:28 2011
@@ -0,0 +1,13 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver-default.xml?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver-default.xml (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver-default.xml Thu Dec  8 19:25:28 2011
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>testserver.a</name>
+    <value>default</value>
+  </property>
+</configuration>

Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver.properties?rev=1212060&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver.properties (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/testserver.properties Thu Dec  8 19:25:28 2011
@@ -0,0 +1,13 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1212060&r1=1212059&r2=1212060&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Thu Dec  8 19:25:28 2011
@@ -24,6 +24,9 @@ Trunk (unreleased changes)
     HDFS-2430. The number of failed or low-resource volumes the NN can tolerate
                should be configurable. (atm)
 
+    HDFS-2178. Contributing Hoop to HDFS, replacement for HDFS proxy with 
+    read/write capabilities. (tucu)
+
   IMPROVEMENTS
 
     HADOOP-7524 Change RPC to allow multiple protocols including multiple 

Modified: hadoop/common/trunk/hadoop-hdfs-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/pom.xml?rev=1212060&r1=1212059&r2=1212060&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/pom.xml (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/pom.xml Thu Dec  8 19:25:28 2011
@@ -29,6 +29,7 @@
 
   <modules>
     <module>hadoop-hdfs</module>
+    <module>hadoop-hdfs-httpfs</module>
   </modules>
 
   <build>