Posted to common-commits@hadoop.apache.org by su...@apache.org on 2011/09/29 02:10:03 UTC
svn commit: r1177117 - in
/hadoop/common/branches/HDFS-1623/hadoop-common-project: ./ hadoop-common/
hadoop-common/src/main/docs/
hadoop-common/src/main/docs/src/documentation/content/xdocs/
hadoop-common/src/main/java/ hadoop-common/src/main/java/org/...
Author: suresh
Date: Thu Sep 29 00:09:56 2011
New Revision: 1177117
URL: http://svn.apache.org/viewvc?rev=1177117&view=rev
Log:
Merging trunk to HDFS-1623 branch.
Added:
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-metrics2.properties
- copied unchanged from r1177115, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-metrics2.properties
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties
- copied unchanged from r1177115, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
- copied unchanged from r1177115, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
Modified:
hadoop/common/branches/HDFS-1623/hadoop-common-project/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java
Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Thu Sep 29 00:09:56 2011
@@ -0,0 +1 @@
+target
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt Thu Sep 29 00:09:56 2011
@@ -2,6 +2,12 @@ Hadoop Change Log
Trunk (unreleased changes)
+ INCOMPATIBLE CHANGES
+
+ HADOOP-7542. Change Configuration XML format to 1.1 to add support for
serializing additional characters. This requires XML 1.1
+ support in the XML parser (Christopher Egner via harsh)
+
IMPROVEMENTS
HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
@@ -13,6 +19,11 @@ Trunk (unreleased changes)
HADOOP-7635. RetryInvocationHandler should release underlying resources on
close (atm)
+
+ HADOOP-7668. Add a NetUtils method that can tell if an InetAddress
+ belongs to local host. (suresh)
+
+ HADOOP-7687 Make getProtocolSignature public (sanjay)
BUGS
@@ -23,6 +34,16 @@ Trunk (unreleased changes)
HADOOP-7641. Add Apache License to template config files (Eric Yang via atm)
+ HADOOP-7621. alfredo config should be in a file not readable by users
+ (Alejandro Abdelnur via atm)
+
+ HADOOP-7669 Fix newly introduced release audit warning.
+ (Uma Maheswara Rao G via stevel)
+
+ HADOOP-6220. HttpServer wraps InterruptedExceptions by IOExceptions if interrupted
+ in startup (stevel)
+
+
Release 0.23.0 - Unreleased
INCOMPATIBLE CHANGES
@@ -287,9 +308,6 @@ Release 0.23.0 - Unreleased
HADOOP-7430. Improve error message when moving to trash fails due to
quota issue. (Ravi Prakash via mattf)
- HADOOP-7457. Remove out-of-date Chinese language documentation.
- (Jakob Homan via eli)
-
HADOOP-7444. Add Checksum API to verify and calculate checksums "in bulk"
(todd)
@@ -388,6 +406,13 @@ Release 0.23.0 - Unreleased
HADOOP-7599. Script improvements to setup a secure Hadoop cluster
(Eric Yang via ddas)
+ HADOOP-7639. Enhance HttpServer to allow passing path-specs for filtering,
+ so that servers like the Yarn WebApp can have the paths served by
+ their own injected servlets filtered. (Thomas Graves via vinodkv)
+
+ HADOOP-7575. Enhanced LocalDirAllocator to support fully-qualified
+ paths. (Jonathan Eagles via vinodkv)
+
OPTIMIZATIONS
HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@@ -398,6 +423,9 @@ Release 0.23.0 - Unreleased
BUG FIXES
+ HADOOP-7630. hadoop-metrics2.properties should have a property *.period
+ set to a default value for metrics. (Eric Yang via mattf)
+
HADOOP-7327. FileSystem.listStatus() throws NullPointerException instead of
IOException upon access permission failure. (mattf)
@@ -603,6 +631,9 @@ Release 0.23.0 - Unreleased
HADOOP-7631. Fixes a config problem to do with running streaming jobs
(Eric Yang via ddas)
+ HADOOP-7662. Fixed logs servlet to use the pathspec '/*' instead of '/'
+ for correct filtering. (Thomas Graves via vinodkv)
+
Release 0.22.0 - Unreleased
INCOMPATIBLE CHANGES
@@ -1118,6 +1149,11 @@ Release 0.22.0 - Unreleased
HADOOP-7568. SequenceFile should not print into stdout.
(Plamen Jeliazkov via shv)
+ HADOOP-7663. Fix TestHDFSTrash failure. (Mayank Bansal via shv)
+
+ HADOOP-7457. Remove out-of-date Chinese language documentation.
+ (Jakob Homan via eli)
+
Release 0.21.1 - Unreleased
IMPROVEMENTS
Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep 29 00:09:56 2011
@@ -1,5 +1,5 @@
/hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1173011
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1177115
/hadoop/core/branches/branch-0.18/CHANGES.txt:727226
/hadoop/core/branches/branch-0.19/CHANGES.txt:713112
/hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278
Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep 29 00:09:56 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1173011
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1177115
/hadoop/core/branches/branch-0.19/src/docs:713112
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml Thu Sep 29 00:09:56 2011
@@ -82,10 +82,12 @@
<code>36000</code>.
</p>
- <p><code>hadoop.http.authentication.signature.secret</code>: The signature secret for
- signing the authentication tokens. If not set a random secret is generated at
+ <p><code>hadoop.http.authentication.signature.secret.file</code>: The signature secret
+ file for signing the authentication tokens. If not set a random secret is generated at
startup time. The same secret should be used for all nodes in the cluster, JobTracker,
- NameNode, DataNode and TaskTracker. The default value is a <code>hadoop</code> value.
+ NameNode, DataNode and TaskTracker. The default value is
+ <code>${user.home}/hadoop-http-auth-signature-secret</code>.
+ IMPORTANT: This file should be readable only by the Unix user running the daemons.
</p>
<p><code>hadoop.http.authentication.cookie.domain</code>: The domain to use for the HTTP
Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep 29 00:09:56 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1173011
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1177115
/hadoop/core/branches/branch-0.19/core/src/java:713112
/hadoop/core/trunk/src/core:776175-785643,785929-786278
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Thu Sep 29 00:09:56 2011
@@ -1632,6 +1632,10 @@ public class Configuration implements It
try {
doc =
DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
+
+ // Allow a broader set of control characters to appear in job confs.
+ // cf https://issues.apache.org/jira/browse/MAPREDUCE-109
+ doc.setXmlVersion( "1.1" );
} catch (ParserConfigurationException pe) {
throw new IOException(pe);
}
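
For context, a minimal sketch (not part of this commit) of the behavior the
XML 1.1 switch enables: a Configuration can now round-trip values containing
control characters, provided the JAXP parser in use supports XML 1.1, as the
CHANGES.txt entry above notes.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.hadoop.conf.Configuration;

    public class Xml11RoundTrip {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(false);
        conf.set("my.key", "some\u0001string");   // value with a control character

        // Serialize; the header is now <?xml version="1.1" ...?>
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        conf.writeXml(out);

        // Read it back and check that the control character survived.
        Configuration copy = new Configuration(false);
        copy.addResource(new ByteArrayInputStream(out.toByteArray()));
        System.out.println("some\u0001string".equals(copy.get("my.key")));
      }
    }
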
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java Thu Sep 29 00:09:56 2011
@@ -264,9 +264,15 @@ public class LocalDirAllocator {
Path tmpDir = new Path(localDirs[i]);
if(localFS.mkdirs(tmpDir)|| localFS.exists(tmpDir)) {
try {
- DiskChecker.checkDir(new File(localDirs[i]));
- dirs.add(localDirs[i]);
- dfList.add(new DF(new File(localDirs[i]), 30000));
+
+ File tmpFile = tmpDir.isAbsolute()
+ ? new File(localFS.makeQualified(tmpDir).toUri())
+ : new File(localDirs[i]);
+
+ DiskChecker.checkDir(tmpFile);
+ dirs.add(tmpFile.getPath());
+ dfList.add(new DF(tmpFile, 30000));
+
} catch (DiskErrorException de) {
LOG.warn( localDirs[i] + " is not writable\n", de);
}
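
A usage sketch of what this enables (directories hypothetical; the context key
is the one used by the updated tests below): the configured directory list may
now mix fully-qualified, absolute, and relative paths.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.LocalDirAllocator;
    import org.apache.hadoop.fs.Path;

    public class AllocatorSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Qualified, absolute, and relative entries in one list.
        conf.set("fs.client.buffer.dir",
            "file:///tmp/alloc0,/tmp/alloc1,build/alloc2");
        LocalDirAllocator allocator = new LocalDirAllocator("fs.client.buffer.dir");
        Path p = allocator.getLocalPathForWrite("block", 100, conf);
        System.out.println(p);
      }
    }
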
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Thu Sep 29 00:09:56 2011
@@ -20,6 +20,7 @@ package org.apache.hadoop.http;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
+import java.io.InterruptedIOException;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.net.URL;
@@ -124,6 +125,29 @@ public class HttpServer implements Filte
boolean findPort, Configuration conf, Connector connector) throws IOException {
this(name, bindAddress, port, findPort, conf, null, connector);
}
+
+ /**
+ * Create a status server on the given port. Allows you to specify the
+ * path specifications that this server will be serving so that they will be
+ * added to the filters properly.
+ *
+ * @param name The name of the server
+ * @param bindAddress The address for this server
+ * @param port The port to use on the server
+ * @param findPort whether the server should start at the given port and
+ * increment by 1 until it finds a free port.
+ * @param conf Configuration
+ * @param pathSpecs Path specifications that this httpserver will be serving.
+ * These will be added to any filters.
+ */
+ public HttpServer(String name, String bindAddress, int port,
+ boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
+ this(name, bindAddress, port, findPort, conf, null, null);
+ for (String path : pathSpecs) {
+ LOG.info("adding path spec: " + path);
+ addFilterPathMapping(path, webAppContext);
+ }
+ }
/**
* Create a status server on the given port.
@@ -259,7 +283,7 @@ public class HttpServer implements Filte
if (logDir != null) {
Context logContext = new Context(parent, "/logs");
logContext.setResourceBase(logDir);
- logContext.addServlet(AdminAuthorizedServlet.class, "/");
+ logContext.addServlet(AdminAuthorizedServlet.class, "/*");
logContext.setDisplayName("logs");
setContextAttributes(logContext, conf);
defaultContexts.put(logContext, true);
@@ -660,6 +684,9 @@ public class HttpServer implements Filte
}
} catch (IOException e) {
throw e;
+ } catch (InterruptedException e) {
+ throw (IOException) new InterruptedIOException(
+ "Interrupted while starting HTTP server").initCause(e);
} catch (Exception e) {
throw new IOException("Problem starting http server", e);
}
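
A short sketch of the new constructor in use (server name and path spec are
hypothetical): the listed specs are registered with any configured filters,
which is what lets servers such as the Yarn WebApp filter the paths served by
their own injected servlets.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.HttpServer;

    public class FilteredServerSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Bind to an ephemeral port; filter requests to the injected servlet.
        HttpServer server = new HttpServer("test", "0.0.0.0", 0, true, conf,
            new String[] { "/myservlet/*" });
        server.start();
        server.stop();
      }
    }
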
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolSignature.java Thu Sep 29 00:09:56 2011
@@ -199,7 +199,7 @@ public class ProtocolSignature implement
* @param protocol protocol
* @return the server's protocol signature
*/
- static ProtocolSignature getProtocolSignature(
+ public static ProtocolSignature getProtocolSignature(
int clientMethodsHashCode,
long serverVersion,
Class<? extends VersionedProtocol> protocol) {
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java Thu Sep 29 00:09:56 2011
@@ -516,4 +516,25 @@ public class NetUtils {
} catch (UnknownHostException ignore) { }
return addr;
}
+
+ /**
+ * Given an InetAddress, checks to see if the address is a local address, by
+ * comparing the address with all the interfaces on the node.
+ * @param addr address to check if it is local node's address
+ * @return true if the address corresponds to the local node
+ */
+ public static boolean isLocalAddress(InetAddress addr) {
+ // Check if the address is any local or loop back
+ boolean local = addr.isAnyLocalAddress() || addr.isLoopbackAddress();
+
+ // Check if the address is defined on any interface
+ if (!local) {
+ try {
+ local = NetworkInterface.getByInetAddress(addr) != null;
+ } catch (SocketException e) {
+ local = false;
+ }
+ }
+ return local;
+ }
}
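
For illustration, how the new helper behaves (the remote address mirrors the
test added below): loopback and locally bound interface addresses return true;
anything else returns false.

    import java.net.InetAddress;
    import org.apache.hadoop.net.NetUtils;

    public class LocalAddressSketch {
      public static void main(String[] args) throws Exception {
        // true: loopback is always local
        System.out.println(NetUtils.isLocalAddress(
            InetAddress.getByName("127.0.0.1")));
        // false, unless this address happens to be bound on the node
        System.out.println(NetUtils.isLocalAddress(
            InetAddress.getByName("8.8.8.8")));
      }
    }
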
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/AuthenticationFilterInitializer.java Thu Sep 29 00:09:56 2011
@@ -22,6 +22,9 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.http.FilterContainer;
import org.apache.hadoop.http.FilterInitializer;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
@@ -40,8 +43,10 @@ import java.util.Map;
*/
public class AuthenticationFilterInitializer extends FilterInitializer {
- private static final String PREFIX = "hadoop.http.authentication.";
+ static final String PREFIX = "hadoop.http.authentication.";
+ static final String SIGNATURE_SECRET_FILE = AuthenticationFilter.SIGNATURE_SECRET + ".file";
+
/**
* Initializes Alfredo AuthenticationFilter.
* <p/>
@@ -67,6 +72,25 @@ public class AuthenticationFilterInitial
}
}
+ String signatureSecretFile = filterConfig.get(SIGNATURE_SECRET_FILE);
+ if (signatureSecretFile == null) {
+ throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
+ }
+
+ try {
+ StringBuilder secret = new StringBuilder();
+ Reader reader = new FileReader(signatureSecretFile);
+ int c = reader.read();
+ while (c > -1) {
+ secret.append((char)c);
+ c = reader.read();
+ }
+ reader.close();
+ filterConfig.put(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
+ } catch (IOException ex) {
+ throw new RuntimeException("Could not read HTTP signature secret file: " + signatureSecretFile);
+ }
+
container.addFilter("authentication",
AuthenticationFilter.class.getName(),
filterConfig);
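
A sketch of wiring this up (file path hypothetical; the property name is the
one introduced here): the initializer now requires
hadoop.http.authentication.signature.secret.file to point at a readable file,
else it throws the RuntimeException above. The file should be readable only by
the user running the daemons.

    import java.io.File;
    import java.io.FileWriter;
    import java.io.Writer;
    import org.apache.hadoop.conf.Configuration;

    public class SecretFileSketch {
      public static void main(String[] args) throws Exception {
        File secretFile = new File("/tmp/http-secret.txt");  // hypothetical location
        Writer writer = new FileWriter(secretFile);
        writer.write("my-secret");   // file contents become the signing secret
        writer.close();

        Configuration conf = new Configuration();
        conf.set("hadoop.http.authentication.signature.secret.file",
            secretFile.getAbsolutePath());
      }
    }
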
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh Thu Sep 29 00:09:56 2011
@@ -475,7 +475,10 @@ else
template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/taskcontroller.cfg ${HADOOP_CONF_DIR}/taskcontroller.cfg
template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-metrics2.properties ${HADOOP_CONF_DIR}/hadoop-metrics2.properties
if [ ! -e ${HADOOP_CONF_DIR}/capacity-scheduler.xml ]; then
- template_generator ${HADOOP_PREFIX}/share/hadoop/templates/conf/capacity-scheduler.xml ${HADOOP_CONF_DIR}/capacity-scheduler.xml
+ template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/capacity-scheduler.xml ${HADOOP_CONF_DIR}/capacity-scheduler.xml
+ fi
+ if [ ! -e ${HADOOP_CONF_DIR}/hadoop-metrics2.properties ]; then
+ cp ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-metrics2.properties ${HADOOP_CONF_DIR}/hadoop-metrics2.properties
fi
if [ ! -e ${HADOOP_CONF_DIR}/log4j.properties ]; then
cp ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/log4j.properties ${HADOOP_CONF_DIR}/log4j.properties
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml Thu Sep 29 00:09:56 2011
@@ -145,6 +145,26 @@
</property>
<property>
+ <name>dfs.web.authentication.kerberos.principal</name>
+ <value>HTTP/_HOST@${local.realm}</value>
+ <description>
+ The HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint.
+
+ The HTTP Kerberos principal MUST start with 'HTTP/' per Kerberos
+ HTTP SPNEGO specification.
+ </description>
+ </property>
+
+ <property>
+ <name>dfs.web.authentication.kerberos.keytab</name>
+ <value>/etc/security/keytabs/nn.service.keytab</value>
+ <description>
+ The Kerberos keytab file with the credentials for the
+ HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint.
+ </description>
+ </property>
+
+ <property>
<name>dfs.namenode.keytab.file</name>
<value>/etc/security/keytabs/nn.service.keytab</value>
<description>
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Thu Sep 29 00:09:56 2011
@@ -808,8 +808,8 @@
</property>
<property>
- <name>hadoop.http.authentication.signature.secret</name>
- <value>hadoop</value>
+ <name>hadoop.http.authentication.signature.secret.file</name>
+ <value>${user.home}/hadoop-http-auth-signature-secret</value>
<description>
The signature secret for signing the authentication tokens.
If not set a random secret is generated at startup time.
Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Sep 29 00:09:56 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1173011
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1177115
/hadoop/core/branches/branch-0.19/core/src/test/core:713112
/hadoop/core/trunk/src/test/core:776175-785643,785929-786278
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Thu Sep 29 00:09:56 2011
@@ -58,7 +58,7 @@ public class TestConfiguration extends T
}
private void startConfig() throws IOException{
- out.write("<?xml version=\"1.0\"?>\n");
+ out.write("<?xml version=\"1.1\"?>\n");
out.write("<configuration>\n");
}
@@ -221,6 +221,18 @@ public class TestConfiguration extends T
assertEquals("this contains a comment", conf.get("my.comment"));
}
+ public void testControlAInValue() throws IOException {
+ out = new BufferedWriter(new FileWriter(CONFIG));
+ startConfig();
+ appendProperty("my.char", "\u0001");
+ appendProperty("my.string", "some\u0001string");
+ endConfig();
+ Path fileResource = new Path(CONFIG);
+ conf.addResource(fileResource);
+ assertEquals("\u0001", conf.get("my.char"));
+ assertEquals("some\u0001string", conf.get("my.string"));
+ }
+
public void testTrim() throws IOException {
out=new BufferedWriter(new FileWriter(CONFIG));
startConfig();
@@ -298,7 +310,7 @@ public class TestConfiguration extends T
conf.writeXml(baos);
String result = baos.toString();
assertTrue("Result has proper header", result.startsWith(
- "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><configuration>"));
+ "<?xml version=\"1.1\" encoding=\"UTF-8\" standalone=\"no\"?><configuration>"));
assertTrue("Result has proper footer", result.endsWith("</configuration>"));
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java Thu Sep 29 00:09:56 2011
@@ -20,40 +20,48 @@ package org.apache.hadoop.fs;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Shell;
-import junit.framework.TestCase;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
/** This test verifies that LocalDirAllocator works correctly;
- * Every test case uses different buffer dirs to
+ * Every test case uses different buffer dirs to
* enforce the AllocatorPerContext initialization.
* This test does not run on Cygwin because under Cygwin
* a directory can be created in a read-only directory
* which breaks this test.
- */
-public class TestLocalDirAllocator extends TestCase {
+ */
+@RunWith(Parameterized.class)
+public class TestLocalDirAllocator {
final static private Configuration conf = new Configuration();
final static private String BUFFER_DIR_ROOT = "build/test/temp";
+ final static private String ABSOLUTE_DIR_ROOT;
+ final static private String QUALIFIED_DIR_ROOT;
final static private Path BUFFER_PATH_ROOT = new Path(BUFFER_DIR_ROOT);
final static private File BUFFER_ROOT = new File(BUFFER_DIR_ROOT);
- final static private String BUFFER_DIR[] = new String[] {
- BUFFER_DIR_ROOT+"/tmp0", BUFFER_DIR_ROOT+"/tmp1", BUFFER_DIR_ROOT+"/tmp2",
- BUFFER_DIR_ROOT+"/tmp3", BUFFER_DIR_ROOT+"/tmp4", BUFFER_DIR_ROOT+"/tmp5",
- BUFFER_DIR_ROOT+"/tmp6"};
- final static private Path BUFFER_PATH[] = new Path[] {
- new Path(BUFFER_DIR[0]), new Path(BUFFER_DIR[1]), new Path(BUFFER_DIR[2]),
- new Path(BUFFER_DIR[3]), new Path(BUFFER_DIR[4]), new Path(BUFFER_DIR[5]),
- new Path(BUFFER_DIR[6])};
- final static private String CONTEXT = "dfs.client.buffer.dir";
+ final static private String CONTEXT = "fs.client.buffer.dir";
final static private String FILENAME = "block";
- final static private LocalDirAllocator dirAllocator =
+ final static private LocalDirAllocator dirAllocator =
new LocalDirAllocator(CONTEXT);
static LocalFileSystem localFs;
final static private boolean isWindows =
System.getProperty("os.name").startsWith("Windows");
final static int SMALL_FILE_SIZE = 100;
+ final static private String RELATIVE = "/RELATIVE";
+ final static private String ABSOLUTE = "/ABSOLUTE";
+ final static private String QUALIFIED = "/QUALIFIED";
+ final private String ROOT;
+ final private String PREFIX;
+
static {
try {
localFs = FileSystem.getLocal(conf);
@@ -63,170 +71,214 @@ public class TestLocalDirAllocator exten
e.printStackTrace();
System.exit(-1);
}
+
+ ABSOLUTE_DIR_ROOT = new Path(localFs.getWorkingDirectory(),
+ BUFFER_DIR_ROOT).toUri().getPath();
+ QUALIFIED_DIR_ROOT = new Path(localFs.getWorkingDirectory(),
+ BUFFER_DIR_ROOT).toUri().toString();
+ }
+
+ public TestLocalDirAllocator(String root, String prefix) {
+ ROOT = root;
+ PREFIX = prefix;
+ }
+
+ @Parameters
+ public static Collection<Object[]> params() {
+ Object [][] data = new Object[][] {
+ { BUFFER_DIR_ROOT, RELATIVE },
+ { ABSOLUTE_DIR_ROOT, ABSOLUTE },
+ { QUALIFIED_DIR_ROOT, QUALIFIED }
+ };
+
+ return Arrays.asList(data);
}
private static void rmBufferDirs() throws IOException {
assertTrue(!localFs.exists(BUFFER_PATH_ROOT) ||
localFs.delete(BUFFER_PATH_ROOT, true));
}
-
- private void validateTempDirCreation(int i) throws IOException {
+
+ private static void validateTempDirCreation(String dir) throws IOException {
File result = createTempFile(SMALL_FILE_SIZE);
- assertTrue("Checking for " + BUFFER_DIR[i] + " in " + result + " - FAILED!",
- result.getPath().startsWith(new File(BUFFER_DIR[i], FILENAME).getPath()));
+ assertTrue("Checking for " + dir + " in " + result + " - FAILED!",
+ result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath()));
}
-
- private File createTempFile() throws IOException {
- File result = dirAllocator.createTmpFileForWrite(FILENAME, -1, conf);
- result.delete();
- return result;
+
+ private static File createTempFile() throws IOException {
+ return createTempFile(-1);
}
-
- private File createTempFile(long size) throws IOException {
+
+ private static File createTempFile(long size) throws IOException {
File result = dirAllocator.createTmpFileForWrite(FILENAME, size, conf);
result.delete();
return result;
}
-
- /** Two buffer dirs. The first dir does not exist & is on a read-only disk;
+
+ private String buildBufferDir(String dir, int i) {
+ return dir + PREFIX + i;
+ }
+
+ /** Two buffer dirs. The first dir does not exist & is on a read-only disk;
* The second dir exists & is RW
* @throws Exception
*/
+ @Test
public void test0() throws Exception {
if (isWindows) return;
+ String dir0 = buildBufferDir(ROOT, 0);
+ String dir1 = buildBufferDir(ROOT, 1);
try {
- conf.set(CONTEXT, BUFFER_DIR[0]+","+BUFFER_DIR[1]);
- assertTrue(localFs.mkdirs(BUFFER_PATH[1]));
+ conf.set(CONTEXT, dir0 + "," + dir1);
+ assertTrue(localFs.mkdirs(new Path(dir1)));
BUFFER_ROOT.setReadOnly();
- validateTempDirCreation(1);
- validateTempDirCreation(1);
+ validateTempDirCreation(dir1);
+ validateTempDirCreation(dir1);
} finally {
Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
rmBufferDirs();
}
}
-
- /** Two buffer dirs. The first dir exists & is on a read-only disk;
+
+ /** Two buffer dirs. The first dir exists & is on a read-only disk;
* The second dir exists & is RW
* @throws Exception
*/
+ @Test
public void test1() throws Exception {
if (isWindows) return;
+ String dir1 = buildBufferDir(ROOT, 1);
+ String dir2 = buildBufferDir(ROOT, 2);
try {
- conf.set(CONTEXT, BUFFER_DIR[1]+","+BUFFER_DIR[2]);
- assertTrue(localFs.mkdirs(BUFFER_PATH[2]));
+ conf.set(CONTEXT, dir1 + "," + dir2);
+ assertTrue(localFs.mkdirs(new Path(dir2)));
BUFFER_ROOT.setReadOnly();
- validateTempDirCreation(2);
- validateTempDirCreation(2);
+ validateTempDirCreation(dir2);
+ validateTempDirCreation(dir2);
} finally {
Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
rmBufferDirs();
}
}
/** Two buffer dirs. Both do not exist but are on a RW disk.
- * Check if tmp dirs are allocated in a round-robin
+ * Check if tmp dirs are allocated in a round-robin
*/
+ @Test
public void test2() throws Exception {
if (isWindows) return;
+ String dir2 = buildBufferDir(ROOT, 2);
+ String dir3 = buildBufferDir(ROOT, 3);
try {
- conf.set(CONTEXT, BUFFER_DIR[2]+","+BUFFER_DIR[3]);
+ conf.set(CONTEXT, dir2 + "," + dir3);
// create the first file, and then figure the round-robin sequence
createTempFile(SMALL_FILE_SIZE);
int firstDirIdx = (dirAllocator.getCurrentDirectoryIndex() == 0) ? 2 : 3;
int secondDirIdx = (firstDirIdx == 2) ? 3 : 2;
-
+
// check if tmp dirs are allocated in a round-robin manner
- validateTempDirCreation(firstDirIdx);
- validateTempDirCreation(secondDirIdx);
- validateTempDirCreation(firstDirIdx);
+ validateTempDirCreation(buildBufferDir(ROOT, firstDirIdx));
+ validateTempDirCreation(buildBufferDir(ROOT, secondDirIdx));
+ validateTempDirCreation(buildBufferDir(ROOT, firstDirIdx));
} finally {
rmBufferDirs();
}
}
- /** Two buffer dirs. Both exist and are on a R/W disk.
+ /** Two buffer dirs. Both exist and are on a R/W disk.
* Later disk1 becomes read-only.
* @throws Exception
*/
+ @Test
public void test3() throws Exception {
if (isWindows) return;
+ String dir3 = buildBufferDir(ROOT, 3);
+ String dir4 = buildBufferDir(ROOT, 4);
try {
- conf.set(CONTEXT, BUFFER_DIR[3]+","+BUFFER_DIR[4]);
- assertTrue(localFs.mkdirs(BUFFER_PATH[3]));
- assertTrue(localFs.mkdirs(BUFFER_PATH[4]));
-
- // create the first file with size, and then figure the round-robin sequence
+ conf.set(CONTEXT, dir3 + "," + dir4);
+ assertTrue(localFs.mkdirs(new Path(dir3)));
+ assertTrue(localFs.mkdirs(new Path(dir4)));
+
+ // Create the first small file
createTempFile(SMALL_FILE_SIZE);
+ // Determine the round-robin sequence
int nextDirIdx = (dirAllocator.getCurrentDirectoryIndex() == 0) ? 3 : 4;
- validateTempDirCreation(nextDirIdx);
+ validateTempDirCreation(buildBufferDir(ROOT, nextDirIdx));
// change buffer directory 2 to be read only
- new File(BUFFER_DIR[4]).setReadOnly();
- validateTempDirCreation(3);
- validateTempDirCreation(3);
+ new File(new Path(dir4).toUri().getPath()).setReadOnly();
+ validateTempDirCreation(dir3);
+ validateTempDirCreation(dir3);
} finally {
rmBufferDirs();
}
}
-
+
/**
* Two buffer dirs, on read-write disk.
- *
+ *
* Try to create a whole bunch of files.
* Verify that they do indeed all get created where they should.
- *
+ *
* Would ideally check statistical properties of distribution, but
* we don't have the nerve to risk false-positives here.
- *
+ *
* @throws Exception
*/
static final int TRIALS = 100;
+ @Test
public void test4() throws Exception {
if (isWindows) return;
+ String dir5 = buildBufferDir(ROOT, 5);
+ String dir6 = buildBufferDir(ROOT, 6);
try {
- conf.set(CONTEXT, BUFFER_DIR[5]+","+BUFFER_DIR[6]);
- assertTrue(localFs.mkdirs(BUFFER_PATH[5]));
- assertTrue(localFs.mkdirs(BUFFER_PATH[6]));
-
+ conf.set(CONTEXT, dir5 + "," + dir6);
+ assertTrue(localFs.mkdirs(new Path(dir5)));
+ assertTrue(localFs.mkdirs(new Path(dir6)));
+
int inDir5=0, inDir6=0;
for(int i = 0; i < TRIALS; ++i) {
File result = createTempFile();
- if(result.getPath().startsWith(new File(BUFFER_DIR[5], FILENAME).getPath())) {
+ if(result.getPath().startsWith(
+ new Path(dir5, FILENAME).toUri().getPath())) {
inDir5++;
- } else if(result.getPath().startsWith(new File(BUFFER_DIR[6], FILENAME).getPath())) {
+ } else if(result.getPath().startsWith(
+ new Path(dir6, FILENAME).toUri().getPath())) {
inDir6++;
}
result.delete();
}
-
- assertTrue( inDir5 + inDir6 == TRIALS);
-
+
+ assertTrue(inDir5 + inDir6 == TRIALS);
+
} finally {
rmBufferDirs();
}
}
-
- /** Two buffer dirs. The first dir does not exist & is on a read-only disk;
+
+ /** Two buffer dirs. The first dir does not exist & is on a read-only disk;
* The second dir exists & is RW
* getLocalPathForWrite with checkAccess set to false should create a parent
* directory. With checkAccess true, the directory should not be created.
* @throws Exception
*/
+ @Test
public void testLocalPathForWriteDirCreation() throws IOException {
+ String dir0 = buildBufferDir(ROOT, 0);
+ String dir1 = buildBufferDir(ROOT, 1);
try {
- conf.set(CONTEXT, BUFFER_DIR[0] + "," + BUFFER_DIR[1]);
- assertTrue(localFs.mkdirs(BUFFER_PATH[1]));
+ conf.set(CONTEXT, dir0 + "," + dir1);
+ assertTrue(localFs.mkdirs(new Path(dir1)));
BUFFER_ROOT.setReadOnly();
Path p1 =
- dirAllocator.getLocalPathForWrite("p1/x", SMALL_FILE_SIZE, conf);
+ dirAllocator.getLocalPathForWrite("p1/x", SMALL_FILE_SIZE, conf);
assertTrue(localFs.getFileStatus(p1.getParent()).isDirectory());
Path p2 =
- dirAllocator.getLocalPathForWrite("p2/x", SMALL_FILE_SIZE, conf,
- false);
+ dirAllocator.getLocalPathForWrite("p2/x", SMALL_FILE_SIZE, conf,
+ false);
try {
localFs.getFileStatus(p2.getParent());
} catch (Exception e) {
@@ -237,5 +289,26 @@ public class TestLocalDirAllocator exten
rmBufferDirs();
}
}
-
+
+ /** Test no side effect files are left over. After creating a temp
+ * file, remove both the temp file and its parent. Verify that
+ * no files or directories are left over as can happen when File objects
+ * are mistakenly created from fully qualified path strings.
+ * @throws IOException
+ */
+ @Test
+ public void testNoSideEffects() throws IOException {
+ if (isWindows) return;
+ String dir = buildBufferDir(ROOT, 0);
+ try {
+ conf.set(CONTEXT, dir);
+ File result = dirAllocator.createTmpFileForWrite(FILENAME, -1, conf);
+ assertTrue(result.delete());
+ assertTrue(result.getParentFile().delete());
+ assertFalse(new File(dir).exists());
+ } finally {
+ Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
+ rmBufferDirs();
+ }
+ }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java Thu Sep 29 00:09:56 2011
@@ -486,6 +486,9 @@ public class TestTrash extends TestCase
conf.set(FS_TRASH_INTERVAL_KEY, "0.2"); // 12 seconds
conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
conf.set(FS_TRASH_CHECKPOINT_INTERVAL_KEY, "0.1"); // 6 seconds
+ FileSystem fs = FileSystem.getLocal(conf);
+ conf.set("fs.default.name", fs.getUri().toString());
+
Trash trash = new Trash(conf);
// Start Emptier in background
@@ -493,8 +496,6 @@ public class TestTrash extends TestCase
Thread emptierThread = new Thread(emptier);
emptierThread.start();
- FileSystem fs = FileSystem.getLocal(conf);
- conf.set("fs.defaultFS", fs.getUri().toString());
FsShell shell = new FsShell();
shell.setConf(conf);
shell.init();
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java Thu Sep 29 00:09:56 2011
@@ -71,6 +71,21 @@ public class HttpServerFunctionalTest ex
}
/**
+ * Create but do not start the test webapp server. The test webapp dir is
+ * prepared/checked in advance.
+ * @param conf the server configuration to use
+ * @return the server instance
+ *
+ * @throws IOException if a problem occurs
+ * @throws AssertionError if a condition was not met
+ */
+ public static HttpServer createTestServer(Configuration conf,
+ String[] pathSpecs) throws IOException {
+ prepareTestWebapp();
+ return createServer(TEST, conf, pathSpecs);
+ }
+
+ /**
* Prepare the test webapp by creating the directory from the test properties
* fail if the directory cannot be created.
* @throws AssertionError if a condition was not met
@@ -104,6 +119,18 @@ public class HttpServerFunctionalTest ex
throws IOException {
return new HttpServer(webapp, "0.0.0.0", 0, true, conf);
}
+ /**
+ * Create an HttpServer instance for the given webapp
+ * @param webapp the webapp to work with
+ * @param conf the configuration to use for the server
+ * @param pathSpecs the paths specifications the server will service
+ * @return the server
+ * @throws IOException if it could not be created
+ */
+ public static HttpServer createServer(String webapp, Configuration conf,
+ String[] pathSpecs) throws IOException {
+ return new HttpServer(webapp, "0.0.0.0", 0, true, conf, pathSpecs);
+ }
/**
* Create and start a server with the test webapp
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java Thu Sep 29 00:09:56 2011
@@ -18,13 +18,17 @@
package org.apache.hadoop.net;
import org.junit.Test;
+
import static org.junit.Assert.*;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
import java.net.Socket;
import java.net.ConnectException;
import java.net.SocketException;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
+import java.util.Enumeration;
import org.apache.hadoop.conf.Configuration;
@@ -88,4 +92,32 @@ public class TestNetUtils {
fail("NetUtils.verifyHostnames threw unexpected UnknownHostException");
}
}
+
+ /**
+ * Test for {@link NetUtils#isLocalAddress(java.net.InetAddress)}
+ */
+ @Test
+ public void testIsLocalAddress() throws Exception {
+ // Test - local host is local address
+ assertTrue(NetUtils.isLocalAddress(InetAddress.getLocalHost()));
+
+ // Test - all addresses bound network interface is local address
+ Enumeration<NetworkInterface> interfaces = NetworkInterface
+ .getNetworkInterfaces();
+ if (interfaces != null) { // Iterate through all network interfaces
+ while (interfaces.hasMoreElements()) {
+ NetworkInterface i = interfaces.nextElement();
+ Enumeration<InetAddress> addrs = i.getInetAddresses();
+ if (addrs == null) {
+ continue;
+ }
+ // Iterate through all the addresses of a network interface
+ while (addrs.hasMoreElements()) {
+ InetAddress addr = addrs.nextElement();
+ assertTrue(NetUtils.isLocalAddress(addr));
+ }
+ }
+ }
+ assertFalse(NetUtils.isLocalAddress(InetAddress.getByName("8.8.8.8")));
+ }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java?rev=1177117&r1=1177116&r2=1177117&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestAuthenticationFilter.java Thu Sep 29 00:09:56 2011
@@ -25,14 +25,28 @@ import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.Writer;
import java.util.Map;
public class TestAuthenticationFilter extends TestCase {
@SuppressWarnings("unchecked")
- public void testConfiguration() {
+ public void testConfiguration() throws Exception {
Configuration conf = new Configuration();
conf.set("hadoop.http.authentication.foo", "bar");
+
+ File testDir = new File(System.getProperty("test.build.data",
+ "target/test-dir"));
+ testDir.mkdirs();
+ File secretFile = new File(testDir, "http-secret.txt");
+ Writer writer = new FileWriter(secretFile);
+ writer.write("hadoop");
+ writer.close();
+ conf.set(AuthenticationFilterInitializer.PREFIX +
+ AuthenticationFilterInitializer.SIGNATURE_SECRET_FILE,
+ secretFile.getAbsolutePath());
FilterContainer container = Mockito.mock(FilterContainer.class);
Mockito.doAnswer(