You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2013/12/02 18:41:48 UTC
svn commit: r1547122 - in
/hadoop/common/branches/HDFS-2832/hadoop-common-project: hadoop-common/
hadoop-common/src/main/java/
hadoop-common/src/main/java/org/apache/hadoop/fs/
hadoop-common/src/main/java/org/apache/hadoop/http/
hadoop-common/src/main/...
Author: arp
Date: Mon Dec 2 17:41:44 2013
New Revision: 1547122
URL: http://svn.apache.org/r1547122
Log:
Merging r1544666 through r1547120 from trunk to branch HDFS-2832
Removed:
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapInterface.java
Modified:
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/portmap/TestPortmap.java
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt Mon Dec 2 17:41:44 2013
@@ -388,6 +388,8 @@ Release 2.3.0 - UNRELEASED
HADOOP-10111. Allow DU to be initialized with an initial value (Kihwal Lee
via jeagles)
+ HADOOP-10126. LightWeightGSet log message is confusing. (Vinay via suresh)
+
OPTIMIZATIONS
HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
@@ -450,6 +452,9 @@ Release 2.3.0 - UNRELEASED
HADOOP-10107. Server.getNumOpenConnections may throw NPE. (Kihwal Lee via
jing9)
+ HADOOP-10135. writes to swift fs over partition size leave temp files and
+ empty output file (David Dobbins via stevel)
+
Release 2.2.1 - UNRELEASED
INCOMPATIBLE CHANGES
@@ -467,6 +472,9 @@ Release 2.2.1 - UNRELEASED
HADOOP-9623 Update jets3t dependency to 0.9.0. (Amandeep Khurana via Colin
Patrick McCabe)
+ HADOOP-10132. RPC#stopProxy() should log the class of proxy when IllegalArgumentException
+ is encountered (Ted Yu via umamahesh)
+
OPTIMIZATIONS
BUG FIXES
@@ -508,6 +516,9 @@ Release 2.2.1 - UNRELEASED
HADOOP-9114. After defining the dfs.checksum.type as the NULL, write file and hflush will
throw java.lang.ArrayIndexOutOfBoundsException (Sathish via umamahesh)
+ HADOOP-10130. RawLocalFS::LocalFSFileInputStream.pread does not track
+ FS::Statistics (Binglin Chang via Colin Patrick McCabe)
+
Release 2.2.0 - 2013-10-13
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt:r1546957
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1544304-1547120
Propchange: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1544304-1547120
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java Mon Dec 2 17:41:44 2013
@@ -83,39 +83,6 @@ public class RawLocalFileSystem extends
setConf(conf);
}
- class TrackingFileInputStream extends FileInputStream {
- public TrackingFileInputStream(File f) throws IOException {
- super(f);
- }
-
- @Override
- public int read() throws IOException {
- int result = super.read();
- if (result != -1) {
- statistics.incrementBytesRead(1);
- }
- return result;
- }
-
- @Override
- public int read(byte[] data) throws IOException {
- int result = super.read(data);
- if (result != -1) {
- statistics.incrementBytesRead(result);
- }
- return result;
- }
-
- @Override
- public int read(byte[] data, int offset, int length) throws IOException {
- int result = super.read(data, offset, length);
- if (result != -1) {
- statistics.incrementBytesRead(result);
- }
- return result;
- }
- }
-
/*******************************************************
* For open()'s FSInputStream.
*******************************************************/
@@ -124,7 +91,7 @@ public class RawLocalFileSystem extends
private long position;
public LocalFSFileInputStream(Path f) throws IOException {
- this.fis = new TrackingFileInputStream(pathToFile(f));
+ fis = new FileInputStream(pathToFile(f));
}
@Override
@@ -159,6 +126,7 @@ public class RawLocalFileSystem extends
int value = fis.read();
if (value >= 0) {
this.position++;
+ statistics.incrementBytesRead(1);
}
return value;
} catch (IOException e) { // unexpected exception
@@ -172,6 +140,7 @@ public class RawLocalFileSystem extends
int value = fis.read(b, off, len);
if (value > 0) {
this.position += value;
+ statistics.incrementBytesRead(value);
}
return value;
} catch (IOException e) { // unexpected exception
@@ -184,7 +153,11 @@ public class RawLocalFileSystem extends
throws IOException {
ByteBuffer bb = ByteBuffer.wrap(b, off, len);
try {
- return fis.getChannel().read(bb, position);
+ int value = fis.getChannel().read(bb, position);
+ if (value > 0) {
+ statistics.incrementBytesRead(value);
+ }
+ return value;
} catch (IOException e) {
throw new FSError(e);
}
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Mon Dec 2 17:41:44 2013
@@ -19,12 +19,13 @@ package org.apache.hadoop.http;
import java.io.FileNotFoundException;
import java.io.IOException;
-import java.io.PrintWriter;
import java.io.InterruptedIOException;
+import java.io.PrintWriter;
import java.net.BindException;
import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
import java.net.URL;
-import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
@@ -32,7 +33,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import javax.net.ssl.SSLServerSocketFactory;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
@@ -60,7 +60,6 @@ import org.apache.hadoop.security.Securi
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Shell;
import org.mortbay.io.Buffer;
@@ -71,8 +70,8 @@ import org.mortbay.jetty.RequestLog;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.handler.ContextHandler;
import org.mortbay.jetty.handler.ContextHandlerCollection;
-import org.mortbay.jetty.handler.RequestLogHandler;
import org.mortbay.jetty.handler.HandlerCollection;
+import org.mortbay.jetty.handler.RequestLogHandler;
import org.mortbay.jetty.nio.SelectChannelConnector;
import org.mortbay.jetty.security.SslSocketConnector;
import org.mortbay.jetty.servlet.Context;
@@ -86,6 +85,7 @@ import org.mortbay.thread.QueuedThreadPo
import org.mortbay.util.MultiException;
import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
import com.sun.jersey.spi.container.servlet.ServletContainer;
/**
@@ -114,11 +114,25 @@ public class HttpServer implements Filte
public static final String BIND_ADDRESS = "bind.address";
- private AccessControlList adminsAcl;
+ private final AccessControlList adminsAcl;
- private SSLFactory sslFactory;
protected final Server webServer;
- protected final Connector listener;
+
+ private static class ListenerInfo {
+ /**
+ * Boolean flag to determine whether the HTTP server should clean up the
+ * listener in stop().
+ */
+ private final boolean isManaged;
+ private final Connector listener;
+ private ListenerInfo(boolean isManaged, Connector listener) {
+ this.isManaged = isManaged;
+ this.listener = listener;
+ }
+ }
+
+ private final List<ListenerInfo> listeners = Lists.newArrayList();
+
protected final WebAppContext webAppContext;
protected final boolean findPort;
protected final Map<Context, Boolean> defaultContexts =
@@ -127,34 +141,111 @@ public class HttpServer implements Filte
static final String STATE_DESCRIPTION_ALIVE = " - alive";
static final String STATE_DESCRIPTION_NOT_LIVE = " - not live";
- private final boolean listenerStartedExternally;
-
/**
* Class to construct instances of HTTP server with specific options.
*/
public static class Builder {
- String name;
- String bindAddress;
- Integer port;
- Boolean findPort;
- Configuration conf;
- Connector connector;
- String[] pathSpecs;
- AccessControlList adminsAcl;
- boolean securityEnabled = false;
- String usernameConfKey = null;
- String keytabConfKey = null;
-
+ private ArrayList<URI> endpoints = Lists.newArrayList();
+ private Connector connector;
+ private String name;
+ private Configuration conf;
+ private String[] pathSpecs;
+ private AccessControlList adminsAcl;
+ private boolean securityEnabled = false;
+ private String usernameConfKey;
+ private String keytabConfKey;
+ private boolean needsClientAuth;
+ private String trustStore;
+ private String trustStorePassword;
+ private String trustStoreType;
+
+ private String keyStore;
+ private String keyStorePassword;
+ private String keyStoreType;
+
+ // The -keypass option in keytool
+ private String keyPassword;
+
+ @Deprecated
+ private String bindAddress;
+ @Deprecated
+ private int port = -1;
+
+ private boolean findPort;
+
+ private String hostName;
+
public Builder setName(String name){
this.name = name;
return this;
}
+
+ /**
+ * Add an endpoint that the HTTP server should listen to.
+ *
+ * @param endpoint
+ * the endpoint of that the HTTP server should listen to. The
+ * scheme specifies the protocol (i.e. HTTP / HTTPS), the host
+ * specifies the binding address, and the port specifies the
+ * listening port. Unspecified or zero port means that the server
+ * can listen to any port.
+ */
+ public Builder addEndpoint(URI endpoint) {
+ endpoints.add(endpoint);
+ return this;
+ }
+
+ /**
+ * Set the hostname of the http server. The host name is used to resolve the
+ * _HOST field in Kerberos principals. The hostname of the first listener
+ * will be used if the name is unspecified.
+ */
+ public Builder hostName(String hostName) {
+ this.hostName = hostName;
+ return this;
+ }
+ public Builder trustStore(String location, String password, String type) {
+ this.trustStore = location;
+ this.trustStorePassword = password;
+ this.trustStoreType = type;
+ return this;
+ }
+
+ public Builder keyStore(String location, String password, String type) {
+ this.keyStore = location;
+ this.keyStorePassword = password;
+ this.keyStoreType = type;
+ return this;
+ }
+
+ public Builder keyPassword(String password) {
+ this.keyPassword = password;
+ return this;
+ }
+
+ /**
+ * Specify whether the server should authorize the client in SSL
+ * connections.
+ */
+ public Builder needsClientAuth(boolean value) {
+ this.needsClientAuth = value;
+ return this;
+ }
+
+ /**
+ * Use addEndpoint() instead.
+ */
+ @Deprecated
public Builder setBindAddress(String bindAddress){
this.bindAddress = bindAddress;
return this;
}
-
+
+ /**
+ * Use addEndpoint() instead.
+ */
+ @Deprecated
public Builder setPort(int port) {
this.port = port;
return this;
@@ -204,25 +295,70 @@ public class HttpServer implements Filte
if (this.name == null) {
throw new HadoopIllegalArgumentException("name is not set");
}
- if (this.bindAddress == null) {
- throw new HadoopIllegalArgumentException("bindAddress is not set");
+
+ // Make the behavior compatible with deprecated interfaces
+ if (bindAddress != null && port != -1) {
+ try {
+ endpoints.add(0, new URI("http", "", bindAddress, port, "", "", ""));
+ } catch (URISyntaxException e) {
+ throw new HadoopIllegalArgumentException("Invalid endpoint: "+ e);
+ }
}
- if (this.port == null) {
- throw new HadoopIllegalArgumentException("port is not set");
+
+ if (endpoints.size() == 0) {
+ throw new HadoopIllegalArgumentException("No endpoints specified");
}
- if (this.findPort == null) {
- throw new HadoopIllegalArgumentException("findPort is not set");
+
+ if (hostName == null) {
+ hostName = endpoints.get(0).getHost();
}
if (this.conf == null) {
conf = new Configuration();
}
- HttpServer server = new HttpServer(this.name, this.bindAddress, this.port,
- this.findPort, this.conf, this.adminsAcl, this.connector, this.pathSpecs);
+ HttpServer server = new HttpServer(this);
+
if (this.securityEnabled) {
- server.initSpnego(this.conf, this.usernameConfKey, this.keytabConfKey);
+ server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
+ }
+
+ if (connector != null) {
+ server.addUnmanagedListener(connector);
+ }
+
+ for (URI ep : endpoints) {
+ Connector listener = null;
+ String scheme = ep.getScheme();
+ if ("http".equals(scheme)) {
+ listener = HttpServer.createDefaultChannelConnector();
+ } else if ("https".equals(scheme)) {
+ SslSocketConnector c = new SslSocketConnector();
+ c.setNeedClientAuth(needsClientAuth);
+ c.setKeyPassword(keyPassword);
+
+ if (keyStore != null) {
+ c.setKeystore(keyStore);
+ c.setKeystoreType(keyStoreType);
+ c.setPassword(keyStorePassword);
+ }
+
+ if (trustStore != null) {
+ c.setTruststore(trustStore);
+ c.setTruststoreType(trustStoreType);
+ c.setTrustPassword(trustStorePassword);
+ }
+ listener = c;
+
+ } else {
+ throw new HadoopIllegalArgumentException(
+ "unknown scheme for endpoint:" + ep);
+ }
+ listener.setHost(ep.getHost());
+ listener.setPort(ep.getPort() == -1 ? 0 : ep.getPort());
+ server.addManagedListener(listener);
}
+ server.loadListeners();
return server;
}
}
@@ -233,7 +369,7 @@ public class HttpServer implements Filte
) throws IOException {
this(name, bindAddress, port, findPort, new Configuration());
}
-
+
@Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, Connector connector) throws IOException {
@@ -314,51 +450,39 @@ public class HttpServer implements Filte
* @param pathSpecs Path specifications that this httpserver will be serving.
* These will be added to any filters.
*/
+ @Deprecated
public HttpServer(String name, String bindAddress, int port,
boolean findPort, Configuration conf, AccessControlList adminsAcl,
Connector connector, String[] pathSpecs) throws IOException {
- webServer = new Server();
- this.findPort = findPort;
- this.adminsAcl = adminsAcl;
-
- if(connector == null) {
- listenerStartedExternally = false;
- if (HttpConfig.isSecure()) {
- sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, conf);
- try {
- sslFactory.init();
- } catch (GeneralSecurityException ex) {
- throw new IOException(ex);
- }
- SslSocketConnector sslListener = new SslSocketConnector() {
- @Override
- protected SSLServerSocketFactory createFactory() throws Exception {
- return sslFactory.createSSLServerSocketFactory();
- }
- };
- listener = sslListener;
- } else {
- listener = createBaseListener(conf);
- }
- listener.setHost(bindAddress);
- listener.setPort(port);
- LOG.info("SSL is enabled on " + toString());
- } else {
- listenerStartedExternally = true;
- listener = connector;
- }
-
- webServer.addConnector(listener);
+ this(new Builder().setName(name)
+ .addEndpoint(URI.create("http://" + bindAddress + ":" + port))
+ .setFindPort(findPort).setConf(conf).setACL(adminsAcl)
+ .setConnector(connector).setPathSpec(pathSpecs));
+ }
+
+ private HttpServer(final Builder b) throws IOException {
+ final String appDir = getWebAppsPath(b.name);
+ this.webServer = new Server();
+ this.adminsAcl = b.adminsAcl;
+ this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir);
+ this.findPort = b.findPort;
+ initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs);
+ }
+
+ private void initializeWebServer(String name, String hostName,
+ Configuration conf, String[] pathSpecs)
+ throws FileNotFoundException, IOException {
+
+ Preconditions.checkNotNull(webAppContext);
int maxThreads = conf.getInt(HTTP_MAX_THREADS, -1);
// If HTTP_MAX_THREADS is not configured, QueueThreadPool() will use the
// default value (currently 250).
- QueuedThreadPool threadPool = maxThreads == -1 ?
- new QueuedThreadPool() : new QueuedThreadPool(maxThreads);
+ QueuedThreadPool threadPool = maxThreads == -1 ? new QueuedThreadPool()
+ : new QueuedThreadPool(maxThreads);
threadPool.setDaemon(true);
webServer.setThreadPool(threadPool);
- final String appDir = getWebAppsPath(name);
ContextHandlerCollection contexts = new ContextHandlerCollection();
RequestLog requestLog = HttpRequestLog.getRequestLog(name);
@@ -366,30 +490,24 @@ public class HttpServer implements Filte
RequestLogHandler requestLogHandler = new RequestLogHandler();
requestLogHandler.setRequestLog(requestLog);
HandlerCollection handlers = new HandlerCollection();
- handlers.setHandlers(new Handler[] {requestLogHandler, contexts});
+ handlers.setHandlers(new Handler[] { requestLogHandler, contexts });
webServer.setHandler(handlers);
- }
- else {
+ } else {
webServer.setHandler(contexts);
}
- webAppContext = new WebAppContext();
- webAppContext.setDisplayName(name);
- webAppContext.setContextPath("/");
- webAppContext.setWar(appDir + "/" + name);
- webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
- webAppContext.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
- addNoCacheFilter(webAppContext);
+ final String appDir = getWebAppsPath(name);
+
webServer.addHandler(webAppContext);
addDefaultApps(contexts, appDir, conf);
-
+
addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
- final FilterInitializer[] initializers = getFilterInitializers(conf);
+ final FilterInitializer[] initializers = getFilterInitializers(conf);
if (initializers != null) {
conf = new Configuration(conf);
- conf.set(BIND_ADDRESS, bindAddress);
- for(FilterInitializer c : initializers) {
+ conf.set(BIND_ADDRESS, hostName);
+ for (FilterInitializer c : initializers) {
c.initFilter(this, conf);
}
}
@@ -404,10 +522,29 @@ public class HttpServer implements Filte
}
}
- @SuppressWarnings("unchecked")
- private void addNoCacheFilter(WebAppContext ctxt) {
- defineFilter(ctxt, NO_CACHE_FILTER,
- NoCacheFilter.class.getName(), Collections.EMPTY_MAP, new String[] { "/*"});
+ private void addUnmanagedListener(Connector connector) {
+ listeners.add(new ListenerInfo(false, connector));
+ }
+
+ private void addManagedListener(Connector connector) {
+ listeners.add(new ListenerInfo(true, connector));
+ }
+
+ private static WebAppContext createWebAppContext(String name,
+ Configuration conf, AccessControlList adminsAcl, final String appDir) {
+ WebAppContext ctx = new WebAppContext();
+ ctx.setDisplayName(name);
+ ctx.setContextPath("/");
+ ctx.setWar(appDir + "/" + name);
+ ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
+ ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
+ addNoCacheFilter(ctx);
+ return ctx;
+ }
+
+ private static void addNoCacheFilter(WebAppContext ctxt) {
+ defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(),
+ Collections.<String, String> emptyMap(), new String[] { "/*" });
}
/**
@@ -651,7 +788,7 @@ public class HttpServer implements Filte
/**
* Define a filter for a context and set up default url mappings.
*/
- public void defineFilter(Context ctx, String name,
+ public static void defineFilter(Context ctx, String name,
String classname, Map<String,String> parameters, String[] urls) {
FilterHolder holder = new FilterHolder();
@@ -715,93 +852,47 @@ public class HttpServer implements Filte
* Get the port that the server is on
* @return the port
*/
+ @Deprecated
public int getPort() {
return webServer.getConnectors()[0].getLocalPort();
}
/**
- * Get the port that corresponds to a particular connector. In the case of
- * HDFS, the second connector corresponds to the HTTPS connector.
+ * Get the address that corresponds to a particular connector.
*
- * @return the corresponding port for the connector, or -1 if there's no such
- * connector.
+ * @return the corresponding address for the connector, or null if there's no
+ * such connector or the connector is not bounded.
*/
- public int getConnectorPort(int index) {
+ public InetSocketAddress getConnectorAddress(int index) {
Preconditions.checkArgument(index >= 0);
- return index < webServer.getConnectors().length ?
- webServer.getConnectors()[index].getLocalPort() : -1;
+ if (index > webServer.getConnectors().length)
+ return null;
+
+ Connector c = webServer.getConnectors()[index];
+ if (c.getLocalPort() == -1) {
+ // The connector is not bounded
+ return null;
+ }
+
+ return new InetSocketAddress(c.getHost(), c.getLocalPort());
}
/**
* Set the min, max number of worker threads (simultaneous connections).
*/
public void setThreads(int min, int max) {
- QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool() ;
+ QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
pool.setMinThreads(min);
pool.setMaxThreads(max);
}
- /**
- * Configure an ssl listener on the server.
- * @param addr address to listen on
- * @param keystore location of the keystore
- * @param storPass password for the keystore
- * @param keyPass password for the key
- * @deprecated Use {@link #addSslListener(InetSocketAddress, Configuration, boolean)}
- */
- @Deprecated
- public void addSslListener(InetSocketAddress addr, String keystore,
- String storPass, String keyPass) throws IOException {
- if (webServer.isStarted()) {
- throw new IOException("Failed to add ssl listener");
- }
- SslSocketConnector sslListener = new SslSocketConnector();
- sslListener.setHost(addr.getHostName());
- sslListener.setPort(addr.getPort());
- sslListener.setKeystore(keystore);
- sslListener.setPassword(storPass);
- sslListener.setKeyPassword(keyPass);
- webServer.addConnector(sslListener);
- }
-
- /**
- * Configure an ssl listener on the server.
- * @param addr address to listen on
- * @param sslConf conf to retrieve ssl options
- * @param needCertsAuth whether x509 certificate authentication is required
- */
- public void addSslListener(InetSocketAddress addr, Configuration sslConf,
- boolean needCertsAuth) throws IOException {
- if (webServer.isStarted()) {
- throw new IOException("Failed to add ssl listener");
- }
- if (needCertsAuth) {
- // setting up SSL truststore for authenticating clients
- System.setProperty("javax.net.ssl.trustStore", sslConf.get(
- "ssl.server.truststore.location", ""));
- System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get(
- "ssl.server.truststore.password", ""));
- System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
- "ssl.server.truststore.type", "jks"));
- }
- SslSocketConnector sslListener = new SslSocketConnector();
- sslListener.setHost(addr.getHostName());
- sslListener.setPort(addr.getPort());
- sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
- sslListener.setPassword(sslConf.get("ssl.server.keystore.password", ""));
- sslListener.setKeyPassword(sslConf.get("ssl.server.keystore.keypassword", ""));
- sslListener.setKeystoreType(sslConf.get("ssl.server.keystore.type", "jks"));
- sslListener.setNeedClientAuth(needCertsAuth);
- webServer.addConnector(sslListener);
- }
-
- protected void initSpnego(Configuration conf,
+ private void initSpnego(Configuration conf, String hostName,
String usernameConfKey, String keytabConfKey) throws IOException {
Map<String, String> params = new HashMap<String, String>();
String principalInConf = conf.get(usernameConfKey);
if (principalInConf != null && !principalInConf.isEmpty()) {
- params.put("kerberos.principal",
- SecurityUtil.getServerPrincipal(principalInConf, listener.getHost()));
+ params.put("kerberos.principal", SecurityUtil.getServerPrincipal(
+ principalInConf, hostName));
}
String httpKeytab = conf.get(keytabConfKey);
if (httpKeytab != null && !httpKeytab.isEmpty()) {
@@ -819,8 +910,7 @@ public class HttpServer implements Filte
public void start() throws IOException {
try {
try {
- openListener();
- LOG.info("Jetty bound to port " + listener.getLocalPort());
+ openListeners();
webServer.start();
} catch (IOException ex) {
LOG.info("HttpServer.start() threw a non Bind IOException", ex);
@@ -856,50 +946,45 @@ public class HttpServer implements Filte
}
}
+ private void loadListeners() {
+ for (ListenerInfo li : listeners) {
+ webServer.addConnector(li.listener);
+ }
+ }
+
/**
* Open the main listener for the server
* @throws Exception
*/
- void openListener() throws Exception {
- if (listener.getLocalPort() != -1) { // it's already bound
- return;
- }
- if (listenerStartedExternally) { // Expect that listener was started securely
- throw new Exception("Expected webserver's listener to be started " +
- "previously but wasn't");
- }
- int port = listener.getPort();
- while (true) {
- // jetty has a bug where you can't reopen a listener that previously
- // failed to open w/o issuing a close first, even if the port is changed
- try {
- listener.close();
- listener.open();
- break;
- } catch (BindException ex) {
- if (port == 0 || !findPort) {
- BindException be = new BindException(
- "Port in use: " + listener.getHost() + ":" + listener.getPort());
- be.initCause(ex);
- throw be;
+ void openListeners() throws Exception {
+ for (ListenerInfo li : listeners) {
+ Connector listener = li.listener;
+ if (!li.isManaged || li.listener.getLocalPort() != -1) {
+ // This listener is either started externally or has been bound
+ continue;
+ }
+ int port = listener.getPort();
+ while (true) {
+ // jetty has a bug where you can't reopen a listener that previously
+ // failed to open w/o issuing a close first, even if the port is changed
+ try {
+ listener.close();
+ listener.open();
+ LOG.info("Jetty bound to port " + listener.getLocalPort());
+ break;
+ } catch (BindException ex) {
+ if (port == 0 || !findPort) {
+ BindException be = new BindException("Port in use: "
+ + listener.getHost() + ":" + listener.getPort());
+ be.initCause(ex);
+ throw be;
+ }
}
+ // try the next port number
+ listener.setPort(++port);
+ Thread.sleep(100);
}
- // try the next port number
- listener.setPort(++port);
- Thread.sleep(100);
- }
- }
-
- /**
- * Return the bind address of the listener.
- * @return InetSocketAddress of the listener
- */
- public InetSocketAddress getListenerAddress() {
- int port = listener.getLocalPort();
- if (port == -1) { // not bound, return requested port
- port = listener.getPort();
}
- return new InetSocketAddress(listener.getHost(), port);
}
/**
@@ -907,22 +992,19 @@ public class HttpServer implements Filte
*/
public void stop() throws Exception {
MultiException exception = null;
- try {
- listener.close();
- } catch (Exception e) {
- LOG.error("Error while stopping listener for webapp"
- + webAppContext.getDisplayName(), e);
- exception = addMultiException(exception, e);
- }
+ for (ListenerInfo li : listeners) {
+ if (!li.isManaged) {
+ continue;
+ }
- try {
- if (sslFactory != null) {
- sslFactory.destroy();
+ try {
+ li.listener.close();
+ } catch (Exception e) {
+ LOG.error(
+ "Error while stopping listener for webapp"
+ + webAppContext.getDisplayName(), e);
+ exception = addMultiException(exception, e);
}
- } catch (Exception e) {
- LOG.error("Error while destroying the SSLFactory"
- + webAppContext.getDisplayName(), e);
- exception = addMultiException(exception, e);
}
try {
@@ -934,6 +1016,7 @@ public class HttpServer implements Filte
+ webAppContext.getDisplayName(), e);
exception = addMultiException(exception, e);
}
+
try {
webServer.stop();
} catch (Exception e) {
@@ -974,10 +1057,17 @@ public class HttpServer implements Filte
*/
@Override
public String toString() {
- return listener != null ?
- ("HttpServer at http://" + listener.getHost() + ":" + listener.getLocalPort() + "/"
- + (isAlive() ? STATE_DESCRIPTION_ALIVE : STATE_DESCRIPTION_NOT_LIVE))
- : "Inactive HttpServer";
+ if (listeners.size() == 0) {
+ return "Inactive HttpServer";
+ } else {
+ StringBuilder sb = new StringBuilder("HttpServer (")
+ .append(isAlive() ? STATE_DESCRIPTION_ALIVE : STATE_DESCRIPTION_NOT_LIVE).append("), listening at:");
+ for (ListenerInfo li : listeners) {
+ Connector l = li.listener;
+ sb.append(l.getHost()).append(":").append(l.getPort()).append("/,");
+ }
+ return sb.toString();
+ }
}
/**
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Mon Dec 2 17:41:44 2013
@@ -142,6 +142,10 @@ public class NativeIO {
NativeIO.POSIX.posixFadviseIfPossible(identifier, fd, offset,
len, flags);
}
+
+ public boolean verifyCanMlock() {
+ return NativeIO.isAvailable();
+ }
}
/**
@@ -163,6 +167,10 @@ public class NativeIO {
public long getOperatingSystemPageSize() {
return 4096;
}
+
+ public boolean verifyCanMlock() {
+ return true;
+ }
}
static {
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Mon Dec 2 17:41:44 2013
@@ -634,7 +634,7 @@ public class RPC {
} catch (IOException e) {
LOG.error("Closing proxy or invocation handler caused exception", e);
} catch (IllegalArgumentException e) {
- LOG.error("RPC.stopProxy called on non proxy.", e);
+ LOG.error("RPC.stopProxy called on non proxy: class=" + proxy.getClass().getName(), e);
}
// If you see this error on a mock object in a unit test you're
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java Mon Dec 2 17:41:44 2013
@@ -22,19 +22,14 @@ import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;
-import java.net.URL;
-import java.net.URLConnection;
import java.net.UnknownHostException;
-import java.security.AccessController;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.ServiceLoader;
-import java.util.Set;
-import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.kerberos.KerberosTicket;
@@ -44,22 +39,19 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
-import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenInfo;
-import com.google.common.annotations.VisibleForTesting;
//this will need to be replaced someday when there is a suitable replacement
import sun.net.dns.ResolverConfiguration;
import sun.net.util.IPAddressUtil;
+import com.google.common.annotations.VisibleForTesting;
+
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public class SecurityUtil {
@@ -73,24 +65,14 @@ public class SecurityUtil {
@VisibleForTesting
static HostResolver hostResolver;
- private static SSLFactory sslFactory;
-
static {
Configuration conf = new Configuration();
boolean useIp = conf.getBoolean(
- CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP,
- CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP_DEFAULT);
+ CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP,
+ CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP_DEFAULT);
setTokenServiceUseIp(useIp);
- if (HttpConfig.isSecure()) {
- sslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
- try {
- sslFactory.init();
- } catch (Exception ex) {
- throw new RuntimeException(ex);
- }
- }
}
-
+
/**
* For use only by tests and initialization
*/
@@ -103,29 +85,6 @@ public class SecurityUtil {
}
/**
- * Find the original TGT within the current subject's credentials. Cross-realm
- * TGT's of the form "krbtgt/TWO.COM@ONE.COM" may be present.
- *
- * @return The TGT from the current subject
- * @throws IOException
- * if TGT can't be found
- */
- private static KerberosTicket getTgtFromSubject() throws IOException {
- Subject current = Subject.getSubject(AccessController.getContext());
- if (current == null) {
- throw new IOException(
- "Can't get TGT from current Subject, because it is null");
- }
- Set<KerberosTicket> tickets = current
- .getPrivateCredentials(KerberosTicket.class);
- for (KerberosTicket t : tickets) {
- if (isOriginalTGT(t))
- return t;
- }
- throw new IOException("Failed to find TGT from current Subject:"+current);
- }
-
- /**
* TGS must have the server principal of the form "krbtgt/FOO@FOO".
* @param principal
* @return true or false
@@ -493,30 +452,6 @@ public class SecurityUtil {
}
/**
- * Open a (if need be) secure connection to a URL in a secure environment
- * that is using SPNEGO to authenticate its URLs. All Namenode and Secondary
- * Namenode URLs that are protected via SPNEGO should be accessed via this
- * method.
- *
- * @param url to authenticate via SPNEGO.
- * @return A connection that has been authenticated via SPNEGO
- * @throws IOException If unable to authenticate via SPNEGO
- */
- public static URLConnection openSecureHttpConnection(URL url) throws IOException {
- if (!HttpConfig.isSecure() && !UserGroupInformation.isSecurityEnabled()) {
- return url.openConnection();
- }
-
- AuthenticatedURL.Token token = new AuthenticatedURL.Token();
- try {
- return new AuthenticatedURL(null, sslFactory).openConnection(url, token);
- } catch (AuthenticationException e) {
- throw new IOException("Exception trying to open authenticated connection to "
- + url, e);
- }
- }
-
- /**
* Resolves a host subject to the security requirements determined by
* hadoop.security.token.service.use_ip.
*
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LightWeightGSet.java Mon Dec 2 17:41:44 2013
@@ -348,8 +348,11 @@ public class LightWeightGSet<K, E extend
LOG.info("Computing capacity for map " + mapName);
LOG.info("VM type = " + vmBit + "-bit");
- LOG.info(percentage + "% max memory = "
- + StringUtils.TraditionalBinaryPrefix.long2String(maxMemory, "B", 1));
+ LOG.info(percentage + "% max memory "
+ + StringUtils.TraditionalBinaryPrefix.long2String(maxMemory, "B", 1)
+ + " = "
+ + StringUtils.TraditionalBinaryPrefix.long2String((long) percentMemory,
+ "B", 1));
LOG.info("capacity = 2^" + exponent + " = " + c + " entries");
return c;
}
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java Mon Dec 2 17:41:44 2013
@@ -91,6 +91,7 @@ public abstract class FCStatisticsBaseTe
FSDataInputStream fstr = fc.open(filePath);
byte[] buf = new byte[blockSize];
int bytesRead = fstr.read(buf, 0, blockSize);
+ fstr.read(0, buf, 0, blockSize);
Assert.assertEquals(blockSize, bytesRead);
verifyReadBytes(stats);
verifyWrittenBytes(stats);
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java Mon Dec 2 17:41:44 2013
@@ -47,7 +47,8 @@ public class TestLocalFsFCStatistics ext
@Override
protected void verifyReadBytes(Statistics stats) {
- Assert.assertEquals(blockSize, stats.getBytesRead());
+ // one blockSize for read, one for pread
+ Assert.assertEquals(2*blockSize, stats.getBytesRead());
}
@Override
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/HttpServerFunctionalTest.java Mon Dec 2 17:41:44 2013
@@ -19,13 +19,16 @@
package org.apache.hadoop.http;
+import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpServer.Builder;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.net.URI;
import java.net.URL;
import java.net.MalformedURLException;
@@ -120,8 +123,9 @@ public class HttpServerFunctionalTest ex
public static HttpServer createServer(String host, int port)
throws IOException {
prepareTestWebapp();
- return new HttpServer.Builder().setName(TEST).setBindAddress(host)
- .setPort(port).setFindPort(true).build();
+ return new HttpServer.Builder().setName(TEST)
+ .addEndpoint(URI.create("http://" + host + ":" + port))
+ .setFindPort(true).build();
}
/**
@@ -131,8 +135,7 @@ public class HttpServerFunctionalTest ex
* @throws IOException if it could not be created
*/
public static HttpServer createServer(String webapp) throws IOException {
- return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
- .setPort(0).setFindPort(true).build();
+ return localServerBuilder(webapp).setFindPort(true).build();
}
/**
* Create an HttpServer instance for the given webapp
@@ -143,14 +146,17 @@ public class HttpServerFunctionalTest ex
*/
public static HttpServer createServer(String webapp, Configuration conf)
throws IOException {
- return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
- .setPort(0).setFindPort(true).setConf(conf).build();
+ return localServerBuilder(webapp).setFindPort(true).setConf(conf).build();
}
public static HttpServer createServer(String webapp, Configuration conf, AccessControlList adminsAcl)
throws IOException {
- return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
- .setPort(0).setFindPort(true).setConf(conf).setACL(adminsAcl).build();
+ return localServerBuilder(webapp).setFindPort(true).setConf(conf).setACL(adminsAcl).build();
+ }
+
+ private static Builder localServerBuilder(String webapp) {
+ return new HttpServer.Builder().setName(webapp).addEndpoint(
+ URI.create("http://localhost:0"));
}
/**
@@ -163,8 +169,7 @@ public class HttpServerFunctionalTest ex
*/
public static HttpServer createServer(String webapp, Configuration conf,
String[] pathSpecs) throws IOException {
- return new HttpServer.Builder().setName(webapp).setBindAddress("0.0.0.0")
- .setPort(0).setFindPort(true).setConf(conf).setPathSpec(pathSpecs).build();
+ return localServerBuilder(webapp).setFindPort(true).setConf(conf).setPathSpec(pathSpecs).build();
}
/**
@@ -201,8 +206,8 @@ public class HttpServerFunctionalTest ex
public static URL getServerURL(HttpServer server)
throws MalformedURLException {
assertNotNull("No server", server);
- int port = server.getPort();
- return new URL("http://localhost:" + port + "/");
+ return new URL("http://"
+ + NetUtils.getHostPortString(server.getConnectorAddress(0)));
}
/**
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java Mon Dec 2 17:41:44 2013
@@ -36,6 +36,7 @@ import javax.servlet.http.HttpServletReq
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
import org.junit.Test;
public class TestGlobalFilter extends HttpServerFunctionalTest {
@@ -125,7 +126,8 @@ public class TestGlobalFilter extends Ht
dataURL, streamFile, rootURL, allURL, outURL, logURL};
//access the urls
- final String prefix = "http://localhost:" + http.getPort();
+ final String prefix = "http://"
+ + NetUtils.getHostPortString(http.getConnectorAddress(0));
try {
for(int i = 0; i < urls.length; i++) {
access(prefix + urls[i]);
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java Mon Dec 2 17:41:44 2013
@@ -20,7 +20,7 @@ package org.apache.hadoop.http;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
-import java.net.InetSocketAddress;
+import java.net.URI;
import java.net.URL;
import java.util.Arrays;
import java.util.Enumeration;
@@ -53,6 +53,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.http.HttpServer.QuotingInputFilter.RequestQuoter;
import org.apache.hadoop.http.resource.JerseyResource;
+import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
import org.apache.hadoop.security.UserGroupInformation;
@@ -61,6 +62,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
+import org.mockito.internal.util.reflection.Whitebox;
+import org.mortbay.jetty.Connector;
import org.mortbay.util.ajax.JSON;
public class TestHttpServer extends HttpServerFunctionalTest {
@@ -362,11 +365,10 @@ public class TestHttpServer extends Http
MyGroupsProvider.mapping.put("userB", Arrays.asList("groupB"));
HttpServer myServer = new HttpServer.Builder().setName("test")
- .setBindAddress("0.0.0.0").setPort(0).setFindPort(true).build();
+ .addEndpoint(new URI("http://localhost:0")).setFindPort(true).build();
myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
myServer.start();
- int port = myServer.getPort();
- String serverURL = "http://localhost:" + port + "/";
+ String serverURL = "http://" + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
for (String servlet : new String[] { "conf", "logs", "stacks",
"logLevel", "metrics" }) {
for (String user : new String[] { "userA", "userB" }) {
@@ -404,12 +406,13 @@ public class TestHttpServer extends Http
MyGroupsProvider.mapping.put("userE", Arrays.asList("groupE"));
HttpServer myServer = new HttpServer.Builder().setName("test")
- .setBindAddress("0.0.0.0").setPort(0).setFindPort(true).setConf(conf)
+ .addEndpoint(new URI("http://localhost:0")).setFindPort(true).setConf(conf)
.setACL(new AccessControlList("userA,userB groupC,groupD")).build();
myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf);
myServer.start();
- int port = myServer.getPort();
- String serverURL = "http://localhost:" + port + "/";
+
+ String serverURL = "http://"
+ + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
for (String servlet : new String[] { "conf", "logs", "stacks",
"logLevel", "metrics" }) {
for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
@@ -520,20 +523,20 @@ public class TestHttpServer extends Http
}
@Test public void testBindAddress() throws Exception {
- checkBindAddress("0.0.0.0", 0, false).stop();
+ checkBindAddress("localhost", 0, false).stop();
// hang onto this one for a bit more testing
HttpServer myServer = checkBindAddress("localhost", 0, false);
HttpServer myServer2 = null;
try {
- int port = myServer.getListenerAddress().getPort();
+ int port = myServer.getConnectorAddress(0).getPort();
// it's already in use, true = expect a higher port
myServer2 = checkBindAddress("localhost", port, true);
// try to reuse the port
- port = myServer2.getListenerAddress().getPort();
+ port = myServer2.getConnectorAddress(0).getPort();
myServer2.stop();
- assertEquals(-1, myServer2.getPort()); // not bound
- myServer2.openListener();
- assertEquals(port, myServer2.getPort()); // expect same port
+ assertNull(myServer2.getConnectorAddress(0)); // not bound
+ myServer2.openListeners();
+ assertEquals(port, myServer2.getConnectorAddress(0).getPort()); // expect same port
} finally {
myServer.stop();
if (myServer2 != null) {
@@ -547,21 +550,24 @@ public class TestHttpServer extends Http
HttpServer server = createServer(host, port);
try {
// not bound, ephemeral should return requested port (0 for ephemeral)
- InetSocketAddress addr = server.getListenerAddress();
- assertEquals(port, addr.getPort());
+ List<?> listeners = (List<?>) Whitebox.getInternalState(server,
+ "listeners");
+ Connector listener = (Connector) Whitebox.getInternalState(
+ listeners.get(0), "listener");
+
+ assertEquals(port, listener.getPort());
// verify hostname is what was given
- server.openListener();
- addr = server.getListenerAddress();
- assertEquals(host, addr.getHostName());
+ server.openListeners();
+ assertEquals(host, server.getConnectorAddress(0).getHostName());
- int boundPort = addr.getPort();
+ int boundPort = server.getConnectorAddress(0).getPort();
if (port == 0) {
assertTrue(boundPort != 0); // ephemeral should now return bound port
} else if (findPort) {
assertTrue(boundPort > port);
// allow a little wiggle room to prevent random test failures if
// some consecutive ports are already in use
- assertTrue(addr.getPort() - port < 8);
+ assertTrue(boundPort - port < 8);
}
} catch (Exception e) {
server.stop();
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java Mon Dec 2 17:41:44 2013
@@ -36,6 +36,7 @@ import javax.servlet.http.HttpServletReq
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
import org.junit.Test;
public class TestPathFilter extends HttpServerFunctionalTest {
@@ -126,7 +127,8 @@ public class TestPathFilter extends Http
// access the urls and verify our paths specs got added to the
// filters
- final String prefix = "http://localhost:" + http.getPort();
+ final String prefix = "http://"
+ + NetUtils.getHostPortString(http.getConnectorAddress(0));
try {
for(int i = 0; i < filteredUrls.length; i++) {
access(prefix + filteredUrls[i]);
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java Mon Dec 2 17:41:44 2013
@@ -17,105 +17,101 @@
*/
package org.apache.hadoop.http;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URL;
+
+import javax.net.ssl.HttpsURLConnection;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
import org.apache.hadoop.security.ssl.SSLFactory;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
import org.junit.Test;
-import javax.net.ssl.HttpsURLConnection;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.InputStream;
-import java.io.Writer;
-import java.net.URL;
-
/**
* This testcase issues SSL certificates configures the HttpServer to serve
* HTTPS using the created certficates and calls an echo servlet using the
* corresponding HTTPS URL.
*/
public class TestSSLHttpServer extends HttpServerFunctionalTest {
- private static final String CONFIG_SITE_XML = "sslhttpserver-site.xml";
-
- private static final String BASEDIR =
- System.getProperty("test.build.dir", "target/test-dir") + "/" +
- TestSSLHttpServer.class.getSimpleName();
+ private static final String BASEDIR = System.getProperty("test.build.dir",
+ "target/test-dir") + "/" + TestSSLHttpServer.class.getSimpleName();
- static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
+ private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
+ private static Configuration conf;
private static HttpServer server;
private static URL baseUrl;
+ private static String keystoresDir;
+ private static String sslConfDir;
+ private static SSLFactory clientSslFactory;
+
+ @BeforeClass
+ public static void setup() throws Exception {
+ conf = new Configuration();
+ conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);
-
- @Before
- public void setup() throws Exception {
- HttpConfig.setPolicy(HttpConfig.Policy.HTTPS_ONLY);
File base = new File(BASEDIR);
FileUtil.fullyDelete(base);
base.mkdirs();
- String classpathDir =
- KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
- Configuration conf = new Configuration();
- String keystoresDir = new File(BASEDIR).getAbsolutePath();
- String sslConfsDir =
- KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
- KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfsDir, conf, false);
- conf.setBoolean(CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY, true);
-
- //we do this trick because the MR AppMaster is started in another VM and
- //the HttpServer configuration is not loaded from the job.xml but from the
- //site.xml files in the classpath
- Writer writer = new FileWriter(new File(classpathDir, CONFIG_SITE_XML));
- conf.writeXml(writer);
- writer.close();
+ keystoresDir = new File(BASEDIR).getAbsolutePath();
+ sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
- conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);
- conf.addResource(CONFIG_SITE_XML);
- server = createServer("test", conf);
+ KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
+ Configuration sslConf = new Configuration(false);
+ sslConf.addResource("ssl-server.xml");
+ sslConf.addResource("ssl-client.xml");
+
+ clientSslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, sslConf);
+ clientSslFactory.init();
+
+ server = new HttpServer.Builder()
+ .setName("test")
+ .addEndpoint(new URI("https://localhost"))
+ .setConf(conf)
+ .keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
+ .keyStore(sslConf.get("ssl.server.keystore.location"),
+ sslConf.get("ssl.server.keystore.password"),
+ sslConf.get("ssl.server.keystore.type", "jks"))
+ .trustStore(sslConf.get("ssl.server.truststore.location"),
+ sslConf.get("ssl.server.truststore.password"),
+ sslConf.get("ssl.server.truststore.type", "jks")).build();
server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
server.start();
- baseUrl = new URL("https://localhost:" + server.getPort() + "/");
- LOG.info("HTTP server started: "+ baseUrl);
+ baseUrl = new URL("https://"
+ + NetUtils.getHostPortString(server.getConnectorAddress(0)));
+ LOG.info("HTTP server started: " + baseUrl);
}
- @After
- public void cleanup() throws Exception {
+ @AfterClass
+ public static void cleanup() throws Exception {
server.stop();
- String classpathDir =
- KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
- new File(classpathDir, CONFIG_SITE_XML).delete();
- HttpConfig.setPolicy(HttpConfig.Policy.HTTP_ONLY);
+ FileUtil.fullyDelete(new File(BASEDIR));
+ KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
+ clientSslFactory.destroy();
}
-
@Test
public void testEcho() throws Exception {
- assertEquals("a:b\nc:d\n",
- readOut(new URL(baseUrl, "/echo?a=b&c=d")));
- assertEquals("a:b\nc<:d\ne:>\n",
- readOut(new URL(baseUrl, "/echo?a=b&c<=d&e=>")));
+ assertEquals("a:b\nc:d\n", readOut(new URL(baseUrl, "/echo?a=b&c=d")));
+ assertEquals("a:b\nc<:d\ne:>\n", readOut(new URL(baseUrl,
+ "/echo?a=b&c<=d&e=>")));
}
private static String readOut(URL url) throws Exception {
- StringBuilder out = new StringBuilder();
HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
- Configuration conf = new Configuration();
- conf.addResource(CONFIG_SITE_XML);
- SSLFactory sslf = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
- sslf.init();
- conn.setSSLSocketFactory(sslf.createSSLSocketFactory());
+ conn.setSSLSocketFactory(clientSslFactory.createSSLSocketFactory());
InputStream in = conn.getInputStream();
- byte[] buffer = new byte[64 * 1024];
- int len = in.read(buffer);
- while (len > 0) {
- out.append(new String(buffer, 0, len));
- len = in.read(buffer);
- }
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ IOUtils.copyBytes(in, out, 1024);
return out.toString();
}
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java Mon Dec 2 17:41:44 2013
@@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletReq
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;
@@ -125,7 +126,8 @@ public class TestServletFilter extends H
}
//access the urls as the sequence
- final String prefix = "http://localhost:" + http.getPort();
+ final String prefix = "http://"
+ + NetUtils.getHostPortString(http.getConnectorAddress(0));
try {
for(int i = 0; i < sequence.length; i++) {
access(prefix + urls[sequence[i]]);
@@ -185,7 +187,7 @@ public class TestServletFilter extends H
throws Exception {
Configuration conf = new Configuration();
HttpServer http = createTestServer(conf);
- http.defineFilter(http.webAppContext,
+ HttpServer.defineFilter(http.webAppContext,
"ErrorFilter", ErrorFilter.class.getName(),
null, null);
try {
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java Mon Dec 2 17:41:44 2013
@@ -21,8 +21,10 @@ import java.io.*;
import java.net.*;
import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.net.NetUtils;
import junit.framework.TestCase;
+
import org.apache.commons.logging.*;
import org.apache.commons.logging.impl.*;
import org.apache.log4j.*;
@@ -43,15 +45,16 @@ public class TestLogLevel extends TestCa
assertTrue(!Level.ERROR.equals(log.getEffectiveLevel()));
HttpServer server = new HttpServer.Builder().setName("..")
- .setBindAddress("localhost").setPort(22222).setFindPort(true)
+ .addEndpoint(new URI("http://localhost:0")).setFindPort(true)
.build();
server.start();
- int port = server.getPort();
+ String authority = NetUtils.getHostPortString(server
+ .getConnectorAddress(0));
//servlet
- URL url = new URL("http://localhost:" + port
- + "/logLevel?log=" + logName + "&level=" + Level.ERROR);
+ URL url = new URL("http://" + authority + "/logLevel?log=" + logName
+ + "&level=" + Level.ERROR);
out.println("*** Connecting to " + url);
URLConnection connection = url.openConnection();
connection.connect();
@@ -67,7 +70,7 @@ public class TestLogLevel extends TestCa
assertTrue(Level.ERROR.equals(log.getEffectiveLevel()));
//command line
- String[] args = {"-setlevel", "localhost:"+port, logName,""+Level.DEBUG};
+ String[] args = {"-setlevel", authority, logName, Level.DEBUG.toString()};
LogLevel.main(args);
log.debug("log.debug3");
log.info("log.info3");
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java Mon Dec 2 17:41:44 2013
@@ -22,7 +22,6 @@ import org.apache.hadoop.oncrpc.RpcUtil;
import org.apache.hadoop.oncrpc.XDR;
import org.apache.hadoop.oncrpc.security.CredentialsNone;
import org.apache.hadoop.oncrpc.security.VerifierNone;
-import org.apache.hadoop.portmap.PortmapInterface.Procedure;
/**
* Helper utility for building portmap request
@@ -37,7 +36,7 @@ public class PortmapRequest {
RpcCall call = RpcCall.getInstance(
RpcUtil.getNewXid(String.valueOf(RpcProgramPortmap.PROGRAM)),
RpcProgramPortmap.PROGRAM, RpcProgramPortmap.VERSION,
- Procedure.PMAPPROC_SET.getValue(), new CredentialsNone(),
+ RpcProgramPortmap.PMAPPROC_SET, new CredentialsNone(),
new VerifierNone());
call.write(request);
return mapping.serialize(request);
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java Mon Dec 2 17:41:44 2013
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.portmap;
-import java.util.HashMap;
+import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -40,20 +40,26 @@ import org.jboss.netty.handler.timeout.I
import org.jboss.netty.handler.timeout.IdleStateAwareChannelUpstreamHandler;
import org.jboss.netty.handler.timeout.IdleStateEvent;
-final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler implements PortmapInterface {
+final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler {
static final int PROGRAM = 100000;
static final int VERSION = 2;
+
+ static final int PMAPPROC_NULL = 0;
+ static final int PMAPPROC_SET = 1;
+ static final int PMAPPROC_UNSET = 2;
+ static final int PMAPPROC_GETPORT = 3;
+ static final int PMAPPROC_DUMP = 4;
+ static final int PMAPPROC_GETVERSADDR = 9;
+
private static final Log LOG = LogFactory.getLog(RpcProgramPortmap.class);
- /** Map synchronized usis monitor lock of this instance */
- private final HashMap<String, PortmapMapping> map;
+ private final ConcurrentHashMap<String, PortmapMapping> map = new ConcurrentHashMap<String, PortmapMapping>();
/** ChannelGroup that remembers all active channels for gracefully shutdown. */
private final ChannelGroup allChannels;
RpcProgramPortmap(ChannelGroup allChannels) {
this.allChannels = allChannels;
- map = new HashMap<String, PortmapMapping>(256);
PortmapMapping m = new PortmapMapping(PROGRAM, VERSION,
PortmapMapping.TRANSPORT_TCP, RpcProgram.RPCB_PORT);
PortmapMapping m1 = new PortmapMapping(PROGRAM, VERSION,
@@ -61,48 +67,66 @@ final class RpcProgramPortmap extends Id
map.put(PortmapMapping.key(m), m);
map.put(PortmapMapping.key(m1), m1);
}
-
- @Override
- public XDR nullOp(int xid, XDR in, XDR out) {
+
+ /**
+ * This procedure does no work. By convention, procedure zero of any protocol
+ * takes no parameters and returns no results.
+ */
+ private XDR nullOp(int xid, XDR in, XDR out) {
return PortmapResponse.voidReply(out, xid);
}
- @Override
- public XDR set(int xid, XDR in, XDR out) {
+ /**
+ * When a program first becomes available on a machine, it registers itself
+ * with the port mapper program on the same machine. The program passes its
+ * program number "prog", version number "vers", transport protocol number
+ * "prot", and the port "port" on which it awaits service request. The
+ * procedure returns a boolean reply whose value is "TRUE" if the procedure
+ * successfully established the mapping and "FALSE" otherwise. The procedure
+ * refuses to establish a mapping if one already exists for the tuple
+ * "(prog, vers, prot)".
+ */
+ private XDR set(int xid, XDR in, XDR out) {
PortmapMapping mapping = PortmapRequest.mapping(in);
String key = PortmapMapping.key(mapping);
if (LOG.isDebugEnabled()) {
LOG.debug("Portmap set key=" + key);
}
- PortmapMapping value = null;
- synchronized(this) {
- map.put(key, mapping);
- value = map.get(key);
- }
- return PortmapResponse.intReply(out, xid, value.getPort());
+ map.put(key, mapping);
+ return PortmapResponse.intReply(out, xid, mapping.getPort());
}
- @Override
- public synchronized XDR unset(int xid, XDR in, XDR out) {
+ /**
+ * When a program becomes unavailable, it should unregister itself with the
+ * port mapper program on the same machine. The parameters and results have
+ * meanings identical to those of "PMAPPROC_SET". The protocol and port number
+ * fields of the argument are ignored.
+ */
+ private XDR unset(int xid, XDR in, XDR out) {
PortmapMapping mapping = PortmapRequest.mapping(in);
- synchronized(this) {
- map.remove(PortmapMapping.key(mapping));
- }
+ String key = PortmapMapping.key(mapping);
+
+ if (LOG.isDebugEnabled())
+ LOG.debug("Portmap remove key=" + key);
+
+ map.remove(key);
return PortmapResponse.booleanReply(out, xid, true);
}
- @Override
- public synchronized XDR getport(int xid, XDR in, XDR out) {
+ /**
+ * Given a program number "prog", version number "vers", and transport
+ * protocol number "prot", this procedure returns the port number on which the
+ * program is awaiting call requests. A port value of zeros means the program
+ * has not been registered. The "port" field of the argument is ignored.
+ */
+ private XDR getport(int xid, XDR in, XDR out) {
PortmapMapping mapping = PortmapRequest.mapping(in);
String key = PortmapMapping.key(mapping);
if (LOG.isDebugEnabled()) {
LOG.debug("Portmap GETPORT key=" + key + " " + mapping);
}
- PortmapMapping value = null;
- synchronized(this) {
- value = map.get(key);
- }
+ PortmapMapping value = map.get(key);
int res = 0;
if (value != null) {
res = value.getPort();
@@ -115,13 +139,13 @@ final class RpcProgramPortmap extends Id
return PortmapResponse.intReply(out, xid, res);
}
- @Override
- public synchronized XDR dump(int xid, XDR in, XDR out) {
- PortmapMapping[] pmapList = null;
- synchronized(this) {
- pmapList = new PortmapMapping[map.values().size()];
- map.values().toArray(pmapList);
- }
+ /**
+ * This procedure enumerates all entries in the port mapper's database. The
+ * procedure takes no parameters and returns a list of program, version,
+ * protocol, and port values.
+ */
+ private XDR dump(int xid, XDR in, XDR out) {
+ PortmapMapping[] pmapList = map.values().toArray(new PortmapMapping[0]);
return PortmapResponse.pmapList(out, xid, pmapList);
}
@@ -131,23 +155,23 @@ final class RpcProgramPortmap extends Id
RpcInfo info = (RpcInfo) e.getMessage();
RpcCall rpcCall = (RpcCall) info.header();
- final Procedure portmapProc = Procedure.fromValue(rpcCall.getProcedure());
+ final int portmapProc = rpcCall.getProcedure();
int xid = rpcCall.getXid();
XDR in = new XDR(info.data().toByteBuffer().asReadOnlyBuffer(),
XDR.State.READING);
XDR out = new XDR();
- if (portmapProc == Procedure.PMAPPROC_NULL) {
+ if (portmapProc == PMAPPROC_NULL) {
out = nullOp(xid, in, out);
- } else if (portmapProc == Procedure.PMAPPROC_SET) {
+ } else if (portmapProc == PMAPPROC_SET) {
out = set(xid, in, out);
- } else if (portmapProc == Procedure.PMAPPROC_UNSET) {
+ } else if (portmapProc == PMAPPROC_UNSET) {
out = unset(xid, in, out);
- } else if (portmapProc == Procedure.PMAPPROC_DUMP) {
+ } else if (portmapProc == PMAPPROC_DUMP) {
out = dump(xid, in, out);
- } else if (portmapProc == Procedure.PMAPPROC_GETPORT) {
+ } else if (portmapProc == PMAPPROC_GETPORT) {
out = getport(xid, in, out);
- } else if (portmapProc == Procedure.PMAPPROC_GETVERSADDR) {
+ } else if (portmapProc == PMAPPROC_GETVERSADDR) {
out = getport(xid, in, out);
} else {
LOG.info("PortmapHandler unknown rpc procedure=" + portmapProc);
@@ -161,7 +185,7 @@ final class RpcProgramPortmap extends Id
RpcResponse rsp = new RpcResponse(buf, info.remoteAddress());
RpcUtil.sendRpcResponse(ctx, rsp);
}
-
+
@Override
public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e)
throws Exception {
Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/portmap/TestPortmap.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/portmap/TestPortmap.java?rev=1547122&r1=1547121&r2=1547122&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/portmap/TestPortmap.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/portmap/TestPortmap.java Mon Dec 2 17:41:44 2013
@@ -23,7 +23,7 @@ import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.Socket;
-import java.util.HashMap;
+import java.util.Map;
import junit.framework.Assert;
@@ -80,7 +80,7 @@ public class TestPortmap {
XDR req = new XDR();
RpcCall.getInstance(++xid, RpcProgramPortmap.PROGRAM,
RpcProgramPortmap.VERSION,
- PortmapInterface.Procedure.PMAPPROC_SET.getValue(),
+ RpcProgramPortmap.PMAPPROC_SET,
new CredentialsNone(), new VerifierNone()).write(req);
PortmapMapping sent = new PortmapMapping(90000, 1,
@@ -101,7 +101,7 @@ public class TestPortmap {
Thread.sleep(100);
boolean found = false;
@SuppressWarnings("unchecked")
- HashMap<String, PortmapMapping> map = (HashMap<String, PortmapMapping>) Whitebox
+ Map<String, PortmapMapping> map = (Map<String, PortmapMapping>) Whitebox
.getInternalState(pm.getHandler(), "map");
for (PortmapMapping m : map.values()) {