You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by to...@apache.org on 2012/05/03 04:14:30 UTC
svn commit: r1333291 [3/4] - in
/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project: ./ conf/
dev-support/ hadoop-mapreduce-client/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/
hado...
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java Thu May 3 02:14:01 2012
@@ -30,7 +30,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.token.SecretManager;
@@ -62,11 +61,20 @@ public class RpcServerFactoryPBImpl impl
private RpcServerFactoryPBImpl() {
}
- @Override
public Server getServer(Class<?> protocol, Object instance,
InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager, int numHandlers)
throws YarnException {
+ return getServer(protocol, instance, addr, conf, secretManager, numHandlers,
+ null);
+ }
+
+ @Override
+ public Server getServer(Class<?> protocol, Object instance,
+ InetSocketAddress addr, Configuration conf,
+ SecretManager<? extends TokenIdentifier> secretManager, int numHandlers,
+ String portRangeConfig)
+ throws YarnException {
Constructor<?> constructor = serviceCache.get(protocol);
if (constructor == null) {
@@ -122,7 +130,7 @@ public class RpcServerFactoryPBImpl impl
try {
return createServer(pbProtocol, addr, conf, secretManager, numHandlers,
- (BlockingService)method.invoke(null, service));
+ (BlockingService)method.invoke(null, service), portRangeConfig);
} catch (InvocationTargetException e) {
throw new YarnException(e);
} catch (IllegalAccessException e) {
@@ -156,13 +164,13 @@ public class RpcServerFactoryPBImpl impl
private Server createServer(Class<?> pbProtocol, InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager, int numHandlers,
- BlockingService blockingService) throws IOException {
+ BlockingService blockingService, String portRangeConfig) throws IOException {
RPC.setProtocolEngine(conf, pbProtocol, ProtobufRpcEngine.class);
RPC.Server server = RPC.getServer(pbProtocol, blockingService,
addr.getHostName(), addr.getPort(), numHandlers, false, conf,
- secretManager);
+ secretManager, portRangeConfig);
LOG.info("Adding protocol "+pbProtocol.getCanonicalName()+" to the server");
- server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, pbProtocol, blockingService);
+ server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, pbProtocol, blockingService);
return server;
}
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/HadoopYarnProtoRPC.java Thu May 3 02:14:01 2012
@@ -56,12 +56,12 @@ public class HadoopYarnProtoRPC extends
public Server getServer(Class protocol, Object instance,
InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager,
- int numHandlers) {
+ int numHandlers, String portRangeConfig) {
LOG.debug("Creating a HadoopYarnProtoRpc server for protocol " + protocol +
" with " + numHandlers + " handlers");
return RpcFactoryProvider.getServerFactory(conf).getServer(protocol,
- instance, addr, conf, secretManager, numHandlers);
+ instance, addr, conf, secretManager, numHandlers, portRangeConfig);
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java Thu May 3 02:14:01 2012
@@ -43,8 +43,16 @@ public abstract class YarnRPC {
public abstract Server getServer(Class protocol, Object instance,
InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager,
- int numHandlers);
+ int numHandlers, String portRangeConfig);
+ public Server getServer(Class protocol, Object instance,
+ InetSocketAddress addr, Configuration conf,
+ SecretManager<? extends TokenIdentifier> secretManager,
+ int numHandlers) {
+ return getServer(protocol, instance, addr, conf, secretManager, numHandlers,
+ null);
+ }
+
public static YarnRPC create(Configuration conf) {
LOG.debug("Creating YarnRPC for " +
conf.get(YarnConfiguration.IPC_RPC_IMPL));
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java Thu May 3 02:14:01 2012
@@ -29,6 +29,9 @@ import java.io.InputStreamReader;
import java.io.IOException;
import java.io.Writer;
import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
@@ -112,8 +115,11 @@ public class AggregatedLogFormat {
// the entire k-v format
public LogValue(List<String> rootLogDirs, ContainerId containerId) {
- this.rootLogDirs = rootLogDirs;
+ this.rootLogDirs = new ArrayList<String>(rootLogDirs);
this.containerId = containerId;
+
+ // Ensure logs are processed in lexical order
+ Collections.sort(this.rootLogDirs);
}
public void write(DataOutputStream out) throws IOException {
@@ -131,7 +137,10 @@ public class AggregatedLogFormat {
continue; // ContainerDir may have been deleted by the user.
}
- for (File logFile : containerLogDir.listFiles()) {
+ // Write out log files in lexical order
+ File[] logFiles = containerLogDir.listFiles();
+ Arrays.sort(logFiles);
+ for (File logFile : logFiles) {
// Write the logFile Type
out.writeUTF(logFile.getName());
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java Thu May 3 02:14:01 2012
@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.webapp;
import static com.google.common.base.Preconditions.checkNotNull;
+import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -75,6 +76,14 @@ public abstract class WebApp extends Ser
@Provides public HttpServer httpServer() { return httpServer; }
+ /**
+ * Get the address the http server is bound to
+ * @return InetSocketAddress
+ */
+ public InetSocketAddress getListenerAddress() {
+ return checkNotNull(httpServer, "httpServer").getListenerAddress();
+ }
+
public int port() {
return checkNotNull(httpServer, "httpServer").getPort();
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java Thu May 3 02:14:01 2012
@@ -79,11 +79,11 @@ public class JQueryUI extends HtmlBlock
@Override
protected void render(Block html) {
html.
- link(join("https://ajax.googleapis.com/ajax/libs/jqueryui/1.8.16/themes/",
- getTheme(), "/jquery-ui.css")).
+ link(root_url(join("static/jquery/themes-1.8.16/",
+ getTheme(), "/jquery-ui.css"))).
link(root_url("static/dt-1.7.5/css/jui-dt.css")).
- script("https://ajax.googleapis.com/ajax/libs/jquery/1.6.4/jquery.min.js").
- script("https://ajax.googleapis.com/ajax/libs/jqueryui/1.8.16/jquery-ui.min.js").
+ script(root_url("static/jquery/jquery.min-1.6.4.js")).
+ script(root_url("static/jquery/jquery-ui.min-1.8.16.js")).
script(root_url("static/dt-1.7.5/js/jquery.dataTables.min.js")).
script(root_url("static/yarn.dt.plugins.js")).
script(root_url("static/themeswitcher.js")).
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/themeswitcher.js.gz
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/themeswitcher.js.gz?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
Binary files - no diff available.
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml Thu May 3 02:14:01 2012
@@ -188,6 +188,22 @@
</property>
<property>
+ <description>The minimum allocation size for every container request at the RM,
+ in MBs. Memory requests lower than this won't take effect,
+ and the specified value will get allocated at minimum.</description>
+ <name>yarn.scheduler.minimum-allocation-mb</name>
+ <value>128</value>
+ </property>
+
+ <property>
+ <description>The maximum allocation size for every container request at the RM,
+ in MBs. Memory requests higher than this won't take effect,
+ and will get capped to this value.</description>
+ <name>yarn.scheduler.maximum-allocation-mb</name>
+ <value>10240</value>
+ </property>
+
+ <property>
<description>The class to use as the persistent store.</description>
<name>yarn.resourcemanager.store.class</name>
</property>
@@ -262,6 +278,26 @@
</property>
<property>
+ <description>
+ Number of seconds after an application finishes before the nodemanager's
+ DeletionService will delete the application's localized file directory
+ and log directory.
+
+ To diagnose Yarn application problems, set this property's value large
+ enough (for example, to 600 = 10 minutes) to permit examination of these
+ directories. After changing the property's value, you must restart the
+ nodemanager in order for it to have an effect.
+
+ The roots of Yarn applications' work directories are configurable with
+ the yarn.nodemanager.local-dirs property (see below), and the roots
+ of the Yarn applications' log directories are configurable with the
+ yarn.nodemanager.log-dirs property (see also below).
+ </description>
+ <name>yarn.nodemanager.delete.debug-delay-sec</name>
+ <value>0</value>
+ </property>
+
+ <property>
<description>Heartbeat interval to RM</description>
<name>yarn.nodemanager.heartbeat.interval-ms</name>
<value>1000</value>
@@ -274,7 +310,12 @@
</property>
<property>
- <description>List of directories to store localized files in.</description>
+ <description>List of directories to store localized files in. An
+ application's localized file directory will be found in:
+ ${yarn.nodemanager.local-dirs}/usercache/${user}/appcache/application_${appid}.
+ Individual containers' work directories, called container_${contid}, will
+ be subdirectories of this.
+ </description>
<name>yarn.nodemanager.local-dirs</name>
<value>/tmp/nm-local-dir</value>
</property>
@@ -310,7 +351,13 @@
</property>
<property>
- <description>Where to store container logs.</description>
+ <description>
+ Where to store container logs. An application's localized log directory
+ will be found in ${yarn.nodemanager.log-dirs}/application_${appid}.
+ Individual containers' log directories will be below this, in directories
+ named container_${contid}. Each container directory will contain the files
+ stderr, stdout, and syslog generated by that container.
+ </description>
<name>yarn.nodemanager.log-dirs</name>
<value>/tmp/logs</value>
</property>
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/TestWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/TestWebApp.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/TestWebApp.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/TestWebApp.java Thu May 3 02:14:01 2012
@@ -149,6 +149,18 @@ public class TestWebApp {
app.stop();
}
+ @Test public void testCreateWithPort() {
+ // see if the ephemeral port is updated
+ WebApp app = WebApps.$for(this).at(0).start();
+ int port = app.getListenerAddress().getPort();
+ assertTrue(port > 0);
+ app.stop();
+ // try to reuse the port
+ app = WebApps.$for(this).at(port).start();
+ assertEquals(port, app.getListenerAddress().getPort());
+ app.stop();
+ }
+
@Test public void testServePaths() {
WebApp app = WebApps.$for("test", this).start();
assertEquals("/test", app.getRedirectPath());
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml Thu May 3 02:14:01 2012
@@ -27,7 +27,7 @@
<properties>
<!-- Basedir needed for generating FindBugs warnings using parent pom -->
<yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>
- <container-executor.conf.dir>/etc/hadoop</container-executor.conf.dir>
+ <container-executor.conf.dir>../etc/hadoop</container-executor.conf.dir>
<container-executor.additional_cflags></container-executor.additional_cflags>
</properties>
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java Thu May 3 02:14:01 2012
@@ -19,10 +19,9 @@
package org.apache.hadoop.yarn.server.nodemanager;
import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.Collections;
import java.util.List;
-import java.util.ListIterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -41,23 +40,22 @@ class DirectoryCollection {
private int numFailures;
public DirectoryCollection(String[] dirs) {
- localDirs = new ArrayList<String>();
- localDirs.addAll(Arrays.asList(dirs));
- failedDirs = new ArrayList<String>();
+ localDirs = new CopyOnWriteArrayList<String>(dirs);
+ failedDirs = new CopyOnWriteArrayList<String>();
}
/**
* @return the current valid directories
*/
synchronized List<String> getGoodDirs() {
- return localDirs;
+ return Collections.unmodifiableList(localDirs);
}
/**
* @return the failed directories
*/
synchronized List<String> getFailedDirs() {
- return failedDirs;
+ return Collections.unmodifiableList(failedDirs);
}
/**
@@ -75,22 +73,17 @@ class DirectoryCollection {
*/
synchronized boolean checkDirs() {
int oldNumFailures = numFailures;
- ListIterator<String> it = localDirs.listIterator();
- while (it.hasNext()) {
- final String dir = it.next();
+ for (final String dir : localDirs) {
try {
DiskChecker.checkDir(new File(dir));
} catch (DiskErrorException de) {
LOG.warn("Directory " + dir + " error " +
de.getMessage() + ", removing from the list of valid directories.");
- it.remove();
+ localDirs.remove(dir);
failedDirs.add(dir);
numFailures++;
}
}
- if (numFailures > oldNumFailures) {
- return true;
- }
- return false;
+ return numFailures > oldNumFailures;
}
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java Thu May 3 02:14:01 2012
@@ -30,6 +30,8 @@ import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
@@ -269,12 +271,15 @@ public class ContainerLogsPage extends N
}
}
} else {
- // Just print out the log-types
+ // Print out log types in lexical order
List<File> containerLogsDirs = getContainerLogDirs(containerId,
dirsHandler);
+ Collections.sort(containerLogsDirs);
boolean foundLogFile = false;
for (File containerLogsDir : containerLogsDirs) {
- for (File logFile : containerLogsDir.listFiles()) {
+ File[] logFiles = containerLogsDir.listFiles();
+ Arrays.sort(logFiles);
+ for (File logFile : logFiles) {
foundLogFile = true;
html.p()
.a(url("containerlogs", $(CONTAINER_ID), $(APP_OWNER),
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java Thu May 3 02:14:01 2012
@@ -19,6 +19,7 @@ package org.apache.hadoop.yarn.server.no
import java.util.Map.Entry;
+import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
@@ -58,8 +59,11 @@ public class NMWebServices {
private static RecordFactory recordFactory = RecordFactoryProvider
.getRecordFactory(null);
+ private @javax.ws.rs.core.Context
+ HttpServletResponse response;
+
@javax.ws.rs.core.Context
- UriInfo uriInfo;
+ UriInfo uriInfo;
@Inject
public NMWebServices(final Context nm, final ResourceView view,
@@ -69,6 +73,11 @@ public class NMWebServices {
this.webapp = webapp;
}
+ private void init() {
+ //clear content type
+ response.setContentType(null);
+ }
+
@GET
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public NodeInfo get() {
@@ -79,6 +88,7 @@ public class NMWebServices {
@Path("/info")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public NodeInfo getNodeInfo() {
+ init();
return new NodeInfo(this.nmContext, this.rview);
}
@@ -87,6 +97,7 @@ public class NMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AppsInfo getNodeApps(@QueryParam("state") String stateQuery,
@QueryParam("user") String userQuery) {
+ init();
AppsInfo allApps = new AppsInfo();
for (Entry<ApplicationId, Application> entry : this.nmContext
.getApplications().entrySet()) {
@@ -116,6 +127,7 @@ public class NMWebServices {
@Path("/apps/{appid}")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AppInfo getNodeApp(@PathParam("appid") String appId) {
+ init();
ApplicationId id = ConverterUtils.toApplicationId(recordFactory, appId);
if (id == null) {
throw new NotFoundException("app with id " + appId + " not found");
@@ -132,6 +144,7 @@ public class NMWebServices {
@Path("/containers")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public ContainersInfo getNodeContainers() {
+ init();
ContainersInfo allContainers = new ContainersInfo();
for (Entry<ContainerId, Container> entry : this.nmContext.getContainers()
.entrySet()) {
@@ -151,6 +164,7 @@ public class NMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public ContainerInfo getNodeContainer(@PathParam("containerid") String id) {
ContainerId containerId = null;
+ init();
try {
containerId = ConverterUtils.toContainerId(id);
} catch (Exception e) {
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.c?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.c (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.c Thu May 3 02:14:01 2012
@@ -29,6 +29,7 @@
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
+#include <limits.h>
#define MAX_SIZE 10
@@ -87,6 +88,25 @@ static int is_only_root_writable(const c
}
/**
+ * Return a string with the configuration file path name resolved via realpath(3)
+ *
+ * NOTE: relative path names are resolved relative to the second argument not getwd(3)
+ */
+char *resolve_config_path(const char* file_name, const char *root) {
+ const char *real_fname = NULL;
+ char buffer[PATH_MAX*2 + 1];
+
+ if (file_name[0] == '/') {
+ real_fname = file_name;
+ } else if (realpath(root, buffer) != NULL) {
+ strncpy(strrchr(buffer, '/') + 1, file_name, PATH_MAX);
+ real_fname = buffer;
+ }
+
+ return (real_fname == NULL) ? NULL : realpath(real_fname, NULL);
+}
+
+/**
* Ensure that the configuration file and all of the containing directories
* are only writable by root. Otherwise, an attacker can change the
* configuration and potentially cause damage.
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.h?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.h (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/configuration.h Thu May 3 02:14:01 2012
@@ -24,6 +24,13 @@
*/
int check_configuration_permissions(const char* file_name);
+/**
+ * Return a string with the configuration file path name resolved via realpath(3)
+ *
+ * NOTE: relative path names are resolved relative to the second argument not getwd(3)
+ */
+char *resolve_config_path(const char* file_name, const char *root);
+
// read the given configuration file
void read_config(const char* config_file);
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/main.c Thu May 3 02:14:01 2012
@@ -33,6 +33,12 @@
#define STRINGIFY(X) _STRINGIFY(X)
#define CONF_FILENAME "container-executor.cfg"
+// When building as part of a Maven build this value gets defined by using
+// container-executor.conf.dir property. See:
+// hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+// for details.
+// NOTE: if this ends up being a relative path it gets resolved relative to
+// the location of the container-executor binary itself, not getwd(3)
#ifndef HADOOP_CONF_DIR
#error HADOOP_CONF_DIR must be defined
#endif
@@ -96,7 +102,7 @@ int main(int argc, char **argv) {
char *executable_file = get_executable();
char *orig_conf_file = STRINGIFY(HADOOP_CONF_DIR) "/" CONF_FILENAME;
- char *conf_file = realpath(orig_conf_file, NULL);
+ char *conf_file = resolve_config_path(orig_conf_file, argv[0]);
char *local_dirs, *log_dirs;
if (conf_file == NULL) {
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c Thu May 3 02:14:01 2012
@@ -196,6 +196,18 @@ void test_check_user() {
}
}
+void test_resolve_config_path() {
+ printf("\nTesting resolve_config_path\n");
+ if (strcmp(resolve_config_path("/etc/passwd", NULL), "/etc/passwd") != 0) {
+ printf("FAIL: failed to resolve config_name on an absolute path name: /etc/passwd\n");
+ exit(1);
+ }
+ if (strcmp(resolve_config_path("../etc/passwd", "/etc/passwd"), "/etc/passwd") != 0) {
+ printf("FAIL: failed to resolve config_name on a relative path name: ../etc/passwd (relative to /etc/passwd)");
+ exit(1);
+ }
+}
+
void test_check_configuration_permissions() {
printf("\nTesting check_configuration_permissions\n");
if (check_configuration_permissions("/etc/passwd") != 0) {
@@ -668,7 +680,9 @@ int main(int argc, char **argv) {
int my_username = 0;
// clean up any junk from previous run
- system("chmod -R u=rwx " TEST_ROOT "; rm -fr " TEST_ROOT);
+ if (system("chmod -R u=rwx " TEST_ROOT "; rm -fr " TEST_ROOT)) {
+ exit(1);
+ }
if (mkdirs(TEST_ROOT "/logs/userlogs", 0755) != 0) {
exit(1);
@@ -700,6 +714,9 @@ int main(int argc, char **argv) {
printf("\nStarting tests\n");
+ printf("\nTesting resolve_config_path()\n");
+ test_resolve_config_path();
+
printf("\nTesting get_user_directory()\n");
test_get_user_directory();
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java Thu May 3 02:14:01 2012
@@ -120,6 +120,11 @@ public class AdminService extends Abstra
}
this.server.start();
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress =
+ this.server.getListenerAddress().getHostName() + ":" + this.server.getListenerAddress().getPort();
+ conf.set(YarnConfiguration.RM_ADMIN_ADDRESS, resolvedAddress);
+ }
super.start();
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java Thu May 3 02:14:01 2012
@@ -119,11 +119,14 @@ public class ApplicationMasterService ex
}
this.server.start();
-
this.bindAddress =
NetUtils.createSocketAddr(masterServiceAddress.getHostName(),
this.server.getPort());
-
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress =
+ this.server.getListenerAddress().getHostName() + ":" + this.server.getListenerAddress().getPort();
+ conf.set(YarnConfiguration.RM_SCHEDULER_ADDRESS, resolvedAddress);
+ }
super.start();
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java Thu May 3 02:14:01 2012
@@ -150,6 +150,11 @@ public class ClientRMService extends Abs
}
this.server.start();
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress =
+ this.server.getListenerAddress().getHostName() + ":" + this.server.getListenerAddress().getPort();
+ conf.set(YarnConfiguration.RM_ADDRESS, resolvedAddress);
+ }
super.start();
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java Thu May 3 02:14:01 2012
@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.re
import java.io.IOException;
+import java.net.InetAddress;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
@@ -443,14 +444,14 @@ public class ResourceManager extends Com
WebApps.$for("cluster", ApplicationMasterService.class, masterService, "ws").at(
this.conf.get(YarnConfiguration.RM_WEBAPP_ADDRESS,
YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS));
+ String proxyHostAndPort = YarnConfiguration.getProxyHostAndPort(conf);
if(YarnConfiguration.getRMWebAppHostAndPort(conf).
- equals(YarnConfiguration.getProxyHostAndPort(conf))) {
+ equals(proxyHostAndPort)) {
AppReportFetcher fetcher = new AppReportFetcher(conf, getClientRMService());
builder.withServlet(ProxyUriUtils.PROXY_SERVLET_NAME,
ProxyUriUtils.PROXY_PATH_SPEC, WebAppProxyServlet.class);
builder.withAttribute(WebAppProxy.FETCHER_ATTRIBUTE, fetcher);
- String proxy = YarnConfiguration.getProxyHostAndPort(conf);
- String[] proxyParts = proxy.split(":");
+ String[] proxyParts = proxyHostAndPort.split(":");
builder.withAttribute(WebAppProxy.PROXY_HOST_ATTRIBUTE, proxyParts[0]);
}
@@ -475,6 +476,15 @@ public class ResourceManager extends Com
} catch(IOException ie) {
throw new YarnException("Failed to start secret manager threads", ie);
}
+
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String hostname = getConfig().get(YarnConfiguration.RM_WEBAPP_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS);
+ hostname = (hostname.contains(":")) ? hostname.substring(0, hostname.indexOf(":")) : hostname;
+ int port = webApp.port();
+ String resolvedAddress = hostname + ":" + port;
+ conf.set(YarnConfiguration.RM_WEBAPP_ADDRESS, resolvedAddress);
+ }
super.start();
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java Thu May 3 02:14:01 2012
@@ -133,6 +133,11 @@ public class ResourceTrackerService exte
}
this.server.start();
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress =
+ server.getListenerAddress().getHostName() + ":" + server.getListenerAddress().getPort();
+ conf.set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS, resolvedAddress);
+ }
}
@Override
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java Thu May 3 02:14:01 2012
@@ -30,6 +30,7 @@ import org.apache.hadoop.util.StringUtil
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources;
public class CapacitySchedulerConfiguration extends Configuration {
@@ -71,18 +72,6 @@ public class CapacitySchedulerConfigurat
@Private
public static final String STATE = "state";
- private static final int MINIMUM_MEMORY = 1024;
-
- @Private
- public static final String MINIMUM_ALLOCATION =
- PREFIX + "minimum-allocation-mb";
-
- private static final int MAXIMUM_MEMORY = 10240;
-
- @Private
- public static final String MAXIMUM_ALLOCATION =
- PREFIX + "maximum-allocation-mb";
-
@Private
public static final int DEFAULT_MAXIMUM_SYSTEM_APPLICATIIONS = 10000;
@@ -253,12 +242,16 @@ public class CapacitySchedulerConfigurat
}
public Resource getMinimumAllocation() {
- int minimumMemory = getInt(MINIMUM_ALLOCATION, MINIMUM_MEMORY);
+ int minimumMemory = getInt(
+ YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
return Resources.createResource(minimumMemory);
}
public Resource getMaximumAllocation() {
- int maximumMemory = getInt(MAXIMUM_ALLOCATION, MAXIMUM_MEMORY);
+ int maximumMemory = getInt(
+ YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);
return Resources.createResource(maximumMemory);
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/FifoScheduler.java Thu May 3 02:14:01 2012
@@ -52,6 +52,7 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
@@ -107,19 +108,6 @@ public class FifoScheduler implements Re
private Map<NodeId, SchedulerNode> nodes = new ConcurrentHashMap<NodeId, SchedulerNode>();
- private static final int MINIMUM_MEMORY = 1024;
-
- private static final String FIFO_PREFIX = "yarn.scheduler.fifo.";
- @Private
- public static final String MINIMUM_ALLOCATION =
- FIFO_PREFIX + "minimum-allocation-mb";
-
- private static final int MAXIMUM_MEMORY = 10240;
-
- @Private
- public static final String MAXIMUM_ALLOCATION =
- FIFO_PREFIX + "maximum-allocation-mb";
-
private boolean initialized;
private Resource minimumAllocation;
private Resource maximumAllocation;
@@ -218,9 +206,13 @@ public class FifoScheduler implements Re
this.containerTokenSecretManager = containerTokenSecretManager;
this.rmContext = rmContext;
this.minimumAllocation =
- Resources.createResource(conf.getInt(MINIMUM_ALLOCATION, MINIMUM_MEMORY));
+ Resources.createResource(conf.getInt(
+ YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB));
this.maximumAllocation =
- Resources.createResource(conf.getInt(MAXIMUM_ALLOCATION, MAXIMUM_MEMORY));
+ Resources.createResource(conf.getInt(
+ YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB));
this.metrics = QueueMetrics.forQueue(DEFAULT_QUEUE_NAME, null, false,
conf);
this.activeUsersManager = new ActiveUsersManager(metrics);
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppBlock.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppBlock.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppBlock.java Thu May 3 02:14:01 2012
@@ -18,21 +18,85 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.APPLICATION_ID;
+
+import com.google.inject.Inject;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
+import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
+import org.apache.hadoop.yarn.util.Apps;
+import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
-
-import com.google.inject.Inject;
+import org.apache.hadoop.yarn.webapp.ResponseInfo;
public class AppBlock extends HtmlBlock {
+ private ApplicationACLsManager aclsManager;
+
@Inject
- AppBlock(ResourceManager rm, ViewContext ctx) {
+ AppBlock(ResourceManager rm, ViewContext ctx, ApplicationACLsManager aclsManager) {
super(ctx);
+ this.aclsManager = aclsManager;
}
@Override
protected void render(Block html) {
+ String aid = $(APPLICATION_ID);
+ if (aid.isEmpty()) {
+ puts("Bad request: requires application ID");
+ return;
+ }
+ ApplicationId appID = Apps.toAppID(aid);
+ RMContext context = getInstance(RMContext.class);
+ RMApp rmApp = context.getRMApps().get(appID);
+ if (rmApp == null) {
+ puts("Application not found: "+ aid);
+ return;
+ }
+ AppInfo app = new AppInfo(rmApp, true);
+
+ // Check for the authorization.
+ String remoteUser = request().getRemoteUser();
+ UserGroupInformation callerUGI = null;
+ if (remoteUser != null) {
+ callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
+ }
+ if (callerUGI != null
+ && !this.aclsManager.checkAccess(callerUGI,
+ ApplicationAccessType.VIEW_APP, app.getUser(), appID)) {
+ puts("You (User " + remoteUser
+ + ") are not authorized to view the logs for application " + appID);
+ return;
+ }
+
+ setTitle(join("Application ", aid));
+
+ ResponseInfo info = info("Application Overview").
+ _("User:", app.getUser()).
+ _("Name:", app.getName()).
+ _("State:", app.getState()).
+ _("FinalStatus:", app.getFinalStatus()).
+ _("Started:", Times.format(app.getStartTime())).
+ _("Elapsed:", StringUtils.formatTime(
+ Times.elapsed(app.getStartTime(), app.getFinishTime()))).
+ _("Tracking URL:", !app.isTrackingUrlReady() ?
+ "#" : app.getTrackingUrlPretty(), app.getTrackingUI()).
+ _("Diagnostics:", app.getNote());
+ if (app.amContainerLogsExist()) {
+ info._("AM container logs:", app.getAMContainerLogs(), app.getAMContainerLogs());
+ } else {
+ info._("AM container logs:", "");
+ }
+
html._(InfoBlock.class);
}
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java Thu May 3 02:14:01 2012
@@ -121,7 +121,8 @@ class NodesPage extends RmView {
row.td().a("http://" + httpAddress, httpAddress)._();
}
row.td(info.getHealthStatus()).
- td(Times.format(info.getLastHealthUpdate())).
+ td().br().$title(String.valueOf(info.getLastHealthUpdate()))._().
+ _(Times.format(info.getLastHealthUpdate()))._().
td(info.getHealthReport()).
td(String.valueOf(info.getNumContainers())).
td().br().$title(String.valueOf(usedMemory))._().
@@ -153,10 +154,12 @@ class NodesPage extends RmView {
}
private String nodesTableInit() {
- StringBuilder b = tableInit().append(",aoColumnDefs:[");
- b.append("{'bSearchable':false, 'aTargets': [7]} ,");
- b.append("{'sType':'title-numeric', 'bSearchable':false, " +
- "'aTargets': [ 8, 9] }]}");
+ StringBuilder b = tableInit().append(", aoColumnDefs: [");
+ b.append("{'bSearchable': false, 'aTargets': [ 7 ]}");
+ b.append(", {'sType': 'title-numeric', 'bSearchable': false, " +
+ "'aTargets': [ 8, 9 ] }");
+ b.append(", {'sType': 'title-numeric', 'aTargets': [ 5 ]}");
+ b.append("]}");
return b.toString();
}
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java Thu May 3 02:14:01 2012
@@ -23,6 +23,7 @@ import java.util.Collection;
import java.util.concurrent.ConcurrentMap;
import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
@@ -77,6 +78,8 @@ public class RMWebServices {
.getRecordFactory(null);
private final ApplicationACLsManager aclsManager;
+ private @Context HttpServletResponse response;
+
@Inject
public RMWebServices(final ResourceManager rm,
final ApplicationACLsManager aclsManager) {
@@ -100,6 +103,11 @@ public class RMWebServices {
return true;
}
+ private void init() {
+ //clear content type
+ response.setContentType(null);
+ }
+
@GET
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public ClusterInfo get() {
@@ -110,6 +118,7 @@ public class RMWebServices {
@Path("/info")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public ClusterInfo getClusterInfo() {
+ init();
return new ClusterInfo(this.rm);
}
@@ -117,6 +126,7 @@ public class RMWebServices {
@Path("/metrics")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public ClusterMetricsInfo getClusterMetricsInfo() {
+ init();
return new ClusterMetricsInfo(this.rm, this.rm.getRMContext());
}
@@ -124,6 +134,7 @@ public class RMWebServices {
@Path("/scheduler")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public SchedulerTypeInfo getSchedulerInfo() {
+ init();
ResourceScheduler rs = rm.getResourceScheduler();
SchedulerInfo sinfo;
if (rs instanceof CapacityScheduler) {
@@ -143,6 +154,7 @@ public class RMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public NodesInfo getNodes(@QueryParam("state") String filterState,
@QueryParam("healthy") String healthState) {
+ init();
ResourceScheduler sched = this.rm.getResourceScheduler();
if (sched == null) {
throw new NotFoundException("Null ResourceScheduler instance");
@@ -197,6 +209,7 @@ public class RMWebServices {
@Path("/nodes/{nodeId}")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public NodeInfo getNode(@PathParam("nodeId") String nodeId) {
+ init();
if (nodeId == null || nodeId.isEmpty()) {
throw new NotFoundException("nodeId, " + nodeId + ", is empty or null");
}
@@ -246,6 +259,7 @@ public class RMWebServices {
long fBegin = 0;
long fEnd = Long.MAX_VALUE;
+ init();
if (count != null && !count.isEmpty()) {
checkCount = true;
countNum = Long.parseLong(count);
@@ -355,6 +369,7 @@ public class RMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AppInfo getApp(@Context HttpServletRequest hsr,
@PathParam("appid") String appId) {
+ init();
if (appId == null || appId.isEmpty()) {
throw new NotFoundException("appId, " + appId + ", is empty or null");
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java Thu May 3 02:14:01 2012
@@ -20,27 +20,12 @@ package org.apache.hadoop.yarn.server.re
import static org.apache.hadoop.yarn.server.resourcemanager.webapp.RMWebApp.QUEUE_NAME;
import static org.apache.hadoop.yarn.util.StringHelper.join;
-import static org.apache.hadoop.yarn.webapp.YarnWebParams.APPLICATION_ID;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
-import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
-import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
-import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.StringHelper;
-import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.Controller;
-import org.apache.hadoop.yarn.webapp.ResponseInfo;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import com.google.inject.Inject;
@@ -49,12 +34,9 @@ import com.google.inject.Inject;
// on Mac OS HFS as its case-insensitive!
public class RmController extends Controller {
- private ApplicationACLsManager aclsManager;
-
@Inject
- RmController(RequestContext ctx, ApplicationACLsManager aclsManager) {
+ RmController(RequestContext ctx) {
super(ctx);
- this.aclsManager = aclsManager;
}
@Override public void index() {
@@ -67,57 +49,6 @@ public class RmController extends Contro
}
public void app() {
- String aid = $(APPLICATION_ID);
- if (aid.isEmpty()) {
- setStatus(HttpServletResponse.SC_BAD_REQUEST);
- setTitle("Bad request: requires application ID");
- return;
- }
- ApplicationId appID = Apps.toAppID(aid);
- RMContext context = getInstance(RMContext.class);
- RMApp rmApp = context.getRMApps().get(appID);
- if (rmApp == null) {
- // TODO: handle redirect to jobhistory server
- setStatus(HttpServletResponse.SC_NOT_FOUND);
- setTitle("Application not found: "+ aid);
- return;
- }
- AppInfo app = new AppInfo(rmApp, true);
-
- // Check for the authorization.
- String remoteUser = request().getRemoteUser();
- UserGroupInformation callerUGI = null;
- if (remoteUser != null) {
- callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
- }
- if (callerUGI != null
- && !this.aclsManager.checkAccess(callerUGI,
- ApplicationAccessType.VIEW_APP, app.getUser(), appID)) {
- setStatus(HttpServletResponse.SC_UNAUTHORIZED);
- setTitle("Unauthorized request for viewing application " + appID);
- renderText("You (User " + remoteUser
- + ") are not authorized to view the logs for application " + appID);
- return;
- }
-
- setTitle(join("Application ", aid));
-
- ResponseInfo info = info("Application Overview").
- _("User:", app.getUser()).
- _("Name:", app.getName()).
- _("State:", app.getState()).
- _("FinalStatus:", app.getFinalStatus()).
- _("Started:", Times.format(app.getStartTime())).
- _("Elapsed:", StringUtils.formatTime(
- Times.elapsed(app.getStartTime(), app.getFinishTime()))).
- _("Tracking URL:", !app.isTrackingUrlReady() ?
- "#" : app.getTrackingUrlPretty(), app.getTrackingUI()).
- _("Diagnostics:", app.getNote());
- if (app.amContainerLogsExist()) {
- info._("AM container logs:", app.getAMContainerLogs(), app.getAMContainerLogs());
- } else {
- info._("AM container logs:", "");
- }
render(AppPage.class);
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java Thu May 3 02:14:01 2012
@@ -137,7 +137,7 @@ public class TestFifoScheduler {
rm.stop();
}
- private void testMinimumAllocation(YarnConfiguration conf)
+ private void testMinimumAllocation(YarnConfiguration conf, int testAlloc)
throws Exception {
MockRM rm = new MockRM(conf);
rm.start();
@@ -146,7 +146,7 @@ public class TestFifoScheduler {
MockNM nm1 = rm.registerNode("h1:1234", 6 * GB);
// Submit an application
- RMApp app1 = rm.submitApp(256);
+ RMApp app1 = rm.submitApp(testAlloc);
// kick the scheduling
nm1.nodeHeartbeat(true);
@@ -157,7 +157,8 @@ public class TestFifoScheduler {
nm1.getNodeId());
int checkAlloc =
- conf.getInt("yarn.scheduler.fifo.minimum-allocation-mb", GB);
+ conf.getInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
Assert.assertEquals(checkAlloc, report_nm1.getUsedResource().getMemory());
rm.stop();
@@ -165,14 +166,20 @@ public class TestFifoScheduler {
@Test
public void testDefaultMinimumAllocation() throws Exception {
- testMinimumAllocation(new YarnConfiguration());
+ // Test with something lesser than default
+ testMinimumAllocation(
+ new YarnConfiguration(),
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB / 2);
}
@Test
public void testNonDefaultMinimumAllocation() throws Exception {
+ // Set custom min-alloc to test tweaking it
+ int allocMB = 512;
YarnConfiguration conf = new YarnConfiguration();
- conf.setInt("yarn.scheduler.fifo.minimum-allocation-mb", 512);
- testMinimumAllocation(conf);
+ conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, allocMB);
+ // Test for something lesser than this.
+ testMinimumAllocation(conf, allocMB / 2);
}
@Test
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java Thu May 3 02:14:01 2012
@@ -31,6 +31,7 @@ import javax.xml.parsers.DocumentBuilder
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.yarn.api.records.QueueState;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.ClusterMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
@@ -540,8 +541,12 @@ public class TestRMWebServices extends J
assertEquals("qstate doesn't match", QueueState.RUNNING.toString(), state);
assertEquals("capacity doesn't match", 1.0, capacity, 0.0);
assertEquals("usedCapacity doesn't match", 0.0, usedCapacity, 0.0);
- assertEquals("minQueueMemoryCapacity doesn't match", 1024, minQueueCapacity);
- assertEquals("maxQueueMemoryCapacity doesn't match", 10240,
+ assertEquals(
+ "minQueueMemoryCapacity doesn't match",
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
+ minQueueCapacity);
+ assertEquals("maxQueueMemoryCapacity doesn't match",
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
maxQueueCapacity);
assertEquals("numNodes doesn't match", 0, numNodes);
assertEquals("usedNodeCapacity doesn't match", 0, usedNodeCapacity);
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java Thu May 3 02:14:01 2012
@@ -20,6 +20,9 @@ package org.apache.hadoop.yarn.server;
import java.io.File;
import java.io.IOException;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.net.UnknownHostException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -113,7 +116,16 @@ public class MiniYARNCluster extends Com
public NodeManager getNodeManager(int i) {
return this.nodeManagers[i];
}
-
+
+ public static String getHostname() {
+ try {
+ return InetAddress.getLocalHost().getHostName();
+ }
+ catch (UnknownHostException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+
private class ResourceManagerWrapper extends AbstractService {
public ResourceManagerWrapper() {
super(ResourceManagerWrapper.class.getName());
@@ -122,6 +134,19 @@ public class MiniYARNCluster extends Com
@Override
public synchronized void start() {
try {
+ getConfig().setBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, true);
+ getConfig().set(YarnConfiguration.RM_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_ADMIN_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_SCHEDULER_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.RM_WEBAPP_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
Store store = StoreFactory.getStore(getConfig());
resourceManager = new ResourceManager(store) {
@Override
@@ -151,6 +176,10 @@ public class MiniYARNCluster extends Com
} catch (Throwable t) {
throw new YarnException(t);
}
+ LOG.info("MiniYARN ResourceManager address: " +
+ getConfig().get(YarnConfiguration.RM_ADDRESS));
+ LOG.info("MiniYARN ResourceManager web address: " +
+ getConfig().get(YarnConfiguration.RM_WEBAPP_ADDRESS));
}
@Override
@@ -212,9 +241,12 @@ public class MiniYARNCluster extends Com
remoteLogDir.getAbsolutePath());
// By default AM + 2 containers
getConfig().setInt(YarnConfiguration.NM_PMEM_MB, 4*1024);
- getConfig().set(YarnConfiguration.NM_ADDRESS, "0.0.0.0:0");
- getConfig().set(YarnConfiguration.NM_LOCALIZER_ADDRESS, "0.0.0.0:0");
- getConfig().set(YarnConfiguration.NM_WEBAPP_ADDRESS, "0.0.0.0:0");
+ getConfig().set(YarnConfiguration.NM_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.NM_LOCALIZER_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(YarnConfiguration.NM_WEBAPP_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
LOG.info("Starting NM: " + index);
nodeManagers[index].init(getConfig());
new Thread() {
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java Thu May 3 02:14:01 2012
@@ -47,6 +47,7 @@ import org.apache.hadoop.io.DataInputBuf
import org.apache.hadoop.io.Text;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.AMRMProtocol;
@@ -401,7 +402,8 @@ public class TestContainerManagerSecurit
appToken.setService(new Text(schedulerAddr.getHostName() + ":"
+ schedulerAddr.getPort()));
currentUser.addToken(appToken);
-
+ SecurityUtil.setTokenService(appToken, schedulerAddr);
+
AMRMProtocol scheduler = currentUser
.doAs(new PrivilegedAction<AMRMProtocol>() {
@Override
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm Thu May 3 02:14:01 2012
@@ -245,6 +245,14 @@ Hadoop MapReduce Next Generation - Clust
| | <<<ResourceManager>>> Scheduler class. | |
| | | <<<CapacityScheduler>>> (recommended) or <<<FifoScheduler>>> |
*-------------------------+-------------------------+------------------------+
+| <<<yarn.scheduler.minimum-allocation-mb>>> | | |
+| | Minimum limit of memory to allocate to each container request at the <<<Resource Manager>>>. | |
+| | | In MBs |
+*-------------------------+-------------------------+------------------------+
+| <<<yarn.scheduler.maximum-allocation-mb>>> | | |
+| | Maximum limit of memory to allocate to each container request at the <<<Resource Manager>>>. | |
+| | | In MBs |
+*-------------------------+-------------------------+------------------------+
| <<<yarn.resourcemanager.nodes.include-path>>> / | | |
| <<<yarn.resourcemanager.nodes.exclude-path>>> | | |
| | List of permitted/excluded NodeManagers. | |
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/pom.xml?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/pom.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/pom.xml Thu May 3 02:14:01 2012
@@ -90,10 +90,6 @@
<artifactId>netty</artifactId>
</dependency>
<dependency>
- <groupId>com.cenqua.clover</groupId>
- <artifactId>clover</artifactId>
- </dependency>
- <dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</dependency>
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/pom.xml?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/pom.xml Thu May 3 02:14:01 2012
@@ -159,10 +159,6 @@
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
- <dependency>
- <groupId>com.cenqua.clover</groupId>
- <artifactId>clover</artifactId>
- </dependency>
</dependencies>
@@ -201,18 +197,6 @@
</executions>
</plugin>
<plugin>
- <groupId>com.atlassian.maven.plugins</groupId>
- <artifactId>maven-clover2-plugin</artifactId>
- <executions>
- <execution>
- <goals>
- <goal>instrument</goal>
- <goal>aggregate</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<configuration>
@@ -301,12 +285,6 @@
<xmlOutput>true</xmlOutput>
</configuration>
</plugin>
- <plugin>
- <groupId>com.atlassian.maven.plugins</groupId>
- <artifactId>maven-clover2-plugin</artifactId>
- <!-- until we have reporting management cf. MSITE-443 -->
- <version>3.0.2</version>
- </plugin>
</plugins>
</reporting>
</project>
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/c++/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/c++:r1327719-1333290
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib:r1327719-1333290
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/block_forensics/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/block_forensics:r1327719-1333290
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/build-contrib.xml
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build-contrib.xml:r1327719-1333290
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/build.xml
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build.xml:r1327719-1333290
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/data_join/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/data_join:r1327719-1333290
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/eclipse-plugin/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/eclipse-plugin:r1327719-1333290
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/gridmix/src/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java Thu May 3 02:14:01 2012
@@ -85,10 +85,10 @@ class CompressionEmulationUtil {
"gridmix.compression-emulation.map-output.compression-ratio";
/**
- * Configuration property for setting the compression ratio of reduce output.
+ * Configuration property for setting the compression ratio of job output.
*/
- private static final String GRIDMIX_REDUCE_OUTPUT_COMPRESSION_RATIO =
- "gridmix.compression-emulation.reduce-output.compression-ratio";
+ private static final String GRIDMIX_JOB_OUTPUT_COMPRESSION_RATIO =
+ "gridmix.compression-emulation.job-output.compression-ratio";
/**
* Default compression ratio.
@@ -434,20 +434,20 @@ class CompressionEmulationUtil {
}
/**
- * Set the reduce output data compression ratio in the given configuration.
+ * Set the job output data compression ratio in the given configuration.
*/
- static void setReduceOutputCompressionEmulationRatio(Configuration conf,
- float ratio) {
- conf.setFloat(GRIDMIX_REDUCE_OUTPUT_COMPRESSION_RATIO, ratio);
+ static void setJobOutputCompressionEmulationRatio(Configuration conf,
+ float ratio) {
+ conf.setFloat(GRIDMIX_JOB_OUTPUT_COMPRESSION_RATIO, ratio);
}
/**
- * Get the reduce output data compression ratio using the given configuration.
+ * Get the job output data compression ratio using the given configuration.
* If the compression ratio is not set in the configuration then use the
* default value i.e {@value #DEFAULT_COMPRESSION_RATIO}.
*/
- static float getReduceOutputCompressionEmulationRatio(Configuration conf) {
- return conf.getFloat(GRIDMIX_REDUCE_OUTPUT_COMPRESSION_RATIO,
+ static float getJobOutputCompressionEmulationRatio(Configuration conf) {
+ return conf.getFloat(GRIDMIX_JOB_OUTPUT_COMPRESSION_RATIO,
DEFAULT_COMPRESSION_RATIO);
}