You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by su...@apache.org on 2011/09/29 02:42:55 UTC
svn commit: r1177130 [4/7] - in
/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/
hadoop-mapreduce-client/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apa...
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java Thu Sep 29 00:42:47 2011
@@ -68,8 +68,8 @@ import org.apache.hadoop.metrics2.lib.De
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.ClientRMProtocol;
-import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
@@ -78,8 +78,8 @@ import org.apache.hadoop.yarn.api.protoc
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
@@ -245,7 +245,7 @@ public class TestClientRedirect {
}
@Override
- public GetNewApplicationIdResponse getNewApplicationId(GetNewApplicationIdRequest request) throws YarnRemoteException {
+ public GetNewApplicationResponse getNewApplication(GetNewApplicationRequest request) throws YarnRemoteException {
return null;
}
@@ -267,6 +267,13 @@ public class TestClientRedirect {
application.setHost(split[0]);
application.setRpcPort(Integer.parseInt(split[1]));
application.setUser("TestClientRedirect-user");
+ application.setName("N/A");
+ application.setQueue("N/A");
+ application.setStartTime(0);
+ application.setFinishTime(0);
+ application.setTrackingUrl("N/A");
+ application.setDiagnostics("N/A");
+
GetApplicationReportResponse response = recordFactory
.newRecordInstance(GetApplicationReportResponse.class);
response.setApplicationReport(application);
@@ -281,9 +288,9 @@ public class TestClientRedirect {
}
@Override
- public FinishApplicationResponse finishApplication(
- FinishApplicationRequest request) throws YarnRemoteException {
- return null;
+ public KillApplicationResponse forceKillApplication(
+ KillApplicationRequest request) throws YarnRemoteException {
+ return recordFactory.newRecordInstance(KillApplicationResponse.class);
}
@Override
@@ -444,7 +451,7 @@ public class TestClientRedirect {
@Override
public KillJobResponse killJob(KillJobRequest request)
throws YarnRemoteException {
- return null;
+ return recordFactory.newRecordInstance(KillJobResponse.class);
}
@Override
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java Thu Sep 29 00:42:47 2011
@@ -109,7 +109,7 @@ public class TestClientServiceDelegate {
ClientServiceDelegate clientServiceDelegate = getClientServiceDelegate(
null, getRMDelegate());
JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
- Assert.assertEquals("Unknown User", jobStatus.getUsername());
+ Assert.assertEquals("N/A", jobStatus.getUsername());
Assert.assertEquals(JobStatus.State.PREP, jobStatus.getState());
//RM has app report and job History Server is not configured
@@ -145,6 +145,13 @@ public class TestClientServiceDelegate {
.newRecord(ApplicationReport.class);
applicationReport.setState(ApplicationState.SUCCEEDED);
applicationReport.setUser("root");
+ applicationReport.setHost("N/A");
+ applicationReport.setName("N/A");
+ applicationReport.setQueue("N/A");
+ applicationReport.setStartTime(0);
+ applicationReport.setFinishTime(0);
+ applicationReport.setTrackingUrl("N/A");
+ applicationReport.setDiagnostics("N/A");
return applicationReport;
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java Thu Sep 29 00:42:47 2011
@@ -43,9 +43,15 @@ import org.apache.hadoop.yarn.service.Se
*/
public class MiniMRYarnCluster extends MiniYARNCluster {
+ public static final String HADOOP_MAPREDUCE_CLIENT_APP_JAR_NAME =
+ "hadoop-mapreduce-client-app-0.24.0-SNAPSHOT.jar";
+
+ public static final String YARN_MAPREDUCE_APP_JAR_PATH =
+ "$YARN_HOME/modules/" + HADOOP_MAPREDUCE_CLIENT_APP_JAR_NAME;
+
public static final String APPJAR =
"../hadoop-mapreduce-client-app/target/"
- + MRConstants.HADOOP_MAPREDUCE_CLIENT_APP_JAR_NAME;
+ + HADOOP_MAPREDUCE_CLIENT_APP_JAR_NAME;
private static final Log LOG = LogFactory.getLog(MiniMRYarnCluster.class);
private JobHistoryServer historyServer;
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java Thu Sep 29 00:42:47 2011
@@ -402,7 +402,7 @@ public class TestMRJobs {
// both should be reachable via the class loader.
Assert.assertNotNull(cl.getResource("distributed.jar.inside2"));
Assert.assertNotNull(cl.getResource("distributed.jar.inside3"));
- Assert.assertNull(cl.getResource("distributed.jar.inside4"));
+ Assert.assertNotNull(cl.getResource("distributed.jar.inside4"));
// Check that the symlink for the renaming was created in the cwd;
File symlinkFile = new File("distributed.first.symlink");
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java Thu Sep 29 00:42:47 2011
@@ -22,6 +22,7 @@ import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.File;
@@ -36,15 +37,37 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.ClientCache;
+import org.apache.hadoop.mapred.ClientServiceDelegate;
import org.apache.hadoop.mapred.ResourceMgrDelegate;
import org.apache.hadoop.mapred.YARNRunner;
import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.JobPriority;
+import org.apache.hadoop.mapreduce.JobStatus.State;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.yarn.api.ClientRMProtocol;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
+import org.apache.hadoop.yarn.api.records.QueueInfo;
+import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -54,9 +77,8 @@ import org.mockito.invocation.Invocation
import org.mockito.stubbing.Answer;
/**
- * Test if the jobclient shows enough diagnostics
- * on a job failure.
- *
+ * Test YarnRunner and make sure the client side plugin works
+ * fine
*/
public class TestYARNRunner extends TestCase {
private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
@@ -65,18 +87,22 @@ public class TestYARNRunner extends Test
private YARNRunner yarnRunner;
private ResourceMgrDelegate resourceMgrDelegate;
private YarnConfiguration conf;
+ private ClientCache clientCache;
private ApplicationId appId;
private JobID jobId;
private File testWorkDir =
new File("target", TestYARNRunner.class.getName());
private ApplicationSubmissionContext submissionContext;
+ private ClientServiceDelegate clientDelegate;
private static final String failString = "Rejected job";
@Before
public void setUp() throws Exception {
resourceMgrDelegate = mock(ResourceMgrDelegate.class);
conf = new YarnConfiguration();
- yarnRunner = new YARNRunner(conf, resourceMgrDelegate);
+ clientCache = new ClientCache(conf, resourceMgrDelegate);
+ clientCache = spy(clientCache);
+ yarnRunner = new YARNRunner(conf, resourceMgrDelegate, clientCache);
yarnRunner = spy(yarnRunner);
submissionContext = mock(ApplicationSubmissionContext.class);
doAnswer(
@@ -102,6 +128,31 @@ public class TestYARNRunner extends Test
@Test
+ public void testJobKill() throws Exception {
+ clientDelegate = mock(ClientServiceDelegate.class);
+ when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
+ org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f,
+ State.PREP, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
+ when(clientDelegate.killJob(any(JobID.class))).thenReturn(true);
+ doAnswer(
+ new Answer<ClientServiceDelegate>() {
+ @Override
+ public ClientServiceDelegate answer(InvocationOnMock invocation)
+ throws Throwable {
+ return clientDelegate;
+ }
+ }
+ ).when(clientCache).getClient(any(JobID.class));
+ yarnRunner.killJob(jobId);
+ verify(resourceMgrDelegate).killApplication(appId);
+ when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
+ org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f,
+ State.RUNNING, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
+ yarnRunner.killJob(jobId);
+ verify(clientDelegate).killJob(jobId);
+ }
+
+ @Test
public void testJobSubmissionFailure() throws Exception {
when(resourceMgrDelegate.submitApplication(any(ApplicationSubmissionContext.class))).
thenReturn(appId);
@@ -122,4 +173,66 @@ public class TestYARNRunner extends Test
assertTrue(io.getLocalizedMessage().contains(failString));
}
}
+
+ @Test
+ public void testResourceMgrDelegate() throws Exception {
+ /* we do not want a mock of resourcemgr delegate */
+ ClientRMProtocol clientRMProtocol = mock(ClientRMProtocol.class);
+ ResourceMgrDelegate delegate = new ResourceMgrDelegate(conf, clientRMProtocol);
+ /* make sure kill calls force kill application */
+ when(clientRMProtocol.forceKillApplication(any(KillApplicationRequest.class)))
+ .thenReturn(null);
+ delegate.killApplication(appId);
+ verify(clientRMProtocol).forceKillApplication(any(KillApplicationRequest.class));
+
+ /* make sure getalljobs calls get all applications */
+ when(clientRMProtocol.getAllApplications(any(GetAllApplicationsRequest.class))).
+ thenReturn(recordFactory.newRecordInstance(GetAllApplicationsResponse.class));
+ delegate.getAllJobs();
+ verify(clientRMProtocol).getAllApplications(any(GetAllApplicationsRequest.class));
+
+ /* make sure getapplication report is called */
+ when(clientRMProtocol.getApplicationReport(any(GetApplicationReportRequest.class)))
+ .thenReturn(recordFactory.newRecordInstance(GetApplicationReportResponse.class));
+ delegate.getApplicationReport(appId);
+ verify(clientRMProtocol).getApplicationReport(any(GetApplicationReportRequest.class));
+
+ /* make sure metrics is called */
+ GetClusterMetricsResponse clusterMetricsResponse = recordFactory.newRecordInstance
+ (GetClusterMetricsResponse.class);
+ clusterMetricsResponse.setClusterMetrics(recordFactory.newRecordInstance(
+ YarnClusterMetrics.class));
+ when(clientRMProtocol.getClusterMetrics(any(GetClusterMetricsRequest.class)))
+ .thenReturn(clusterMetricsResponse);
+ delegate.getClusterMetrics();
+ verify(clientRMProtocol).getClusterMetrics(any(GetClusterMetricsRequest.class));
+
+ when(clientRMProtocol.getClusterNodes(any(GetClusterNodesRequest.class))).
+ thenReturn(recordFactory.newRecordInstance(GetClusterNodesResponse.class));
+ delegate.getActiveTrackers();
+ verify(clientRMProtocol).getClusterNodes(any(GetClusterNodesRequest.class));
+
+ GetNewApplicationResponse newAppResponse = recordFactory.newRecordInstance(
+ GetNewApplicationResponse.class);
+ newAppResponse.setApplicationId(appId);
+ when(clientRMProtocol.getNewApplication(any(GetNewApplicationRequest.class))).
+ thenReturn(newAppResponse);
+ delegate.getNewJobID();
+ verify(clientRMProtocol).getNewApplication(any(GetNewApplicationRequest.class));
+
+ GetQueueInfoResponse queueInfoResponse = recordFactory.newRecordInstance(
+ GetQueueInfoResponse.class);
+ queueInfoResponse.setQueueInfo(recordFactory.newRecordInstance(QueueInfo.class));
+ when(clientRMProtocol.getQueueInfo(any(GetQueueInfoRequest.class))).
+ thenReturn(queueInfoResponse);
+ delegate.getQueues();
+ verify(clientRMProtocol).getQueueInfo(any(GetQueueInfoRequest.class));
+
+ GetQueueUserAclsInfoResponse aclResponse = recordFactory.newRecordInstance(
+ GetQueueUserAclsInfoResponse.class);
+ when(clientRMProtocol.getQueueUserAcls(any(GetQueueUserAclsInfoRequest.class)))
+ .thenReturn(aclResponse);
+ delegate.getQueueAclsForCurrentUser();
+ verify(clientRMProtocol).getQueueUserAcls(any(GetQueueUserAclsInfoRequest.class));
+ }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml Thu Sep 29 00:42:47 2011
@@ -90,6 +90,12 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
+ <version>${yarn.version}</version>
+ <type>test-jar</type>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
<version>${hadoop-mapreduce.version}</version>
</dependency>
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/README
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/README?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/README (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/README Thu Sep 29 00:42:47 2011
@@ -30,7 +30,6 @@ clean and test: mvn clean install
run selected test after compile: mvn test -Dtest=TestClassName (combined: mvn clean install -Dtest=TestClassName)
create runnable binaries after install: mvn assembly:assembly (combined: mvn clean install assembly:assembly)
-
Eclipse Projects
----------------
http://maven.apache.org/guides/mini/guide-ide-eclipse.html
@@ -71,3 +70,16 @@ hadoop-yarn-server - Implementation of t
hadoop-yarn-server-common - APIs shared between resourcemanager and nodemanager
hadoop-yarn-server-nodemanager (TaskTracker replacement)
hadoop-yarn-server-resourcemanager (JobTracker replacement)
+
+Utilities for understanding the code
+------------------------------------
+Almost all of the yarn components as well as the mapreduce framework use
+state-machines for all the data objects. To understand those central pieces of
the code, a visual representation of the state-machines helps a lot. You can first
+convert the state-machines into graphviz(.gv) format by
+running:
+ mvn compile -Pvisualize
+Then you can use the dot program for generating directed graphs and convert the above
+.gv files to images. The graphviz package has the needed dot program and related
utilities. For example, to generate png files you can run:
+ dot -Tpng NodeManager.gv > NodeManager.png
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/dev-support/findbugs-exclude.xml?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/dev-support/findbugs-exclude.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/dev-support/findbugs-exclude.xml Thu Sep 29 00:42:47 2011
@@ -50,6 +50,10 @@
<Bug pattern="BC_UNCONFIRMED_CAST" />
</Match>
<Match>
+ <Class name="~org\.apache\.hadoop\.yarn\.server\.resourcemanager\.rmapp\.RMAppImpl.*" />
+ <Bug pattern="BC_UNCONFIRMED_CAST" />
+ </Match>
+ <Match>
<Class name="~org\.apache\.hadoop\.yarn\.server\.resourcemanager\.rmapp\.attempt\.RMAppAttemptImpl.*" />
<Bug pattern="BC_UNCONFIRMED_CAST" />
</Match>
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java Thu Sep 29 00:42:47 2011
@@ -37,8 +37,11 @@ public interface ApplicationConstants {
public static final String APPLICATION_CLIENT_SECRET_ENV_NAME =
"AppClientTokenEnv";
- // TODO: Weird. This is part of AM command line. Instead it should be a env.
- public static final String AM_FAIL_COUNT_STRING = "<FAILCOUNT>";
+ /**
+ * The environmental variable for APPLICATION_ATTEMPT_ID. Set in
+ * ApplicationMaster's environment only.
+ */
+ public static final String APPLICATION_ATTEMPT_ID_ENV = "APPLICATION_ATTEMPT_ID";
public static final String CONTAINER_TOKEN_FILE_ENV_NAME =
UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION;
@@ -46,4 +49,117 @@ public interface ApplicationConstants {
public static final String LOCAL_DIR_ENV = "YARN_LOCAL_DIRS";
public static final String LOG_DIR_EXPANSION_VAR = "<LOG_DIR>";
+
+ public static final String STDERR = "stderr";
+
+ public static final String STDOUT = "stdout";
+
+ /**
+ * Classpath for typical applications.
+ */
+ public static final String[] APPLICATION_CLASSPATH =
+ new String[] {
+ "$HADOOP_CONF_DIR",
+ "$HADOOP_COMMON_HOME/share/hadoop/common/*",
+ "$HADOOP_COMMON_HOME/share/hadoop/common/lib/*",
+ "$HADOOP_HDFS_HOME/share/hadoop/hdfs/*",
+ "$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*",
+ "$YARN_HOME/modules/*",
+ "$YARN_HOME/lib/*"
+ };
+
+ /**
+ * Environment for Applications.
+ *
+ * Some of the environment variables for applications are <em>final</em>
+ * i.e. they cannot be modified by the applications.
+ */
+ public enum Environment {
+ /**
+ * $USER
+ * Final, non-modifiable.
+ */
+ USER("USER"),
+
+ /**
+ * $LOGNAME
+ * Final, non-modifiable.
+ */
+ LOGNAME("LOGNAME"),
+
+ /**
+ * $HOME
+ * Final, non-modifiable.
+ */
+ HOME("HOME"),
+
+ /**
+ * $PWD
+ * Final, non-modifiable.
+ */
+ PWD("PWD"),
+
+ /**
+ * $PATH
+ */
+ PATH("PATH"),
+
+ /**
+ * $SHELL
+ */
+ SHELL("SHELL"),
+
+ /**
+ * $JAVA_HOME
+ */
+ JAVA_HOME("JAVA_HOME"),
+
+ /**
+ * $CLASSPATH
+ */
+ CLASSPATH("CLASSPATH"),
+
+ /**
+ * $LD_LIBRARY_PATH
+ */
+ LD_LIBRARY_PATH("LD_LIBRARY_PATH"),
+
+ /**
+ * $HADOOP_CONF_DIR
+ * Final, non-modifiable.
+ */
+ HADOOP_CONF_DIR("HADOOP_CONF_DIR"),
+
+ /**
+ * $HADOOP_COMMON_HOME
+ */
+ HADOOP_COMMON_HOME("HADOOP_COMMON_HOME"),
+
+ /**
+ * $HADOOP_HDFS_HOME
+ */
+ HADOOP_HDFS_HOME("HADOOP_HDFS_HOME"),
+
+ /**
+ * $YARN_HOME
+ */
+ YARN_HOME("YARN_HOME");
+
+ private final String variable;
+ private Environment(String variable) {
+ this.variable = variable;
+ }
+
+ public String key() {
+ return variable;
+ }
+
+ public String toString() {
+ return variable;
+ }
+
+ public String $() {
+ return "$" + variable;
+ }
+ }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java Thu Sep 29 00:42:47 2011
@@ -21,8 +21,8 @@ package org.apache.hadoop.yarn.api;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
-import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
@@ -31,8 +31,8 @@ import org.apache.hadoop.yarn.api.protoc
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
@@ -62,14 +62,18 @@ public interface ClientRMProtocol {
* <p>The <code>ResourceManager</code> responds with a new, monotonically
* increasing, {@link ApplicationId} which is used by the client to submit
* a new application.</p>
- *
+ *
+ * <p>The <code>ResourceManager</code> also responds with details such
+ * as minimum and maximum resource capabilities in the cluster as specified in
+ * {@link GetNewApplicationResponse}.</p>
+ *
* @param request request to get a new <code>ApplicationId</code>
* @return new <code>ApplicationId</code> to be used to submit an application
* @throws YarnRemoteException
* @see #submitApplication(SubmitApplicationRequest)
*/
- public GetNewApplicationIdResponse getNewApplicationId(
- GetNewApplicationIdRequest request)
+ public GetNewApplicationResponse getNewApplication(
+ GetNewApplicationRequest request)
throws YarnRemoteException;
/**
@@ -92,7 +96,7 @@ public interface ClientRMProtocol {
* @param request request to submit a new application
* @return (empty) response on accepting the submission
* @throws YarnRemoteException
- * @see #getNewApplicationId(GetNewApplicationIdRequest)
+ * @see #getNewApplication(GetNewApplicationRequest)
*/
public SubmitApplicationResponse submitApplication(
SubmitApplicationRequest request)
@@ -102,7 +106,7 @@ public interface ClientRMProtocol {
* <p>The interface used by clients to request the
* <code>ResourceManager</code> to abort submitted application.</p>
*
- * <p>The client, via {@link FinishApplicationRequest} provides the
+ * <p>The client, via {@link KillApplicationRequest} provides the
* {@link ApplicationId} of the application to be aborted.</p>
*
* <p> In secure mode,the <code>ResourceManager</code> verifies access to the
@@ -117,8 +121,8 @@ public interface ClientRMProtocol {
* @throws YarnRemoteException
* @see #getQueueUserAcls(GetQueueUserAclsInfoRequest)
*/
- public FinishApplicationResponse finishApplication(
- FinishApplicationRequest request)
+ public KillApplicationResponse forceKillApplication(
+ KillApplicationRequest request)
throws YarnRemoteException;
/**
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationReport.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationReport.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationReport.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationReport.java Thu Sep 29 00:42:47 2011
@@ -186,4 +186,16 @@ public interface ApplicationReport {
@Private
@Unstable
void setStartTime(long startTime);
+
+ /**
+ * Get the <em>finish time</em> of the application.
+ * @return <em>finish time</em> of the application
+ */
+ @Public
+ @Stable
+ long getFinishTime();
+
+ @Private
+ @Unstable
+ void setFinishTime(long finishTime);
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/Container.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/Container.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/Container.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/Container.java Thu Sep 29 00:42:47 2011
@@ -43,6 +43,7 @@ import org.apache.hadoop.yarn.api.Contai
* </li>
* <li>HTTP uri of the node.</li>
* <li>{@link Resource} allocated to the container.</li>
+ * <li>{@link Priority} at which the container was allocated.</li>
* <li>{@link ContainerState} of the container.</li>
* <li>
* {@link ContainerToken} of the container, used to securely verify
@@ -111,6 +112,18 @@ public interface Container extends Compa
@Private
@Unstable
void setResource(Resource resource);
+
+ /**
+ * Get the <code>Priority</code> at which the <code>Container</code> was
+ * allocated.
+ * @return <code>Priority</code> at which the <code>Container</code> was
+ * allocated
+ */
+ Priority getPriority();
+
+ @Private
+ @Unstable
+ void setPriority(Priority priority);
/**
* Get the current <code>ContainerState</code> of the container.
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java Thu Sep 29 00:42:47 2011
@@ -240,6 +240,30 @@ implements ApplicationReport {
return proto;
}
+ @Override
+ public long getStartTime() {
+ ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
+ return p.getStartTime();
+ }
+
+ @Override
+ public void setStartTime(long startTime) {
+ maybeInitBuilder();
+ builder.setStartTime(startTime);
+ }
+
+ @Override
+ public long getFinishTime() {
+ ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
+ return p.getFinishTime();
+ }
+
+ @Override
+ public void setFinishTime(long finishTime) {
+ maybeInitBuilder();
+ builder.setFinishTime(finishTime);
+ }
+
private void mergeLocalToBuilder() {
if (this.applicationId != null
&& !((ApplicationIdPBImpl) this.applicationId).getProto().equals(
@@ -279,16 +303,4 @@ implements ApplicationReport {
ApplicationIdProto applicationId) {
return new ApplicationIdPBImpl(applicationId);
}
-
- @Override
- public long getStartTime() {
- ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
- return p.getStartTime();
- }
-
- @Override
- public void setStartTime(long startTime) {
- maybeInitBuilder();
- builder.setStartTime(startTime);
- }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java Thu Sep 29 00:42:47 2011
@@ -25,6 +25,7 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.ContainerToken;
import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
@@ -34,6 +35,7 @@ import org.apache.hadoop.yarn.proto.Yarn
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerTokenProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
import org.apache.hadoop.yarn.util.ProtoUtils;
@@ -48,6 +50,7 @@ public class ContainerPBImpl extends Pro
private ContainerId containerId = null;
private NodeId nodeId = null;
private Resource resource = null;
+ private Priority priority = null;
private ContainerToken containerToken = null;
private ContainerStatus containerStatus = null;
@@ -84,6 +87,11 @@ public class ContainerPBImpl extends Pro
builder.getResource())) {
builder.setResource(convertToProtoFormat(this.resource));
}
+ if (this.priority != null &&
+ !((PriorityPBImpl) this.priority).getProto().equals(
+ builder.getPriority())) {
+ builder.setPriority(convertToProtoFormat(this.priority));
+ }
if (this.containerToken != null
&& !((ContainerTokenPBImpl) this.containerToken).getProto().equals(
builder.getContainerToken())) {
@@ -211,6 +219,29 @@ public class ContainerPBImpl extends Pro
builder.clearResource();
this.resource = resource;
}
+
+ @Override
+ public Priority getPriority() {
+ ContainerProtoOrBuilder p = viaProto ? proto : builder;
+ if (this.priority != null) {
+ return this.priority;
+ }
+ if (!p.hasPriority()) {
+ return null;
+ }
+ this.priority = convertFromProtoFormat(p.getPriority());
+ return this.priority;
+ }
+
+ @Override
+ public void setPriority(Priority priority) {
+ maybeInitBuilder();
+ if (priority == null) {
+ builder.clearPriority();
+ }
+ this.priority = priority;
+ }
+
@Override
public ContainerToken getContainerToken() {
ContainerProtoOrBuilder p = viaProto ? proto : builder;
@@ -285,6 +316,14 @@ public class ContainerPBImpl extends Pro
return ((ResourcePBImpl)t).getProto();
}
+ private PriorityPBImpl convertFromProtoFormat(PriorityProto p) {
+ return new PriorityPBImpl(p);
+ }
+
+ private PriorityProto convertToProtoFormat(Priority p) {
+ return ((PriorityPBImpl)p).getProto();
+ }
+
private ContainerTokenPBImpl convertFromProtoFormat(ContainerTokenProto p) {
return new ContainerTokenPBImpl(p);
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/client_RM_protocol.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/client_RM_protocol.proto?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/client_RM_protocol.proto (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/client_RM_protocol.proto Thu Sep 29 00:42:47 2011
@@ -24,10 +24,10 @@ option java_generate_equals_and_hash = t
import "yarn_service_protos.proto";
service ClientRMProtocolService {
- rpc getNewApplicationId (GetNewApplicationIdRequestProto) returns (GetNewApplicationIdResponseProto);
+ rpc getNewApplication (GetNewApplicationRequestProto) returns (GetNewApplicationResponseProto);
rpc getApplicationReport (GetApplicationReportRequestProto) returns (GetApplicationReportResponseProto);
rpc submitApplication (SubmitApplicationRequestProto) returns (SubmitApplicationResponseProto);
- rpc finishApplication (FinishApplicationRequestProto) returns (FinishApplicationResponseProto);
+ rpc forceKillApplication (KillApplicationRequestProto) returns (KillApplicationResponseProto);
rpc getClusterMetrics (GetClusterMetricsRequestProto) returns (GetClusterMetricsResponseProto);
rpc getAllApplications (GetAllApplicationsRequestProto) returns (GetAllApplicationsResponseProto);
rpc getClusterNodes (GetClusterNodesRequestProto) returns (GetClusterNodesResponseProto);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto Thu Sep 29 00:42:47 2011
@@ -48,6 +48,10 @@ message ResourceProto {
optional int32 memory = 1;
}
+message PriorityProto {
+ optional int32 priority = 1;
+}
+
enum ContainerStateProto {
C_NEW = 1;
C_RUNNING = 2;
@@ -66,9 +70,10 @@ message ContainerProto {
optional NodeIdProto nodeId = 2;
optional string node_http_address = 3;
optional ResourceProto resource = 4;
- optional ContainerStateProto state = 5;
- optional ContainerTokenProto container_token = 6;
- optional ContainerStatusProto container_status = 7;
+ optional PriorityProto priority = 5;
+ optional ContainerStateProto state = 6;
+ optional ContainerTokenProto container_token = 7;
+ optional ContainerStatusProto container_status = 8;
}
enum ApplicationStateProto {
@@ -140,6 +145,7 @@ message ApplicationReportProto {
optional string trackingUrl = 11;
optional string diagnostics = 12 [default = "N/A"];
optional int64 startTime = 13;
+ optional int64 finishTime = 14;
}
message NodeIdProto {
@@ -252,10 +258,6 @@ message ContainerStatusProto {
////////////////////////////////////////////////////////////////////////
////// From common//////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////
-message PriorityProto {
- optional int32 priority = 1;
-}
-
message StringURLMapProto {
optional string key = 1;
optional URLProto value = 2;
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto Thu Sep 29 00:42:47 2011
@@ -66,11 +66,13 @@ message AllocateResponseProto {
/////// client_RM_Protocol ///////////////////////////
//////////////////////////////////////////////////////
-message GetNewApplicationIdRequestProto {
+message GetNewApplicationRequestProto {
}
-message GetNewApplicationIdResponseProto {
+message GetNewApplicationResponseProto {
optional ApplicationIdProto application_id = 1;
+ optional ResourceProto minimumCapability = 2;
+ optional ResourceProto maximumCapability = 3;
}
message GetApplicationReportRequestProto {
@@ -88,11 +90,11 @@ message SubmitApplicationRequestProto {
message SubmitApplicationResponseProto {
}
-message FinishApplicationRequestProto {
+message KillApplicationRequestProto {
optional ApplicationIdProto application_id = 1;
}
-message FinishApplicationResponseProto {
+message KillApplicationResponseProto {
}
message GetClusterMetricsRequestProto {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java Thu Sep 29 00:42:47 2011
@@ -25,8 +25,6 @@ import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.yarn.api.ClientRMProtocol;
-import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
@@ -35,16 +33,16 @@ import org.apache.hadoop.yarn.api.protoc
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationRequestPBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAllApplicationsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAllApplicationsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportRequestPBImpl;
@@ -53,27 +51,28 @@ import org.apache.hadoop.yarn.api.protoc
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesResponsePBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationIdRequestPBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationIdResponsePBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationRequestPBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoResponsePBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationRequestPBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.ipc.ProtoOverHadoopRpcEngine;
import org.apache.hadoop.yarn.proto.ClientRMProtocol.ClientRMProtocolService;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationIdRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto;
import com.google.protobuf.ServiceException;
@@ -88,11 +87,11 @@ public class ClientRMProtocolPBClientImp
}
@Override
- public FinishApplicationResponse finishApplication(
- FinishApplicationRequest request) throws YarnRemoteException {
- FinishApplicationRequestProto requestProto = ((FinishApplicationRequestPBImpl)request).getProto();
+ public KillApplicationResponse forceKillApplication(
+ KillApplicationRequest request) throws YarnRemoteException {
+ KillApplicationRequestProto requestProto = ((KillApplicationRequestPBImpl)request).getProto();
try {
- return new FinishApplicationResponsePBImpl(proxy.finishApplication(null, requestProto));
+ return new KillApplicationResponsePBImpl(proxy.forceKillApplication(null, requestProto));
} catch (ServiceException e) {
if (e.getCause() instanceof YarnRemoteException) {
throw (YarnRemoteException)e.getCause();
@@ -139,11 +138,11 @@ public class ClientRMProtocolPBClientImp
}
@Override
- public GetNewApplicationIdResponse getNewApplicationId(
- GetNewApplicationIdRequest request) throws YarnRemoteException {
- GetNewApplicationIdRequestProto requestProto = ((GetNewApplicationIdRequestPBImpl)request).getProto();
+ public GetNewApplicationResponse getNewApplication(
+ GetNewApplicationRequest request) throws YarnRemoteException {
+ GetNewApplicationRequestProto requestProto = ((GetNewApplicationRequestPBImpl)request).getProto();
try {
- return new GetNewApplicationIdResponsePBImpl(proxy.getNewApplicationId(null, requestProto));
+ return new GetNewApplicationResponsePBImpl(proxy.getNewApplication(null, requestProto));
} catch (ServiceException e) {
if (e.getCause() instanceof YarnRemoteException) {
throw (YarnRemoteException)e.getCause();
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java Thu Sep 29 00:42:47 2011
@@ -19,17 +19,15 @@
package org.apache.hadoop.yarn.api.impl.pb.service;
import org.apache.hadoop.yarn.api.ClientRMProtocol;
-import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationIdResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationRequestPBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAllApplicationsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAllApplicationsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportRequestPBImpl;
@@ -38,18 +36,18 @@ import org.apache.hadoop.yarn.api.protoc
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesResponsePBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationIdRequestPBImpl;
-import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationIdResponsePBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationRequestPBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoResponsePBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationRequestPBImpl;
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.proto.ClientRMProtocol.ClientRMProtocolService.BlockingInterface;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
@@ -58,12 +56,14 @@ import org.apache.hadoop.yarn.proto.Yarn
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationIdRequestProto;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationIdResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto;
@@ -79,12 +79,12 @@ public class ClientRMProtocolPBServiceIm
}
@Override
- public FinishApplicationResponseProto finishApplication(RpcController arg0,
- FinishApplicationRequestProto proto) throws ServiceException {
- FinishApplicationRequestPBImpl request = new FinishApplicationRequestPBImpl(proto);
+ public KillApplicationResponseProto forceKillApplication(RpcController arg0,
+ KillApplicationRequestProto proto) throws ServiceException {
+ KillApplicationRequestPBImpl request = new KillApplicationRequestPBImpl(proto);
try {
- FinishApplicationResponse response = real.finishApplication(request);
- return ((FinishApplicationResponsePBImpl)response).getProto();
+ KillApplicationResponse response = real.forceKillApplication(request);
+ return ((KillApplicationResponsePBImpl)response).getProto();
} catch (YarnRemoteException e) {
throw new ServiceException(e);
}
@@ -116,13 +116,13 @@ public class ClientRMProtocolPBServiceIm
}
@Override
- public GetNewApplicationIdResponseProto getNewApplicationId(
- RpcController arg0, GetNewApplicationIdRequestProto proto)
+ public GetNewApplicationResponseProto getNewApplication(
+ RpcController arg0, GetNewApplicationRequestProto proto)
throws ServiceException {
- GetNewApplicationIdRequestPBImpl request = new GetNewApplicationIdRequestPBImpl(proto);
+ GetNewApplicationRequestPBImpl request = new GetNewApplicationRequestPBImpl(proto);
try {
- GetNewApplicationIdResponse response = real.getNewApplicationId(request);
- return ((GetNewApplicationIdResponsePBImpl)response).getProto();
+ GetNewApplicationResponse response = real.getNewApplication(request);
+ return ((GetNewApplicationResponsePBImpl)response).getProto();
} catch (YarnRemoteException e) {
throw new ServiceException(e);
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java Thu Sep 29 00:42:47 2011
@@ -357,6 +357,12 @@ public class YarnConfiguration extends C
public static final String NM_AUX_SERVICE_FMT =
NM_PREFIX + "aux-services.%s.class";
+ public static final String NM_USER_HOME_DIR =
+ NM_PREFIX + "user-home-dir";
+
+ public static final String DEFAULT_NM_USER_HOME_DIR= "/home/";
+
+
public static final int INVALID_CONTAINER_EXIT_STATUS = -1000;
public static final int ABORTED_CONTAINER_EXIT_STATUS = -100;
@@ -380,6 +386,6 @@ public class YarnConfiguration extends C
// Use apps manager address to figure out the host for webapp
addr = conf.get(YarnConfiguration.RM_ADDRESS, YarnConfiguration.DEFAULT_RM_ADDRESS);
String host = ADDR_SPLITTER.split(addr).iterator().next();
- return JOINER.join("http://", host, ":", port, "/");
+ return JOINER.join("http://", host, ":", port);
}
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/ProtoOverHadoopRpcEngine.java Thu Sep 29 00:42:47 2011
@@ -320,6 +320,12 @@ public class ProtoOverHadoopRpcEngine im
+ methodName);
MethodDescriptor methodDescriptor = service.getDescriptorForType()
.findMethodByName(methodName);
+ if (methodDescriptor == null) {
+ String msg = "Unknown method " + methodName + " called on "
+ + protocol + " protocol.";
+ LOG.warn(msg);
+ return handleException(new IOException(msg));
+ }
Message prototype = service.getRequestPrototype(methodDescriptor);
Message param = prototype.newBuilderForType()
.mergeFrom(rpcRequest.getRequestProto()).build();
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/StateMachineFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/StateMachineFactory.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/StateMachineFactory.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/StateMachineFactory.java Thu Sep 29 00:42:47 2011
@@ -20,10 +20,14 @@ package org.apache.hadoop.yarn.state;
import java.util.EnumMap;
import java.util.HashMap;
+import java.util.Iterator;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Set;
import java.util.Stack;
+import org.apache.hadoop.yarn.util.Graph;
+
/**
* State machine topology.
* This object is semantically immutable. If you have a
@@ -441,4 +445,39 @@ final public class StateMachineFactory
return currentState;
}
}
+
+ /**
+ * Generate a graph represents the state graph of this StateMachine
+ * @param name graph name
+ * @return Graph object generated
+ */
+ public Graph generateStateGraph(String name) {
+ maybeMakeStateMachineTable();
+ Graph g = new Graph(name);
+ for (STATE startState : stateMachineTable.keySet()) {
+ Map<EVENTTYPE, Transition<OPERAND, STATE, EVENTTYPE, EVENT>> transitions
+ = stateMachineTable.get(startState);
+ for (Entry<EVENTTYPE, Transition<OPERAND, STATE, EVENTTYPE, EVENT>> entry :
+ transitions.entrySet()) {
+ Transition<OPERAND, STATE, EVENTTYPE, EVENT> transition = entry.getValue();
+ if (transition instanceof StateMachineFactory.SingleInternalArc) {
+ StateMachineFactory.SingleInternalArc sa
+ = (StateMachineFactory.SingleInternalArc) transition;
+ Graph.Node fromNode = g.getNode(startState.toString());
+ Graph.Node toNode = g.getNode(sa.postState.toString());
+ fromNode.addEdge(toNode, entry.getKey().toString());
+ } else if (transition instanceof StateMachineFactory.MultipleInternalArc) {
+ StateMachineFactory.MultipleInternalArc ma
+ = (StateMachineFactory.MultipleInternalArc) transition;
+ Iterator<STATE> iter = ma.validPostStates.iterator();
+ while (iter.hasNext()) {
+ Graph.Node fromNode = g.getNode(startState.toString());
+ Graph.Node toNode = g.getNode(iter.next().toString());
+ fromNode.addEdge(toNode, entry.getKey().toString());
+ }
+ }
+ }
+ }
+ return g;
+ }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java Thu Sep 29 00:42:47 2011
@@ -20,7 +20,9 @@ package org.apache.hadoop.yarn.util;
import java.net.URI;
import java.util.Comparator;
+import java.util.List;
+import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
@@ -184,32 +186,31 @@ public class BuilderUtils {
return id;
}
- public static Container clone(Container c) {
- Container container = recordFactory.newRecordInstance(Container.class);
- container.setId(c.getId());
- container.setContainerToken(c.getContainerToken());
- container.setNodeId(c.getNodeId());
- container.setNodeHttpAddress(c.getNodeHttpAddress());
- container.setResource(c.getResource());
- container.setState(c.getState());
- return container;
+ public static NodeId newNodeId(String host, int port) {
+ NodeId nodeId = recordFactory.newRecordInstance(NodeId.class);
+ nodeId.setHost(host);
+ nodeId.setPort(port);
+ return nodeId;
}
public static Container newContainer(RecordFactory recordFactory,
ApplicationAttemptId appAttemptId, int containerId, NodeId nodeId,
- String nodeHttpAddress, Resource resource) {
+ String nodeHttpAddress, Resource resource, Priority priority) {
ContainerId containerID =
newContainerId(recordFactory, appAttemptId, containerId);
- return newContainer(containerID, nodeId, nodeHttpAddress, resource);
+ return newContainer(containerID, nodeId, nodeHttpAddress,
+ resource, priority);
}
public static Container newContainer(ContainerId containerId,
- NodeId nodeId, String nodeHttpAddress, Resource resource) {
+ NodeId nodeId, String nodeHttpAddress,
+ Resource resource, Priority priority) {
Container container = recordFactory.newRecordInstance(Container.class);
container.setId(containerId);
container.setNodeId(nodeId);
container.setNodeHttpAddress(nodeHttpAddress);
container.setResource(resource);
+ container.setPriority(priority);
container.setState(ContainerState.NEW);
ContainerStatus containerStatus = Records.newRecord(ContainerStatus.class);
containerStatus.setContainerId(containerId);
@@ -242,7 +243,7 @@ public class BuilderUtils {
public static ApplicationReport newApplicationReport(
ApplicationId applicationId, String user, String queue, String name,
String host, int rpcPort, String clientToken, ApplicationState state,
- String diagnostics, String url, long startTime) {
+ String diagnostics, String url, long startTime, long finishTime) {
ApplicationReport report = recordFactory
.newRecordInstance(ApplicationReport.class);
report.setApplicationId(applicationId);
@@ -256,6 +257,7 @@ public class BuilderUtils {
report.setDiagnostics(diagnostics);
report.setTrackingUrl(url);
report.setStartTime(startTime);
+ report.setFinishTime(finishTime);
return report;
}
@@ -273,5 +275,18 @@ public class BuilderUtils {
url.setFile(file);
return url;
}
-
+
+ public static AllocateRequest newAllocateRequest(
+ ApplicationAttemptId applicationAttemptId, int responseID,
+ float appProgress, List<ResourceRequest> resourceAsk,
+ List<ContainerId> containersToBeReleased) {
+ AllocateRequest allocateRequest = recordFactory
+ .newRecordInstance(AllocateRequest.class);
+ allocateRequest.setApplicationAttemptId(applicationAttemptId);
+ allocateRequest.setResponseId(responseID);
+ allocateRequest.setProgress(appProgress);
+ allocateRequest.addAllAsks(resourceAsk);
+ allocateRequest.addAllReleases(containersToBeReleased);
+ return allocateRequest;
+ }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java Thu Sep 29 00:42:47 2011
@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.util;
import static org.apache.hadoop.yarn.util.StringHelper._split;
+import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.NumberFormat;
@@ -45,6 +46,8 @@ import org.apache.hadoop.yarn.factory.pr
public class ConverterUtils {
public static final String APPLICATION_PREFIX = "application";
+ public static final String CONTAINER_PREFIX = "container";
+ public static final String APPLICATION_ATTEMPT_PREFIX = "appattempt";
/**
* return a hadoop path from a given url
@@ -132,14 +135,12 @@ public class ConverterUtils {
}
private static ApplicationAttemptId toApplicationAttemptId(
- RecordFactory recordFactory,
- Iterator<String> it) {
- ApplicationId appId =
- recordFactory.newRecordInstance(ApplicationId.class);
+ Iterator<String> it) throws NumberFormatException {
+ ApplicationId appId = Records.newRecord(ApplicationId.class);
appId.setClusterTimestamp(Long.parseLong(it.next()));
appId.setId(Integer.parseInt(it.next()));
- ApplicationAttemptId appAttemptId =
- recordFactory.newRecordInstance(ApplicationAttemptId.class);
+ ApplicationAttemptId appAttemptId = Records
+ .newRecord(ApplicationAttemptId.class);
appAttemptId.setApplicationId(appId);
appAttemptId.setAttemptId(Integer.parseInt(it.next()));
return appAttemptId;
@@ -149,16 +150,35 @@ public class ConverterUtils {
return cId.toString();
}
- public static ContainerId toContainerId(RecordFactory recordFactory,
- String containerIdStr) {
+ public static ContainerId toContainerId(String containerIdStr)
+ throws IOException {
Iterator<String> it = _split(containerIdStr).iterator();
- it.next(); // prefix. TODO: Validate container prefix
- ApplicationAttemptId appAttemptID =
- toApplicationAttemptId(recordFactory, it);
- ContainerId containerId =
- recordFactory.newRecordInstance(ContainerId.class);
- containerId.setApplicationAttemptId(appAttemptID);
- containerId.setId(Integer.parseInt(it.next()));
- return containerId;
+ if (!it.next().equals(CONTAINER_PREFIX)) {
+ throw new IOException("Invalid ContainerId prefix: " + containerIdStr);
+ }
+ try {
+ ApplicationAttemptId appAttemptID = toApplicationAttemptId(it);
+ ContainerId containerId = Records.newRecord(ContainerId.class);
+ containerId.setApplicationAttemptId(appAttemptID);
+ containerId.setId(Integer.parseInt(it.next()));
+ return containerId;
+ } catch (NumberFormatException n) {
+ throw new IOException("Invalid ContainerId: " + containerIdStr, n);
+ }
+ }
+
+ public static ApplicationAttemptId toApplicationAttemptId(
+ String applicationAttemptIdStr) throws IOException {
+ Iterator<String> it = _split(applicationAttemptIdStr).iterator();
+ if (!it.next().equals(APPLICATION_ATTEMPT_PREFIX)) {
+ throw new IOException("Invalid AppAttemptId prefix: "
+ + applicationAttemptIdStr);
+ }
+ try {
+ return toApplicationAttemptId(it);
+ } catch (NumberFormatException n) {
+ throw new IOException("Invalid AppAttemptId: "
+ + applicationAttemptIdStr, n);
+ }
}
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Dispatcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Dispatcher.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Dispatcher.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/Dispatcher.java Thu Sep 29 00:42:47 2011
@@ -84,6 +84,15 @@ public class Dispatcher extends HttpServ
prepareToExit();
return;
}
+ // If a redirectPath is provided, redirect there instead of serving
+ // "/" so that filters can differentiate the webapps.
+ if (uri.equals("/")) {
+ String redirectPath = webApp.getRedirectPath();
+ if (redirectPath != null && !redirectPath.isEmpty()) {
+ res.sendRedirect(redirectPath);
+ return;
+ }
+ }
String method = req.getMethod();
if (method.equals("OPTIONS")) {
doOptions(req, res);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java Thu Sep 29 00:42:47 2011
@@ -26,6 +26,7 @@ import com.google.inject.Provides;
import com.google.inject.servlet.GuiceFilter;
import com.google.inject.servlet.ServletModule;
+import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
@@ -44,6 +45,9 @@ public abstract class WebApp extends Ser
public enum HTTP { GET, POST, HEAD, PUT, DELETE };
private volatile String name;
+ private volatile List<String> servePathSpecs = new ArrayList<String>();
+ // path to redirect to if user goes to "/"
+ private volatile String redirectPath;
private volatile Configuration conf;
private volatile HttpServer httpServer;
private volatile GuiceFilter guiceFilter;
@@ -98,6 +102,22 @@ public abstract class WebApp extends Ser
public String name() { return this.name; }
+ void addServePathSpec(String path) { this.servePathSpecs.add(path); }
+
+ public String[] getServePathSpecs() {
+ return this.servePathSpecs.toArray(new String[this.servePathSpecs.size()]);
+ }
+
+ /**
+ * Set a path to redirect the user to if they just go to "/". For
+ * instance "/" goes to "/yarn/apps". This allows the filters to
+ * more easily differentiate the different webapps.
+ * @param path the path to redirect to
+ */
+ void setRedirectPath(String path) { this.redirectPath = path; }
+
+ public String getRedirectPath() { return this.redirectPath; }
+
void setHostClass(Class<?> cls) {
router.setHostClass(cls);
}
@@ -109,7 +129,10 @@ public abstract class WebApp extends Ser
@Override
public void configureServlets() {
setup();
- serve("/", "/__stop", StringHelper.join('/', name, '*')).with(Dispatcher.class);
+ serve("/", "/__stop").with(Dispatcher.class);
+ for (String path : this.servePathSpecs) {
+ serve(path).with(Dispatcher.class);
+ }
}
/**
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java Thu Sep 29 00:42:47 2011
@@ -113,6 +113,14 @@ public class WebApps {
};
}
webapp.setName(name);
+ String basePath = "/" + name;
+ webapp.setRedirectPath(basePath);
+ if (basePath.equals("/")) {
+ webapp.addServePathSpec("/*");
+ } else {
+ webapp.addServePathSpec(basePath);
+ webapp.addServePathSpec(basePath + "/*");
+ }
if (conf == null) {
conf = new Configuration();
}
@@ -142,7 +150,8 @@ public class WebApps {
}
}
HttpServer server =
- new HttpServer(name, bindAddress, port, findPort, conf);
+ new HttpServer(name, bindAddress, port, findPort, conf,
+ webapp.getServePathSpecs());
server.addGlobalFilter("guice", GuiceFilter.class.getName(), null);
webapp.setConf(conf);
webapp.setHttpServer(server);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java Thu Sep 29 00:42:47 2011
@@ -167,6 +167,16 @@ public class MockApps {
// TODO Auto-generated method stub
}
+ @Override
+ public long getFinishTime() {
+ // TODO Auto-generated method stub
+ return 0;
+ }
+ @Override
+ public void setFinishTime(long finishTime) {
+ // TODO Auto-generated method stub
+
+ }
};
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java Thu Sep 29 00:42:47 2011
@@ -25,9 +25,11 @@ import junit.framework.Assert;
import org.apache.avro.ipc.Server;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.yarn.api.ClientRMProtocol;
import org.apache.hadoop.yarn.api.ContainerManager;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainerRequest;
@@ -47,6 +49,7 @@ import org.apache.hadoop.yarn.factory.pr
import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.ipc.YarnRPC;
+import org.apache.hadoop.yarn.util.Records;
import org.junit.Test;
public class TestRPC {
@@ -66,6 +69,35 @@ public class TestRPC {
// }
@Test
+ public void testUnknownCall() {
+ Configuration conf = new Configuration();
+ conf.set(YarnConfiguration.IPC_RPC_IMPL, HadoopYarnProtoRPC.class
+ .getName());
+ YarnRPC rpc = YarnRPC.create(conf);
+ String bindAddr = "localhost:0";
+ InetSocketAddress addr = NetUtils.createSocketAddr(bindAddr);
+ Server server = rpc.getServer(ContainerManager.class,
+ new DummyContainerManager(), addr, conf, null, 1);
+ server.start();
+
+ // Any unrelated protocol would do
+ ClientRMProtocol proxy = (ClientRMProtocol) rpc.getProxy(
+ ClientRMProtocol.class, NetUtils.createSocketAddr("localhost:"
+ + server.getPort()), conf);
+
+ try {
+ proxy.getNewApplication(Records
+ .newRecord(GetNewApplicationRequest.class));
+ Assert.fail("Expected RPC call to fail with unknown method.");
+ } catch (YarnRemoteException e) {
+ Assert.assertTrue(e.getMessage().matches(
+ "Unknown method getNewApplication called on.*"
+ + "org.apache.hadoop.yarn.proto.ClientRMProtocol"
+ + "\\$ClientRMProtocolService\\$BlockingInterface protocol."));
+ }
+ }
+
+ @Test
public void testHadoopProtoRPC() throws Exception {
test(HadoopYarnProtoRPC.class.getName());
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/TestWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/TestWebApp.java?rev=1177130&r1=1177129&r2=1177130&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/TestWebApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/TestWebApp.java Thu Sep 29 00:42:47 2011
@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.webapp;
+import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.yarn.MockApps;
import org.apache.hadoop.yarn.webapp.Controller;
import org.apache.hadoop.yarn.webapp.WebApp;
@@ -148,6 +149,32 @@ public class TestWebApp {
app.stop();
}
+ @Test public void testServePaths() {
+ WebApp app = WebApps.$for("test", this).start();
+ assertEquals("/test", app.getRedirectPath());
+ String[] expectedPaths = { "/test", "/test/*" };
+ String[] pathSpecs = app.getServePathSpecs();
+
+ assertEquals(2, pathSpecs.length);
+ for(int i = 0; i < expectedPaths.length; i++) {
+ assertTrue(ArrayUtils.contains(pathSpecs, expectedPaths[i]));
+ }
+ app.stop();
+ }
+
+ @Test public void testServePathsNoName() {
+ WebApp app = WebApps.$for("", this).start();
+ assertEquals("/", app.getRedirectPath());
+ String[] expectedPaths = { "/*" };
+ String[] pathSpecs = app.getServePathSpecs();
+
+ assertEquals(1, pathSpecs.length);
+ for(int i = 0; i < expectedPaths.length; i++) {
+ assertTrue(ArrayUtils.contains(pathSpecs, expectedPaths[i]));
+ }
+ app.stop();
+ }
+
@Test public void testDefaultRoutes() throws Exception {
WebApp app = WebApps.$for("test", this).start();
String baseUrl = baseUrl(app);