You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by jl...@apache.org on 2013/06/04 03:50:16 UTC
svn commit: r1489271 - in /hadoop/common/trunk/hadoop-mapreduce-project: ./
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apac...
Author: jlowe
Date: Tue Jun 4 01:50:15 2013
New Revision: 1489271
URL: http://svn.apache.org/r1489271
Log:
MAPREDUCE-5283. Over 10 different tests have near identical implementations of AppContext. Contributed by Sandy Ryza
Added:
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryContext.java
Modified:
hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobConf.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Tue Jun 4 01:50:15 2013
@@ -279,6 +279,9 @@ Release 2.1.0-beta - UNRELEASED
MAPREDUCE-5300. Fix backward incompatibility for
o.a.h.mapreduce.filecache.DistributedCache. (Zhijie Shen via acmurthy)
+ MAPREDUCE-5283. Over 10 different tests have near identical
+ implementations of AppContext (Sandy Ryza via jlowe)
+
OPTIMIZATIONS
MAPREDUCE-4974. Optimising the LineRecordReader initialize() method
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java?rev=1489271&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java Tue Jun 4 01:50:15 2013
@@ -0,0 +1,118 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.app;
+
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.ClusterInfo;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+
+import com.google.common.collect.Maps;
+
+public class MockAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ public MockAppContext(int appid) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
+ jobs = null;
+ }
+
+ public MockAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
+ Map<JobId, Job> map = Maps.newHashMap();
+ Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
+ map.put(job.getID(), job);
+ jobs = map;
+ }
+
+ public MockAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ this(appid, numJobs, numTasks, numAttempts, false);
+ }
+
+ public MockAppContext(int appid, int numJobs, int numTasks, int numAttempts,
+ boolean hasFailedTasks) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts, hasFailedTasks);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+
+ @Override
+ public ClusterInfo getClusterInfo() {
+ return null;
+ }
+
+}
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java Tue Jun 4 01:50:15 2013
@@ -28,16 +28,12 @@ import java.util.Map.Entry;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.WebApps;
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Test;
@@ -46,103 +42,34 @@ import com.google.inject.Injector;
public class TestAMWebApp {
- static class TestAppContext implements AppContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
- }
-
- TestAppContext() {
- this(0, 1, 1, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
- }
-
@Test public void testAppControllerIndex() {
- TestAppContext ctx = new TestAppContext();
+ AppContext ctx = new MockAppContext(0, 1, 1, 1);
Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
AppController controller = injector.getInstance(AppController.class);
controller.index();
- assertEquals(ctx.appID.toString(), controller.get(APP_ID,""));
+ assertEquals(ctx.getApplicationID().toString(), controller.get(APP_ID,""));
}
@Test public void testAppView() {
- WebAppTests.testPage(AppView.class, AppContext.class, new TestAppContext());
+ WebAppTests.testPage(AppView.class, AppContext.class, new MockAppContext(0, 1, 1, 1));
}
@Test public void testJobView() {
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = getJobParams(appContext);
WebAppTests.testPage(JobPage.class, AppContext.class, appContext, params);
}
@Test public void testTasksView() {
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = getTaskParams(appContext);
WebAppTests.testPage(TasksPage.class, AppContext.class, appContext, params);
}
@Test public void testTaskView() {
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = getTaskParams(appContext);
App app = new App(appContext);
app.setJob(appContext.getAllJobs().values().iterator().next());
@@ -170,18 +97,18 @@ public class TestAMWebApp {
@Test public void testConfView() {
WebAppTests.testPage(JobConfPage.class, AppContext.class,
- new TestAppContext());
+ new MockAppContext(0, 1, 1, 1));
}
@Test public void testCountersView() {
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = getJobParams(appContext);
WebAppTests.testPage(CountersPage.class, AppContext.class,
appContext, params);
}
@Test public void testSingleCounterView() {
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Job job = appContext.getAllJobs().values().iterator().next();
// add a failed task to the job without any counters
Task failedTask = MockJobs.newTask(job.getID(), 2, 1, true);
@@ -196,14 +123,14 @@ public class TestAMWebApp {
}
@Test public void testTaskCountersView() {
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = getTaskParams(appContext);
WebAppTests.testPage(CountersPage.class, AppContext.class,
appContext, params);
}
@Test public void testSingleTaskCounterView() {
- AppContext appContext = new TestAppContext(0, 1, 1, 2);
+ AppContext appContext = new MockAppContext(0, 1, 1, 2);
Map<String, String> params = getTaskParams(appContext);
params.put(AMParams.COUNTER_GROUP,
"org.apache.hadoop.mapreduce.FileSystemCounter");
@@ -222,7 +149,7 @@ public class TestAMWebApp {
}
public static void main(String[] args) {
- WebApps.$for("yarn", AppContext.class, new TestAppContext(0, 8, 88, 4)).
+ WebApps.$for("yarn", AppContext.class, new MockAppContext(0, 8, 88, 4)).
at(58888).inDevMode().start(new AMWebApp()).joinThread();
}
}
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java Tue Jun 4 01:50:15 2013
@@ -23,22 +23,14 @@ import static org.junit.Assert.assertTru
import static org.junit.Assert.fail;
import java.io.StringReader;
-import java.util.Map;
import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
-import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONException;
@@ -72,82 +64,13 @@ import com.sun.jersey.test.framework.Web
public class TestAMWebServices extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
-
- static class TestAppContext implements AppContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
- }
-
- TestAppContext() {
- this(0, 1, 1, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
- }
+ private static AppContext appContext;
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
- appContext = new TestAppContext();
+ appContext = new MockAppContext(0, 1, 1, 1);
bind(JAXBContextResolver.class);
bind(AMWebServices.class);
bind(GenericExceptionHandler.class);
@@ -318,7 +241,7 @@ public class TestAMWebServices extends J
}
}
- public void verifyAMInfo(JSONObject info, TestAppContext ctx)
+ public void verifyAMInfo(JSONObject info, AppContext ctx)
throws JSONException {
assertEquals("incorrect number of elements", 5, info.length());
@@ -327,7 +250,7 @@ public class TestAMWebServices extends J
info.getLong("elapsedTime"));
}
- public void verifyAMInfoXML(String xml, TestAppContext ctx)
+ public void verifyAMInfoXML(String xml, AppContext ctx)
throws JSONException, Exception {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
@@ -348,7 +271,7 @@ public class TestAMWebServices extends J
}
}
- public void verifyAMInfoGeneric(TestAppContext ctx, String id, String user,
+ public void verifyAMInfoGeneric(AppContext ctx, String id, String user,
String name, long startedOn, long elapsedTime) {
WebServicesTestUtils.checkStringMatch("id", ctx.getApplicationID()
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java Tue Jun 4 01:50:15 2013
@@ -36,16 +36,11 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -82,82 +77,13 @@ import com.sun.jersey.test.framework.Web
public class TestAMWebServicesAttempts extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
-
- static class TestAppContext implements AppContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
- }
-
- TestAppContext() {
- this(0, 1, 2, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
- }
+ private static AppContext appContext;
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
- appContext = new TestAppContext();
+ appContext = new MockAppContext(0, 1, 2, 1);
bind(JAXBContextResolver.class);
bind(AMWebServices.class);
bind(GenericExceptionHandler.class);
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobConf.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobConf.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobConf.java Tue Jun 4 01:50:15 2013
@@ -40,6 +40,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
@@ -81,79 +82,11 @@ import com.sun.jersey.test.framework.Web
public class TestAMWebServicesJobConf extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
+ private static AppContext appContext;
private static File testConfDir = new File("target",
TestAMWebServicesJobConf.class.getSimpleName() + "confDir");
- static class TestAppContext implements AppContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- Map<JobId, Job> map = Maps.newHashMap();
- Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
- map.put(job.getID(), job);
- jobs = map;
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
- }
-
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
@@ -181,7 +114,7 @@ public class TestAMWebServicesJobConf ex
fail("error creating config file: " + e.getMessage());
}
- appContext = new TestAppContext(0, 2, 1, confPath);
+ appContext = new MockAppContext(0, 2, 1, confPath);
bind(JAXBContextResolver.class);
bind(AMWebServices.class);
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java Tue Jun 4 01:50:15 2013
@@ -38,16 +38,11 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -85,82 +80,13 @@ import com.sun.jersey.test.framework.Web
public class TestAMWebServicesJobs extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
-
- static class TestAppContext implements AppContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
- }
-
- TestAppContext() {
- this(0, 1, 2, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
- }
+ private static AppContext appContext;
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
- appContext = new TestAppContext();
+ appContext = new MockAppContext(0, 1, 2, 1);
bind(JAXBContextResolver.class);
bind(AMWebServices.class);
bind(GenericExceptionHandler.class);
@@ -545,7 +471,7 @@ public class TestAMWebServicesJobs exten
}
- public void verifyAMJobXML(NodeList nodes, TestAppContext appContext) {
+ public void verifyAMJobXML(NodeList nodes, AppContext appContext) {
assertEquals("incorrect number of elements", 1, nodes.getLength());
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java Tue Jun 4 01:50:15 2013
@@ -35,15 +35,10 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
@@ -79,82 +74,13 @@ import com.sun.jersey.test.framework.Web
public class TestAMWebServicesTasks extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
-
- static class TestAppContext implements AppContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
- }
-
- TestAppContext() {
- this(0, 1, 2, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
- }
+ private static AppContext appContext;
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
- appContext = new TestAppContext();
+ appContext = new MockAppContext(0, 1, 2, 1);
bind(JAXBContextResolver.class);
bind(AMWebServices.class);
bind(GenericExceptionHandler.class);
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryContext.java?rev=1489271&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryContext.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/MockHistoryContext.java Tue Jun 4 01:50:15 2013
@@ -0,0 +1,112 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+
+public class MockHistoryContext extends MockAppContext implements HistoryContext {
+
+ private final Map<JobId, Job> partialJobs;
+ private final Map<JobId, Job> fullJobs;
+
+ public MockHistoryContext(int numJobs, int numTasks, int numAttempts) {
+ super(0);
+ JobsPair jobs;
+ try {
+ jobs = MockHistoryJobs.newHistoryJobs(numJobs, numTasks, numAttempts);
+ } catch (IOException e) {
+ throw new YarnException(e);
+ }
+ partialJobs = jobs.partial;
+ fullJobs = jobs.full;
+ }
+
+ public MockHistoryContext(int appid, int numJobs, int numTasks,
+ int numAttempts) {
+ super(appid);
+ JobsPair jobs;
+ try {
+ jobs = MockHistoryJobs.newHistoryJobs(getApplicationID(), numJobs, numTasks,
+ numAttempts);
+ } catch (IOException e) {
+ throw new YarnException(e);
+ }
+ partialJobs = jobs.partial;
+ fullJobs = jobs.full;
+ }
+
+ public MockHistoryContext(int appid, int numTasks, int numAttempts, Path confPath) {
+ super(appid, numTasks, numAttempts, confPath);
+ fullJobs = super.getAllJobs();
+ partialJobs = null;
+ }
+
+ public MockHistoryContext(int appid, int numJobs, int numTasks, int numAttempts,
+ boolean hasFailedTasks) {
+ super(appid);
+ JobsPair jobs;
+ try {
+ jobs = MockHistoryJobs.newHistoryJobs(getApplicationID(), numJobs, numTasks,
+ numAttempts, hasFailedTasks);
+ } catch (IOException e) {
+ throw new YarnException(e);
+ }
+ partialJobs = jobs.partial;
+ fullJobs = jobs.full;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return fullJobs.get(jobID);
+ }
+
+ public Job getPartialJob(JobId jobID) {
+ return partialJobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return fullJobs;
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs(ApplicationId appID) {
+ return null;
+ }
+
+ @Override
+ public JobsInfo getPartialJobs(Long offset, Long count, String user,
+ String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
+ JobState jobState) {
+ return CachedHistoryStorage.getPartialJobs(this.partialJobs.values(),
+ offset, count, user, queue, sBegin, sEnd, fBegin, fEnd, jobState);
+ }
+
+}
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java Tue Jun 4 01:50:15 2013
@@ -41,16 +41,11 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MRApp;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
-import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.webapp.TestAMWebApp;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Test;
@@ -61,92 +56,17 @@ import com.google.inject.Injector;
public class TestHSWebApp {
private static final Log LOG = LogFactory.getLog(TestHSWebApp.class);
- static class TestAppContext implements AppContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts,
- boolean hasFailedTasks) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts,
- hasFailedTasks);
- }
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- this(appid, numJobs, numTasks, numAttempts, false);
- }
-
- TestAppContext() {
- this(0, 1, 1, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
- }
-
@Test public void testAppControllerIndex() {
- TestAppContext ctx = new TestAppContext();
+ MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
HsController controller = injector.getInstance(HsController.class);
controller.index();
- assertEquals(ctx.appID.toString(), controller.get(APP_ID,""));
+ assertEquals(ctx.getApplicationID().toString(), controller.get(APP_ID,""));
}
@Test public void testJobView() {
LOG.info("HsJobPage");
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = TestAMWebApp.getJobParams(appContext);
WebAppTests.testPage(HsJobPage.class, AppContext.class, appContext, params);
}
@@ -154,7 +74,7 @@ public class TestHSWebApp {
@Test
public void testTasksView() {
LOG.info("HsTasksPage");
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
WebAppTests.testPage(HsTasksPage.class, AppContext.class, appContext,
params);
@@ -163,7 +83,7 @@ public class TestHSWebApp {
@Test
public void testTaskView() {
LOG.info("HsTaskPage");
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
WebAppTests
.testPage(HsTaskPage.class, AppContext.class, appContext, params);
@@ -171,7 +91,7 @@ public class TestHSWebApp {
@Test public void testAttemptsWithJobView() {
LOG.info("HsAttemptsPage with data");
- TestAppContext ctx = new TestAppContext();
+ MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
JobId id = ctx.getAllJobs().keySet().iterator().next();
Map<String, String> params = new HashMap<String,String>();
params.put(JOB_ID, id.toString());
@@ -183,7 +103,7 @@ public class TestHSWebApp {
@Test public void testAttemptsView() {
LOG.info("HsAttemptsPage");
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
WebAppTests.testPage(HsAttemptsPage.class, AppContext.class,
appContext, params);
@@ -192,18 +112,18 @@ public class TestHSWebApp {
@Test public void testConfView() {
LOG.info("HsConfPage");
WebAppTests.testPage(HsConfPage.class, AppContext.class,
- new TestAppContext());
+ new MockAppContext(0, 1, 1, 1));
}
@Test public void testAboutView() {
LOG.info("HsAboutPage");
WebAppTests.testPage(HsAboutPage.class, AppContext.class,
- new TestAppContext());
+ new MockAppContext(0, 1, 1, 1));
}
@Test public void testJobCounterView() {
LOG.info("JobCounterView");
- AppContext appContext = new TestAppContext();
+ AppContext appContext = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = TestAMWebApp.getJobParams(appContext);
WebAppTests.testPage(HsCountersPage.class, AppContext.class,
appContext, params);
@@ -211,7 +131,7 @@ public class TestHSWebApp {
@Test public void testJobCounterViewForKilledJob() {
LOG.info("JobCounterViewForKilledJob");
- AppContext appContext = new TestAppContext(0, 1, 1, 1, true);
+ AppContext appContext = new MockAppContext(0, 1, 1, 1, true);
Map<String, String> params = TestAMWebApp.getJobParams(appContext);
WebAppTests.testPage(HsCountersPage.class, AppContext.class,
appContext, params);
@@ -220,7 +140,7 @@ public class TestHSWebApp {
@Test public void testSingleCounterView() {
LOG.info("HsSingleCounterPage");
WebAppTests.testPage(HsSingleCounterPage.class, AppContext.class,
- new TestAppContext());
+ new MockAppContext(0, 1, 1, 1));
}
@Test
@@ -228,7 +148,7 @@ public class TestHSWebApp {
LOG.info("HsLogsPage");
Injector injector =
WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class,
- new TestAppContext());
+ new MockAppContext(0, 1, 1, 1));
PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
verify(spyPw).write("Cannot get container logs without a ContainerId");
verify(spyPw).write("Cannot get container logs without a NodeId");
@@ -238,7 +158,7 @@ public class TestHSWebApp {
@Test
public void testLogsView2() throws IOException {
LOG.info("HsLogsPage with data");
- TestAppContext ctx = new TestAppContext();
+ MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = new HashMap<String, String>();
params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
@@ -260,7 +180,7 @@ public class TestHSWebApp {
@Test
public void testLogsViewSingle() throws IOException {
LOG.info("HsLogsPage with params for single log and data limits");
- TestAppContext ctx = new TestAppContext();
+ MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = new HashMap<String, String>();
final Configuration conf = new YarnConfiguration();
@@ -295,7 +215,7 @@ public class TestHSWebApp {
@Test
public void testLogsViewBadStartEnd() throws IOException {
LOG.info("HsLogsPage with bad start/end params");
- TestAppContext ctx = new TestAppContext();
+ MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
Map<String, String> params = new HashMap<String, String>();
params.put("start", "foo");
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java Tue Jun 4 01:50:15 2013
@@ -22,28 +22,18 @@ import static org.junit.Assert.assertEqu
import static org.junit.Assert.fail;
import java.io.StringReader;
-import java.util.Map;
import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
-import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -77,97 +67,14 @@ import com.sun.jersey.test.framework.Web
public class TestHsWebServices extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
+ private static HistoryContext appContext;
private static HsWebApp webApp;
- static class TestAppContext implements HistoryContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
- }
-
- TestAppContext() {
- this(0, 1, 1, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
-
- @Override
- public Map<JobId, Job> getAllJobs(ApplicationId appID) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public JobsInfo getPartialJobs(Long offset, Long count, String user,
- String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
- JobState jobState) {
- // TODO Auto-generated method stub
- return null;
- }
- }
-
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
- appContext = new TestAppContext();
+ appContext = new MockHistoryContext(0, 1, 1, 1);
JobHistory jobHistoryService = new JobHistory();
HistoryContext historyContext = (HistoryContext) jobHistoryService;
webApp = new HsWebApp(historyContext);
@@ -356,7 +263,7 @@ public class TestHsWebServices extends J
JobHistoryServer.historyServerTimeStamp, startedon);
}
- public void verifyHSInfo(JSONObject info, TestAppContext ctx)
+ public void verifyHSInfo(JSONObject info, AppContext ctx)
throws JSONException {
assertEquals("incorrect number of elements", 4, info.length());
@@ -365,7 +272,7 @@ public class TestHsWebServices extends J
info.getLong("startedOn"));
}
- public void verifyHSInfoXML(String xml, TestAppContext ctx)
+ public void verifyHSInfoXML(String xml, AppContext ctx)
throws JSONException, Exception {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java Tue Jun 4 01:50:15 2013
@@ -35,22 +35,15 @@ import javax.xml.parsers.DocumentBuilder
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
@@ -89,97 +82,14 @@ import com.sun.jersey.test.framework.Web
public class TestHsWebServicesAttempts extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
+ private static HistoryContext appContext;
private static HsWebApp webApp;
- static class TestAppContext implements HistoryContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
- }
-
- TestAppContext() {
- this(0, 1, 2, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
-
- @Override
- public Map<JobId, Job> getAllJobs(ApplicationId appID) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public JobsInfo getPartialJobs(Long offset, Long count, String user,
- String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
- JobState jobState) {
- // TODO Auto-generated method stub
- return null;
- }
- }
-
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
- appContext = new TestAppContext();
+ appContext = new MockHistoryContext(0, 1, 2, 1);
webApp = mock(HsWebApp.class);
when(webApp.name()).thenReturn("hsmockwebapp");
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java Tue Jun 4 01:50:15 2013
@@ -41,18 +41,11 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -67,7 +60,6 @@ import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
-import com.google.common.collect.Maps;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
@@ -87,94 +79,12 @@ import com.sun.jersey.test.framework.Web
public class TestHsWebServicesJobConf extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
+ private static HistoryContext appContext;
private static HsWebApp webApp;
private static File testConfDir = new File("target",
TestHsWebServicesJobConf.class.getSimpleName() + "confDir");
- static class TestAppContext implements HistoryContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- Map<JobId, Job> map = Maps.newHashMap();
- Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
- map.put(job.getID(), job);
- jobs = map;
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
-
- @Override
- public Map<JobId, Job> getAllJobs(ApplicationId appID) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public JobsInfo getPartialJobs(Long offset, Long count, String user,
- String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
- JobState jobState) {
- // TODO Auto-generated method stub
- return null;
- }
- }
-
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
@@ -202,7 +112,7 @@ public class TestHsWebServicesJobConf ex
fail("error creating config file: " + e.getMessage());
}
- appContext = new TestAppContext(0, 2, 1, confPath);
+ appContext = new MockHistoryContext(0, 2, 1, confPath);
webApp = mock(HsWebApp.class);
when(webApp.name()).thenReturn("hsmockwebapp");
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java Tue Jun 4 01:50:15 2013
@@ -27,7 +27,6 @@ import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
-import java.io.IOException;
import java.io.StringReader;
import java.util.Map;
@@ -38,23 +37,12 @@ import javax.xml.parsers.DocumentBuilder
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs;
-import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.YarnException;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -91,116 +79,14 @@ import com.sun.jersey.test.framework.Web
public class TestHsWebServicesJobs extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
+ private static MockHistoryContext appContext;
private static HsWebApp webApp;
- static class TestAppContext implements HistoryContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> partialJobs;
- final Map<JobId, Job> fullJobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts,
- boolean hasFailedTasks) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- JobsPair jobs;
- try {
- jobs = MockHistoryJobs.newHistoryJobs(appID, numJobs, numTasks,
- numAttempts, hasFailedTasks);
- } catch (IOException e) {
- throw new YarnException(e);
- }
- partialJobs = jobs.partial;
- fullJobs = jobs.full;
- }
-
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- this(appid, numJobs, numTasks, numAttempts, false);
- }
-
- TestAppContext() {
- this(0, 1, 2, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return fullJobs.get(jobID);
- }
-
- public Job getPartialJob(JobId jobID) {
- return partialJobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return partialJobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
-
- @Override
- public Map<JobId, Job> getAllJobs(ApplicationId appID) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public JobsInfo getPartialJobs(Long offset, Long count, String user,
- String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
- JobState jobState) {
- return CachedHistoryStorage.getPartialJobs(this.partialJobs.values(),
- offset, count, user, queue, sBegin, sEnd, fBegin, fEnd, jobState);
- }
- }
-
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
- appContext = new TestAppContext();
+ appContext = new MockHistoryContext(0, 1, 2, 1, false);
webApp = mock(HsWebApp.class);
when(webApp.name()).thenReturn("hsmockwebapp");
@@ -312,7 +198,7 @@ public class TestHsWebServicesJobs exten
verifyHsJobPartialXML(job, appContext);
}
- public void verifyHsJobPartialXML(NodeList nodes, TestAppContext appContext) {
+ public void verifyHsJobPartialXML(NodeList nodes, MockHistoryContext appContext) {
assertEquals("incorrect number of elements", 1, nodes.getLength());
@@ -338,7 +224,7 @@ public class TestHsWebServicesJobs exten
}
}
- public void verifyHsJobXML(NodeList nodes, TestAppContext appContext) {
+ public void verifyHsJobXML(NodeList nodes, AppContext appContext) {
assertEquals("incorrect number of elements", 1, nodes.getLength());
@@ -640,7 +526,7 @@ public class TestHsWebServicesJobs exten
@Test
public void testJobCountersForKilledJob() throws Exception {
WebResource r = resource();
- appContext = new TestAppContext(0, 1, 1, 1, true);
+ appContext = new MockHistoryContext(0, 1, 1, 1, true);
injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java Tue Jun 4 01:50:15 2013
@@ -23,7 +23,6 @@ import static org.junit.Assert.assertTru
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -35,20 +34,10 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs;
-import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.YarnException;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -78,105 +67,14 @@ import com.sun.jersey.test.framework.Web
public class TestHsWebServicesJobsQuery extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
+ private static MockHistoryContext appContext;
private static HsWebApp webApp;
- static class TestAppContext implements HistoryContext {
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> fullJobs;
- final Map<JobId, Job> partialJobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int numJobs, int numTasks, int numAttempts) {
- JobsPair jobs;
- try {
- jobs = MockHistoryJobs.newHistoryJobs(numJobs, numTasks, numAttempts);
- } catch (IOException e) {
- throw new YarnException(e);
- }
- partialJobs = jobs.partial;
- fullJobs = jobs.full;
- }
-
- TestAppContext() {
- this(3, 2, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return null;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return null;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return fullJobs.get(jobID);
- }
-
- public Job getPartialJob(JobId jobID) {
- return partialJobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return partialJobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
-
- @Override
- public Map<JobId, Job> getAllJobs(ApplicationId appID) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public JobsInfo getPartialJobs(Long offset, Long count, String user,
- String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
- JobState jobState) {
- return CachedHistoryStorage.getPartialJobs(this.partialJobs.values(),
- offset, count, user, queue, sBegin, sEnd, fBegin, fEnd, jobState);
- }
- }
-
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
- appContext = new TestAppContext();
+ appContext = new MockHistoryContext(3, 2, 1);
webApp = mock(HsWebApp.class);
when(webApp.name()).thenReturn("hsmockwebapp");
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java?rev=1489271&r1=1489270&r2=1489271&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java Tue Jun 4 01:50:15 2013
@@ -34,21 +34,14 @@ import javax.xml.parsers.DocumentBuilder
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -85,97 +78,14 @@ import com.sun.jersey.test.framework.Web
public class TestHsWebServicesTasks extends JerseyTest {
private static Configuration conf = new Configuration();
- private static TestAppContext appContext;
+ private static MockHistoryContext appContext;
private static HsWebApp webApp;
- static class TestAppContext implements HistoryContext {
- final ApplicationAttemptId appAttemptID;
- final ApplicationId appID;
- final String user = MockJobs.newUserName();
- final Map<JobId, Job> jobs;
- final long startTime = System.currentTimeMillis();
-
- TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
- appID = MockJobs.newAppID(appid);
- appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
- jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
- }
-
- TestAppContext() {
- this(0, 1, 2, 1);
- }
-
- @Override
- public ApplicationAttemptId getApplicationAttemptId() {
- return appAttemptID;
- }
-
- @Override
- public ApplicationId getApplicationID() {
- return appID;
- }
-
- @Override
- public CharSequence getUser() {
- return user;
- }
-
- @Override
- public Job getJob(JobId jobID) {
- return jobs.get(jobID);
- }
-
- @Override
- public Map<JobId, Job> getAllJobs() {
- return jobs; // OK
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public EventHandler getEventHandler() {
- return null;
- }
-
- @Override
- public Clock getClock() {
- return null;
- }
-
- @Override
- public String getApplicationName() {
- return "TestApp";
- }
-
- @Override
- public long getStartTime() {
- return startTime;
- }
-
- @Override
- public ClusterInfo getClusterInfo() {
- return null;
- }
-
- @Override
- public Map<JobId, Job> getAllJobs(ApplicationId appID) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public JobsInfo getPartialJobs(Long offset, Long count, String user,
- String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
- JobState jobState) {
- // TODO Auto-generated method stub
- return null;
- }
- }
-
private Injector injector = Guice.createInjector(new ServletModule() {
@Override
protected void configureServlets() {
- appContext = new TestAppContext();
+ appContext = new MockHistoryContext(0, 1, 2, 1);
webApp = mock(HsWebApp.class);
when(webApp.name()).thenReturn("hsmockwebapp");