Posted to mapreduce-commits@hadoop.apache.org by to...@apache.org on 2012/04/11 07:47:51 UTC

svn commit: r1324567 [2/2] - in /hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project: ./ conf/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ hadoop-mapreduce-client/hadoop-mapreduce-clie...

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java Wed Apr 11 05:47:40 2012
@@ -32,10 +32,8 @@ import javax.ws.rs.core.UriInfo;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
-import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -49,6 +47,7 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptsInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptsInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;
@@ -64,7 +63,7 @@ import com.google.inject.Inject;
 
 @Path("/ws/v1/history")
 public class HsWebServices {
-  private final AppContext appCtx;
+  private final HistoryContext ctx;
   private WebApp webapp;
   private final Configuration conf;
 
@@ -72,9 +71,9 @@ public class HsWebServices {
   UriInfo uriInfo;
 
   @Inject
-  public HsWebServices(final AppContext appCtx, final Configuration conf,
+  public HsWebServices(final HistoryContext ctx, final Configuration conf,
       final WebApp webapp) {
-    this.appCtx = appCtx;
+    this.ctx = ctx;
     this.conf = conf;
     this.webapp = webapp;
   }
@@ -103,33 +102,22 @@ public class HsWebServices {
       @QueryParam("startedTimeEnd") String startedEnd,
       @QueryParam("finishedTimeBegin") String finishBegin,
       @QueryParam("finishedTimeEnd") String finishEnd) {
-    JobsInfo allJobs = new JobsInfo();
-    long num = 0;
-    boolean checkCount = false;
-    boolean checkStart = false;
-    boolean checkEnd = false;
-    long countNum = 0;
-
-    // set values suitable in case both of begin/end not specified
-    long sBegin = 0;
-    long sEnd = Long.MAX_VALUE;
-    long fBegin = 0;
-    long fEnd = Long.MAX_VALUE;
 
+    Long countParam = null;
+    
     if (count != null && !count.isEmpty()) {
-      checkCount = true;
       try {
-        countNum = Long.parseLong(count);
+        countParam = Long.parseLong(count);
       } catch (NumberFormatException e) {
         throw new BadRequestException(e.getMessage());
       }
-      if (countNum <= 0) {
+      if (countParam <= 0) {
         throw new BadRequestException("limit value must be greater then 0");
       }
     }
 
+    Long sBegin = null;
     if (startedBegin != null && !startedBegin.isEmpty()) {
-      checkStart = true;
       try {
         sBegin = Long.parseLong(startedBegin);
       } catch (NumberFormatException e) {
@@ -139,8 +127,9 @@ public class HsWebServices {
         throw new BadRequestException("startedTimeBegin must be greater than 0");
       }
     }
+    
+    Long sEnd = null;
     if (startedEnd != null && !startedEnd.isEmpty()) {
-      checkStart = true;
       try {
         sEnd = Long.parseLong(startedEnd);
       } catch (NumberFormatException e) {
@@ -150,13 +139,13 @@ public class HsWebServices {
         throw new BadRequestException("startedTimeEnd must be greater than 0");
       }
     }
-    if (sBegin > sEnd) {
+    if (sBegin != null && sEnd != null && sBegin > sEnd) {
       throw new BadRequestException(
           "startedTimeEnd must be greater than startTimeBegin");
     }
 
+    Long fBegin = null;
     if (finishBegin != null && !finishBegin.isEmpty()) {
-      checkEnd = true;
       try {
         fBegin = Long.parseLong(finishBegin);
       } catch (NumberFormatException e) {
@@ -166,8 +155,8 @@ public class HsWebServices {
         throw new BadRequestException("finishedTimeBegin must be greater than 0");
       }
     }
+    Long fEnd = null;
     if (finishEnd != null && !finishEnd.isEmpty()) {
-      checkEnd = true;
       try {
         fEnd = Long.parseLong(finishEnd);
       } catch (NumberFormatException e) {
@@ -177,53 +166,18 @@ public class HsWebServices {
         throw new BadRequestException("finishedTimeEnd must be greater than 0");
       }
     }
-    if (fBegin > fEnd) {
+    if (fBegin != null && fEnd != null && fBegin > fEnd) {
       throw new BadRequestException(
           "finishedTimeEnd must be greater than finishedTimeBegin");
     }
-
-    for (Job job : appCtx.getAllJobs().values()) {
-      if (checkCount && num == countNum) {
-        break;
-      }
-
-      if (stateQuery != null && !stateQuery.isEmpty()) {
-        JobState.valueOf(stateQuery);
-        if (!job.getState().toString().equalsIgnoreCase(stateQuery)) {
-          continue;
-        }
-      }
-
-      // can't really validate queue is a valid one since queues could change
-      if (queueQuery != null && !queueQuery.isEmpty()) {
-        if (!job.getQueueName().equals(queueQuery)) {
-          continue;
-        }
-      }
-
-      if (userQuery != null && !userQuery.isEmpty()) {
-        if (!job.getUserName().equals(userQuery)) {
-          continue;
-        }
-      }
-
-      JobReport report = job.getReport();
-      
-      if (checkStart
-          && (report.getStartTime() < sBegin || report.getStartTime() > sEnd)) {
-        continue;
-      }
-      if (checkEnd
-          && (report.getFinishTime() < fBegin || report.getFinishTime() > fEnd)) {
-        continue;
-      }
-      
-      JobInfo jobInfo = new JobInfo(job);
-      
-      allJobs.add(jobInfo);
-      num++;
+    
+    JobState jobState = null;
+    if (stateQuery != null) {
+      jobState = JobState.valueOf(stateQuery);
     }
-    return allJobs;
+
+    return ctx.getPartialJobs(0l, countParam, userQuery, queueQuery, 
+        sBegin, sEnd, fBegin, fEnd, jobState);
   }
 
   @GET
@@ -231,7 +185,7 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public JobInfo getJob(@PathParam("jobid") String jid) {
 
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
     return new JobInfo(job);
   }
 
@@ -240,7 +194,7 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
 
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
     AMAttemptsInfo amAttempts = new AMAttemptsInfo();
     for (AMInfo amInfo : job.getAMInfos()) {
       AMAttemptInfo attempt = new AMAttemptInfo(amInfo, MRApps.toString(job
@@ -256,8 +210,8 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public JobCounterInfo getJobCounters(@PathParam("jobid") String jid) {
 
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
-    return new JobCounterInfo(this.appCtx, job);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
+    return new JobCounterInfo(this.ctx, job);
   }
 
   @GET
@@ -265,7 +219,7 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public ConfInfo getJobConf(@PathParam("jobid") String jid) {
 
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
     ConfInfo info;
     try {
       info = new ConfInfo(job, this.conf);
@@ -282,7 +236,7 @@ public class HsWebServices {
   public TasksInfo getJobTasks(@PathParam("jobid") String jid,
       @QueryParam("type") String type) {
 
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
     TasksInfo allTasks = new TasksInfo();
     for (Task task : job.getTasks().values()) {
       TaskType ttype = null;
@@ -307,7 +261,7 @@ public class HsWebServices {
   public TaskInfo getJobTask(@PathParam("jobid") String jid,
       @PathParam("taskid") String tid) {
 
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
     Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
     return new TaskInfo(task);
 
@@ -319,7 +273,7 @@ public class HsWebServices {
   public JobTaskCounterInfo getSingleTaskCounters(
       @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
 
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
     TaskId taskID = MRApps.toTaskID(tid);
     if (taskID == null) {
       throw new NotFoundException("taskid " + tid + " not found or invalid");
@@ -338,7 +292,7 @@ public class HsWebServices {
       @PathParam("taskid") String tid) {
 
     TaskAttemptsInfo attempts = new TaskAttemptsInfo();
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
     Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
     for (TaskAttempt ta : task.getAttempts().values()) {
       if (ta != null) {
@@ -358,7 +312,7 @@ public class HsWebServices {
   public TaskAttemptInfo getJobTaskAttemptId(@PathParam("jobid") String jid,
       @PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
 
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
     Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
     TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
         task);
@@ -376,7 +330,7 @@ public class HsWebServices {
       @PathParam("jobid") String jid, @PathParam("taskid") String tid,
       @PathParam("attemptid") String attId) {
 
-    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
     Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
     TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
         task);

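Note on the getJobs() rewrite above: the REST layer now keeps only parameter validation and delegates all filtering and pagination to HistoryContext.getPartialJobs(). Each numeric query parameter follows the same parse-or-400 pattern; below is a minimal standalone sketch of that pattern, using a hypothetical parseLongParam helper (the commit inlines this logic per parameter, and the time parameters use a slightly different bounds check):

    // Sketch only: parseLongParam is a hypothetical helper, not part of the
    // commit. BadRequestException is org.apache.hadoop.yarn.webapp's.
    private static Long parseLongParam(String value, String name) {
      if (value == null || value.isEmpty()) {
        return null; // absent parameter means "no constraint"
      }
      long parsed;
      try {
        parsed = Long.parseLong(value);
      } catch (NumberFormatException e) {
        throw new BadRequestException(e.getMessage());
      }
      if (parsed <= 0) {
        throw new BadRequestException(name + " must be greater than 0");
      }
      return parsed;
    }

A null return stands for "not specified", which is why the range checks above fire only when both endpoints are present (sBegin != null && sEnd != null && sBegin > sEnd).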
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java Wed Apr 11 05:47:40 2012
@@ -92,6 +92,14 @@ public class TestJobHistoryParsing {
     checkHistoryParsing(3, 0, 2);
   }
   
+  private static String getJobSummary(FileContext fc, Path path) throws IOException {
+    Path qPath = fc.makeQualified(path);
+    FSDataInputStream in = fc.open(qPath);
+    String jobSummaryString = in.readUTF();
+    in.close();
+    return jobSummaryString;
+  }
+  
   private void checkHistoryParsing(final int numMaps, final int numReduces,
       final int numSuccessfulMaps) 
   throws Exception {
@@ -244,7 +252,7 @@ public class TestJobHistoryParsing {
       String summaryFileName = JobHistoryUtils
           .getIntermediateSummaryFileName(jobId);
       Path summaryFile = new Path(jobhistoryDir, summaryFileName);
-      String jobSummaryString = jobHistory.getJobSummary(fc, summaryFile);
+      String jobSummaryString = getJobSummary(fc, summaryFile);
       Assert.assertTrue(jobSummaryString.contains("resourcesPerMap=100"));
       Assert.assertTrue(jobSummaryString.contains("resourcesPerReduce=100"));
       Assert.assertNotNull(jobSummaryString);
@@ -259,6 +267,8 @@ public class TestJobHistoryParsing {
 
       Assert.assertEquals("JobId does not match", jobId.toString(),
           jobSummaryElements.get("jobId"));
+      Assert.assertEquals("JobName does not match", "test",
+          jobSummaryElements.get("jobName"));
       Assert.assertTrue("submitTime should not be 0",
           Long.parseLong(jobSummaryElements.get("submitTime")) != 0);
       Assert.assertTrue("launchTime should not be 0",

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java Wed Apr 11 05:47:40 2012
@@ -30,11 +30,13 @@ import javax.xml.parsers.DocumentBuilder
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.ClusterInfo;
@@ -77,7 +79,7 @@ public class TestHsWebServices extends J
   private static TestAppContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements AppContext {
+  static class TestAppContext implements HistoryContext {
     final ApplicationAttemptId appAttemptID;
     final ApplicationId appID;
     final String user = MockJobs.newUserName();
@@ -144,6 +146,20 @@ public class TestHsWebServices extends J
     public ClusterInfo getClusterInfo() {
       return null;
     }
+
+    @Override
+    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
+      // TODO Auto-generated method stub
+      return null;
+    }
+
+    @Override
+    public JobsInfo getPartialJobs(Long offset, Long count, String user,
+        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
+        JobState jobState) {
+      // TODO Auto-generated method stub
+      return null;
+    }
   }
 
   private Injector injector = Guice.createInjector(new ServletModule() {
@@ -160,6 +176,7 @@ public class TestHsWebServices extends J
       bind(GenericExceptionHandler.class);
       bind(WebApp.class).toInstance(webApp);
       bind(AppContext.class).toInstance(appContext);
+      bind(HistoryContext.class).toInstance(appContext);
       bind(Configuration.class).toInstance(conf);
 
       serve("/*").with(GuiceContainer.class);

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java Wed Apr 11 05:47:40 2012
@@ -35,6 +35,7 @@ import javax.xml.parsers.DocumentBuilder
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
@@ -42,6 +43,8 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.ClusterInfo;
@@ -89,7 +92,7 @@ public class TestHsWebServicesAttempts e
   private static TestAppContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements AppContext {
+  static class TestAppContext implements HistoryContext {
     final ApplicationAttemptId appAttemptID;
     final ApplicationId appID;
     final String user = MockJobs.newUserName();
@@ -156,6 +159,20 @@ public class TestHsWebServicesAttempts e
     public ClusterInfo getClusterInfo() {
       return null;
     }
+
+    @Override
+    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
+      // TODO Auto-generated method stub
+      return null;
+    }
+
+    @Override
+    public JobsInfo getPartialJobs(Long offset, Long count, String user,
+        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
+        JobState jobState) {
+      // TODO Auto-generated method stub
+      return null;
+    }
   }
 
   private Injector injector = Guice.createInjector(new ServletModule() {
@@ -171,6 +188,7 @@ public class TestHsWebServicesAttempts e
       bind(GenericExceptionHandler.class);
       bind(WebApp.class).toInstance(webApp);
       bind(AppContext.class).toInstance(appContext);
+      bind(HistoryContext.class).toInstance(appContext);
       bind(Configuration.class).toInstance(conf);
 
       serve("/*").with(GuiceContainer.class);

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java Wed Apr 11 05:47:40 2012
@@ -41,9 +41,12 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.ClusterInfo;
@@ -90,7 +93,7 @@ public class TestHsWebServicesJobConf ex
   private static File testConfDir = new File("target",
       TestHsWebServicesJobConf.class.getSimpleName() + "confDir");
 
-  static class TestAppContext implements AppContext {
+  static class TestAppContext implements HistoryContext {
     final ApplicationAttemptId appAttemptID;
     final ApplicationId appID;
     final String user = MockJobs.newUserName();
@@ -156,6 +159,20 @@ public class TestHsWebServicesJobConf ex
     public ClusterInfo getClusterInfo() {
       return null;
     }
+
+    @Override
+    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
+      // TODO Auto-generated method stub
+      return null;
+    }
+
+    @Override
+    public JobsInfo getPartialJobs(Long offset, Long count, String user,
+        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
+        JobState jobState) {
+      // TODO Auto-generated method stub
+      return null;
+    }
   }
 
   private Injector injector = Guice.createInjector(new ServletModule() {
@@ -195,6 +212,7 @@ public class TestHsWebServicesJobConf ex
       bind(GenericExceptionHandler.class);
       bind(WebApp.class).toInstance(webApp);
       bind(AppContext.class).toInstance(appContext);
+      bind(HistoryContext.class).toInstance(appContext);
       bind(Configuration.class).toInstance(conf);
 
       serve("/*").with(GuiceContainer.class);

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java Wed Apr 11 05:47:40 2012
@@ -38,11 +38,15 @@ import javax.xml.parsers.DocumentBuilder
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
 import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs;
 import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.ClusterInfo;
@@ -90,7 +94,7 @@ public class TestHsWebServicesJobs exten
   private static TestAppContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements AppContext {
+  static class TestAppContext implements HistoryContext {
     final ApplicationAttemptId appAttemptID;
     final ApplicationId appID;
     final String user = MockJobs.newUserName();
@@ -169,6 +173,20 @@ public class TestHsWebServicesJobs exten
     public ClusterInfo getClusterInfo() {
       return null;
     }
+
+    @Override
+    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
+      // TODO Auto-generated method stub
+      return null;
+    }
+
+    @Override
+    public JobsInfo getPartialJobs(Long offset, Long count, String user,
+        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
+        JobState jobState) {
+      return CachedHistoryStorage.getPartialJobs(this.partialJobs.values(), 
+          offset, count, user, queue, sBegin, sEnd, fBegin, fEnd, jobState);
+    }
   }
 
   private Injector injector = Guice.createInjector(new ServletModule() {
@@ -184,6 +202,7 @@ public class TestHsWebServicesJobs exten
       bind(GenericExceptionHandler.class);
       bind(WebApp.class).toInstance(webApp);
       bind(AppContext.class).toInstance(appContext);
+      bind(HistoryContext.class).toInstance(appContext);
       bind(Configuration.class).toInstance(conf);
 
       serve("/*").with(GuiceContainer.class);

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java Wed Apr 11 05:47:40 2012
@@ -36,8 +36,11 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
 import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs;
 import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.ClusterInfo;
@@ -77,7 +80,7 @@ public class TestHsWebServicesJobsQuery 
   private static TestAppContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements AppContext {
+  static class TestAppContext implements HistoryContext {
     final String user = MockJobs.newUserName();
     final Map<JobId, Job> fullJobs;
     final Map<JobId, Job> partialJobs;
@@ -152,6 +155,20 @@ public class TestHsWebServicesJobsQuery 
     public ClusterInfo getClusterInfo() {
       return null;
     }
+
+    @Override
+    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
+      // TODO Auto-generated method stub
+      return null;
+    }
+
+    @Override
+    public JobsInfo getPartialJobs(Long offset, Long count, String user,
+        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
+        JobState jobState) {
+      return CachedHistoryStorage.getPartialJobs(this.partialJobs.values(), 
+          offset, count, user, queue, sBegin, sEnd, fBegin, fEnd, jobState);
+    }
   }
 
   private Injector injector = Guice.createInjector(new ServletModule() {
@@ -167,6 +184,7 @@ public class TestHsWebServicesJobsQuery 
       bind(GenericExceptionHandler.class);
       bind(WebApp.class).toInstance(webApp);
       bind(AppContext.class).toInstance(appContext);
+      bind(HistoryContext.class).toInstance(appContext);
       bind(Configuration.class).toInstance(conf);
 
       serve("/*").with(GuiceContainer.class);

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java Wed Apr 11 05:47:40 2012
@@ -34,12 +34,15 @@ import javax.xml.parsers.DocumentBuilder
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.ClusterInfo;
@@ -85,7 +88,7 @@ public class TestHsWebServicesTasks exte
   private static TestAppContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements AppContext {
+  static class TestAppContext implements HistoryContext {
     final ApplicationAttemptId appAttemptID;
     final ApplicationId appID;
     final String user = MockJobs.newUserName();
@@ -152,6 +155,20 @@ public class TestHsWebServicesTasks exte
     public ClusterInfo getClusterInfo() {
       return null;
     }
+
+    @Override
+    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
+      // TODO Auto-generated method stub
+      return null;
+    }
+
+    @Override
+    public JobsInfo getPartialJobs(Long offset, Long count, String user,
+        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
+        JobState jobState) {
+      // TODO Auto-generated method stub
+      return null;
+    }
   }
 
   private Injector injector = Guice.createInjector(new ServletModule() {
@@ -167,6 +184,7 @@ public class TestHsWebServicesTasks exte
       bind(GenericExceptionHandler.class);
       bind(WebApp.class).toInstance(webApp);
       bind(AppContext.class).toInstance(appContext);
+      bind(HistoryContext.class).toInstance(appContext);
       bind(Configuration.class).toInstance(conf);
 
       serve("/*").with(GuiceContainer.class);

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java Wed Apr 11 05:47:40 2012
@@ -18,34 +18,63 @@
 package org.apache.hadoop.util;
 
 
+import java.io.BufferedInputStream;
 import java.io.File;
-import org.apache.hadoop.fs.Path;
-import org.junit.Ignore;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
 
-import junit.framework.TestCase;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
 
 /**
  * A test for the RunJar class.
  */
-@Ignore
-public class TestRunJar extends TestCase {
-  
+public class TestRunJar {
+
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
-  
+
+  private static final String TEST_JAR_NAME = "testjar.jar";
+  private static final String CLASS_NAME = "Hello.class";
+
+  @Test
   public void testRunjar() throws Throwable {
-  
-   File outFile = new File(TEST_ROOT_DIR, "out");
-     // delete if output file already exists.
+    File outFile = new File(TEST_ROOT_DIR, "out");
+    // delete if output file already exists.
     if (outFile.exists()) {
       outFile.delete();
     }
-    
+    File makeTestJar = makeTestJar();
+
     String[] args = new String[3];
-    args[0] = "build/test/mapred/testjar/testjob.jar";
-    args[1] = "testjar.Hello";
+    args[0] = makeTestJar.getAbsolutePath();
+    args[1] = "org.apache.hadoop.util.Hello";
     args[2] = outFile.toString();
     RunJar.main(args);
-    assertTrue("RunJar failed", outFile.exists());
+    Assert.assertTrue("RunJar failed", outFile.exists());
+  }
+
+  private File makeTestJar() throws IOException {
+    File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
+    JarOutputStream jstream = new JarOutputStream(new FileOutputStream(jarFile));
+    InputStream entryInputStream = this.getClass().getResourceAsStream(
+        CLASS_NAME);
+    ZipEntry entry = new ZipEntry("org/apache/hadoop/util/" + CLASS_NAME);
+    jstream.putNextEntry(entry);
+    BufferedInputStream bufInputStream = new BufferedInputStream(
+        entryInputStream, 2048);
+    int count;
+    byte[] data = new byte[2048];
+    while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+      jstream.write(data, 0, count);
+    }
+    jstream.closeEntry();
+    jstream.close();
+
+    return jarFile;
   }
-}
+}
\ No newline at end of file

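One observation on the new makeTestJar() helper above: it closes its streams only on the success path, so a failure in the copy loop would leak the JarOutputStream and the class-resource InputStream. A hedged variant of the same helper with a try/finally, assuming the same TEST_ROOT_DIR, TEST_JAR_NAME, and CLASS_NAME constants (the BufferedInputStream wrapper is dropped here since the loop already reads in 2 KB chunks):

    private File makeTestJar() throws IOException {
      File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
      JarOutputStream jstream =
          new JarOutputStream(new FileOutputStream(jarFile));
      InputStream entryInputStream =
          this.getClass().getResourceAsStream(CLASS_NAME);
      try {
        jstream.putNextEntry(new ZipEntry("org/apache/hadoop/util/" + CLASS_NAME));
        byte[] data = new byte[2048];
        int count;
        while ((count = entryInputStream.read(data, 0, 2048)) != -1) {
          jstream.write(data, 0, count);
        }
        jstream.closeEntry();
      } finally {
        // release both handles even when the copy fails
        entryInputStream.close();
        jstream.close();
      }
      return jarFile;
    }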
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/bin/yarn
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/bin/yarn?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/bin/yarn (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/bin/yarn Wed Apr 11 05:47:40 2012
@@ -225,6 +225,7 @@ YARN_OPTS="$YARN_OPTS -Dyarn.log.dir=$YA
 YARN_OPTS="$YARN_OPTS -Dhadoop.log.file=$YARN_LOGFILE"
 YARN_OPTS="$YARN_OPTS -Dyarn.log.file=$YARN_LOGFILE"
 YARN_OPTS="$YARN_OPTS -Dyarn.home.dir=$YARN_HOME"
+YARN_OPTS="$YARN_OPTS -Dhadoop.home.dir=$YARN_HOME"
 YARN_OPTS="$YARN_OPTS -Dhadoop.root.logger=${YARN_ROOT_LOGGER:-INFO,console}"
 YARN_OPTS="$YARN_OPTS -Dyarn.root.logger=${YARN_ROOT_LOGGER:-INFO,console}"
 if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java Wed Apr 11 05:47:40 2012
@@ -78,7 +78,6 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.Records;
 
@@ -506,22 +505,20 @@ public class Client {
     // It should be provided out of the box. 
     // For now setting all required classpaths including
     // the classpath to "." for the application jar
-    String classPathEnv = "${CLASSPATH}"
-        + ":./*"
-        + ":$HADOOP_CONF_DIR"
-        + ":$HADOOP_COMMON_HOME/share/hadoop/common/*"
-        + ":$HADOOP_COMMON_HOME/share/hadoop/common/lib/*"
-        + ":$HADOOP_HDFS_HOME/share/hadoop/hdfs/*"
-        + ":$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*"
-        + ":$YARN_HOME/modules/*"
-        + ":$YARN_HOME/lib/*"
-        + ":./log4j.properties:";
+    StringBuilder classPathEnv = new StringBuilder("${CLASSPATH}:./*");
+    for (String c : conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH)
+        .split(",")) {
+      classPathEnv.append(':');
+      classPathEnv.append(c.trim());
+    }
+    classPathEnv.append(":./log4j.properties");
 
-    // add the runtime classpath needed for tests to work 
+    // add the runtime classpath needed for tests to work
     String testRuntimeClassPath = Client.getTestRuntimeClasspath();
-    classPathEnv += ":" + testRuntimeClassPath; 
+    classPathEnv.append(':');
+    classPathEnv.append(testRuntimeClassPath);
 
-    env.put("CLASSPATH", classPathEnv);
+    env.put("CLASSPATH", classPathEnv.toString());
 
     amContainer.setEnvironment(env);
 

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java Wed Apr 11 05:47:40 2012
@@ -31,6 +31,9 @@ import org.apache.hadoop.net.DNSToSwitch
 import org.apache.hadoop.net.Node;
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.net.ScriptBasedMapping;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import com.google.common.annotations.VisibleForTesting;
 
 public class RackResolver {
   private static DNSToSwitchMapping dnsToSwitchMapping;
@@ -49,10 +52,8 @@ public class RackResolver {
         ScriptBasedMapping.class,
         DNSToSwitchMapping.class);
     try {
-      Constructor<? extends DNSToSwitchMapping> dnsToSwitchMappingConstructor
-                             = dnsToSwitchMappingClass.getConstructor();
-      DNSToSwitchMapping newInstance =
-          dnsToSwitchMappingConstructor.newInstance();
+      DNSToSwitchMapping newInstance = ReflectionUtils.newInstance(
+          dnsToSwitchMappingClass, conf);
       // Wrap around the configured class with the Cached implementation so as
       // to save on repetitive lookups.
       // Check if the impl is already caching, to avoid double caching.
@@ -99,4 +100,12 @@ public class RackResolver {
     LOG.info("Resolved " + hostName + " to " + rName);
     return new NodeBase(hostName, rName);
   }
+
+  /**
+   * Only used by tests
+   */
+  @VisibleForTesting
+  static DNSToSwitchMapping getDnsToSwitchMapping(){
+    return dnsToSwitchMapping;
+  }
 }

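The RackResolver change above swaps a hand-rolled getConstructor()/newInstance() pair for ReflectionUtils.newInstance(clazz, conf). Besides being shorter, ReflectionUtils caches the resolved constructor and, when the instantiated class implements org.apache.hadoop.conf.Configurable, hands it the Configuration via setConf(). A sketch of the pattern, assuming the net.topology.node.switch.mapping.impl key that RackResolver reads:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.net.DNSToSwitchMapping;
    import org.apache.hadoop.net.ScriptBasedMapping;
    import org.apache.hadoop.util.ReflectionUtils;

    Configuration conf = new Configuration();
    Class<? extends DNSToSwitchMapping> clazz = conf.getClass(
        "net.topology.node.switch.mapping.impl",
        ScriptBasedMapping.class, DNSToSwitchMapping.class);
    // newInstance caches the (possibly non-public) no-arg constructor and
    // calls setConf(conf) when the class implements Configurable; the old
    // getConstructor().newInstance() pair did neither.
    DNSToSwitchMapping mapping = ReflectionUtils.newInstance(clazz, conf);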
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java Wed Apr 11 05:47:40 2012
@@ -52,7 +52,7 @@ public class TestRackResolver {
       Assert.assertTrue(numHost1 <= 1);
       return returnList;
     }
-    
+
   }
 
   @Test

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/c++/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/c++:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/block_forensics/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/block_forensics:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/build-contrib.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build-contrib.xml:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/build.xml
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build.xml:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/data_join/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/data_join:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/eclipse-plugin/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/eclipse-plugin:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/index/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/index:r1310902-1324566

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java Wed Apr 11 05:47:40 2012
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.ChecksumExce
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
+import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.SocketOutputStream;
 import org.apache.hadoop.util.DataChecksum;
@@ -441,9 +442,9 @@ public class RaidBlockSender implements 
   
   private static class BlockInputStreamFactory implements InputStreamFactory {
     private final ExtendedBlock block;
-    private final FSDatasetInterface data;
+    private final FsDatasetSpi<?> data;
 
-    private BlockInputStreamFactory(ExtendedBlock block, FSDatasetInterface data) {
+    private BlockInputStreamFactory(ExtendedBlock block, FsDatasetSpi<?> data) {
       this.block = block;
       this.data = data;
     }

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java Wed Apr 11 05:47:40 2012
@@ -22,6 +22,7 @@ import java.io.*;
 import org.apache.hadoop.classification.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.hdfs.protocol.*;
+import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.security.AccessControlException;
 
 /** Utilities used by RAID for accessing NameNode. */
@@ -35,10 +36,11 @@ public class NameNodeRaidUtil {
     return dir.getFileInfo(src, resolveLink);
   }
 
-  /** Accessing FSNamesystem.getFileInfo(..) */
+  /** Accessing FSNamesystem.getFileInfo(..) 
+ * @throws StandbyException */
   public static HdfsFileStatus getFileInfo(final FSNamesystem namesystem,
       final String src, final boolean resolveLink
-      ) throws AccessControlException, UnresolvedLinkException {
+      ) throws AccessControlException, UnresolvedLinkException, StandbyException {
     return namesystem.getFileInfo(src, resolveLink);
   }
 

Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java?rev=1324567&r1=1324566&r2=1324567&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java Wed Apr 11 05:47:40 2012
@@ -622,8 +622,8 @@ public abstract class BlockFixer extends
         int idx = rand.nextInt(live.length);
         chosen = live[idx];
         for (DatanodeInfo avoid: locationsToAvoid) {
-          if (chosen.name.equals(avoid.name)) {
-            LOG.info("Avoiding " + avoid.name);
+          if (chosen.getName().equals(avoid.getName())) {
+            LOG.info("Avoiding " + avoid.getName());
             chosen = null;
             break;
           }
@@ -632,7 +632,7 @@ public abstract class BlockFixer extends
       if (chosen == null) {
         throw new IOException("Could not choose datanode");
       }
-      LOG.info("Choosing datanode " + chosen.name);
+      LOG.info("Choosing datanode " + chosen.getName());
       return chosen;
     }
 
@@ -736,7 +736,7 @@ public abstract class BlockFixer extends
                                 DataInputStream metadataIn,
                                 LocatedBlock block, long blockSize)
       throws IOException {
-      InetSocketAddress target = NetUtils.createSocketAddr(datanode.name);
+      InetSocketAddress target = NetUtils.createSocketAddr(datanode.getName());
       Socket sock = SocketChannel.open().socket();
       
       int readTimeout =
@@ -785,7 +785,7 @@ public abstract class BlockFixer extends
             1, 0L, blockSize, 0L, DataChecksum.newDataChecksum(metadataIn));
         blockSender.sendBlock(out, baseStream);
         
-        LOG.info("Sent block " + block.getBlock() + " to " + datanode.name);
+        LOG.info("Sent block " + block.getBlock() + " to " + datanode.getName());
       } finally {
         out.close();
       }

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/contrib/vaidya/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/vaidya:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/examples/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/examples:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/java:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/test/mapred/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc:r1310902-1324566

Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/src/webapps/job/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-mapreduce-project/src/webapps/job:r1310902-1324566