You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by at...@apache.org on 2012/02/28 03:21:44 UTC
svn commit: r1294445 - in
/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/
hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/
hadoop-mapreduce-client/hadoop-mapreduce-client-cor...
Author: atm
Date: Tue Feb 28 02:21:42 2012
New Revision: 1294445
URL: http://svn.apache.org/viewvc?rev=1294445&view=rev
Log:
Merge trunk into HA branch.
Added:
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
- copied unchanged from r1294443, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist
- copied unchanged from r1294443, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001-1329348443227-user-Sleep+job-1329348468601-10-1-SUCCEEDED-default.jhist
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml
- copied unchanged from r1294443, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml
Modified:
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt (contents, props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/conf/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/c++/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/block_forensics/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build-contrib.xml (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build.xml (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/data_join/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/eclipse-plugin/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/index/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/vaidya/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/examples/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc/ (props changed)
hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/webapps/job/ (props changed)
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-mapreduce-project:1152502-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project:1152502-1294443
/hadoop/core/branches/branch-0.19/mapred:713112
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt?rev=1294445&r1=1294444&r2=1294445&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt Tue Feb 28 02:21:42 2012
@@ -4,8 +4,6 @@ Trunk (unreleased changes)
INCOMPATIBLE CHANGES
- MAPREDUCE-3545. Remove Avro RPC. (suresh)
-
NEW FEATURES
MAPREDUCE-778. Rumen Anonymizer. (Amar Kamat and Chris Douglas via amarrk)
@@ -32,12 +30,6 @@ Trunk (unreleased changes)
MAPREDUCE-3008. Improvements to cumulative CPU emulation for short running
tasks in Gridmix. (amarrk)
- MAPREDUCE-2887 due to HADOOP-7524 Change RPC to allow multiple protocols
- including multuple versions of the same protocol (sanjay Radia)
-
- MAPREDUCE-2934. MR portion of HADOOP-7607 - Simplify the RPC proxy cleanup
- process (atm)
-
MAPREDUCE-2836. Provide option to fail jobs when submitted to non-existent
fair scheduler pools. (Ahmed Radwan via todd)
@@ -50,14 +42,8 @@ Trunk (unreleased changes)
MAPREDUCE-3169. Create a new MiniMRCluster equivalent which only provides
client APIs cross MR1 and MR2 (Ahmed via tucu)
- HADOOP-7862 MR changes to work with HADOOP 7862:
- Move the support for multiple protocols to lower layer so that Writable,
- PB and Avro can all use it (Sanjay)
-
MAPREDUCE-2944. Improve checking of input for JobClient.displayTasks() (XieXianshan via harsh)
- MAPREDUCE-3909 Javadoc the Service interfaces (stevel)
-
BUG FIXES
MAPREDUCE-3757. [Rumen] Fixed Rumen Folder to adjust shuffleFinished and
@@ -89,24 +75,41 @@ Trunk (unreleased changes)
MAPREDUCE-3664. Federation Documentation has incorrect configuration example.
(Brandon Li via jitendra)
- MAPREDUCE-3740. Fixed broken mapreduce compilation after the patch for
- HADOOP-7965. (Devaraj K via vinodkv)
-
- MAPREDUCE-3818. Fixed broken compilation in TestSubmitJob after the patch
- for HDFS-2895. (Suresh Srinivas via vinodkv)
-
Release 0.23.3 - UNRELEASED
INCOMPATIBLE CHANGES
+ MAPREDUCE-3545. Remove Avro RPC. (suresh)
+
NEW FEATURES
IMPROVEMENTS
+ MAPREDUCE-2887. Due to HADOOP-7524, change RPC to allow multiple protocols
+ including multiple versions of the same protocol (Sanjay Radia)
+
+ MAPREDUCE-2934. MR portion of HADOOP-7607 - Simplify the RPC proxy cleanup
+ process (atm)
+
+ HADOOP-7862. MR changes to work with HADOOP 7862: Move the support for
+ multiple protocols to lower layer so that Writable, PB and Avro can all
+ use it (Sanjay Radia)
+
+ MAPREDUCE-3909 Javadoc the Service interfaces (stevel)
+
OPTIMIZATIONS
BUG FIXES
+ MAPREDUCE-3740. Fixed broken mapreduce compilation after the patch for
+ HADOOP-7965. (Devaraj K via vinodkv)
+
+ MAPREDUCE-3818. Fixed broken compilation in TestSubmitJob after the patch
+ for HDFS-2895. (Suresh Srinivas via vinodkv)
+
+ MAPREDUCE-2942. TestNMAuditLogger.testNMAuditLoggerWithIP failing (Thomas
+ Graves via mahadev)
+
Release 0.23.2 - UNRELEASED
INCOMPATIBLE CHANGES
@@ -135,6 +138,12 @@ Release 0.23.2 - UNRELEASED
OPTIMIZATIONS
+ MAPREDUCE-3901. Modified JobHistory records in YARN to lazily load job and
+ task reports so as to improve UI response times. (Siddarth Seth via vinodkv)
+
+ MAPREDUCE-2855. Passing a cached class-loader to ResourceBundle creator to
+ minimize counter names lookup time. (Siddarth Seth via vinodkv)
+
BUG FIXES
MAPREDUCE-3918 proc_historyserver no longer in command line arguments for
HistoryServer (Jon Eagles via bobby)
@@ -2270,9 +2279,6 @@ Release 0.23.0 - 2011-11-01
MAPREDUCE-2908. Fix all findbugs warnings. (vinodkv via acmurthy)
- MAPREDUCE-2942. TestNMAuditLogger.testNMAuditLoggerWithIP failing (Thomas Graves
- via mahadev)
-
MAPREDUCE-2947. Fixed race condition in AuxiliaryServices. (vinodkv via
acmurthy)
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:1161333-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:1161333-1294443
/hadoop/core/branches/branch-0.19/mapred/CHANGES.txt:713112
/hadoop/mapreduce/branches/HDFS-641/CHANGES.txt:817878-835964
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/conf:1152502-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/conf:1152502-1294443
/hadoop/core/branches/branch-0.19/mapred/conf:713112
/hadoop/core/trunk/conf:784664-785643
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java?rev=1294445&r1=1294444&r2=1294445&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java Tue Feb 28 02:21:42 2012
@@ -30,6 +30,7 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.Records;
public class MRBuilderUtils {
@@ -41,6 +42,11 @@ public class MRBuilderUtils {
return jobId;
}
+ public static JobId newJobId(long clusterTs, int appIdInt, int id) {
+ ApplicationId appId = BuilderUtils.newApplicationId(clusterTs, appIdInt);
+ return MRBuilderUtils.newJobId(appId, id);
+ }
+
public static TaskId newTaskId(JobId jobId, int id, TaskType taskType) {
TaskId taskId = Records.newRecord(TaskId.class);
taskId.setJobId(jobId);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java?rev=1294445&r1=1294444&r2=1294445&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceBundles.java Tue Feb 28 02:21:42 2012
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.util;
+import java.util.Locale;
import java.util.ResourceBundle;
import java.util.MissingResourceException;
@@ -33,7 +34,8 @@ public class ResourceBundles {
* @throws MissingResourceException
*/
public static ResourceBundle getBundle(String bundleName) {
- return ResourceBundle.getBundle(bundleName.replace('$', '_'));
+ return ResourceBundle.getBundle(bundleName.replace('$', '_'),
+ Locale.getDefault(), Thread.currentThread().getContextClassLoader());
}
/**
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:1166973-1294443
/hadoop/core/branches/branch-0.19/mapred/src/java/mapred-default.xml:713112
/hadoop/core/trunk/src/mapred/mapred-default.xml:776175-785643
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java?rev=1294445&r1=1294444&r2=1294445&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java Tue Feb 28 02:21:42 2012
@@ -19,13 +19,16 @@
package org.apache.hadoop.mapreduce.v2.hs;
import java.io.IOException;
-import java.util.ArrayList;
+import java.net.UnknownHostException;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -34,6 +37,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobACLsManager;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
@@ -54,7 +58,7 @@ import org.apache.hadoop.mapreduce.v2.ut
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.yarn.YarnException;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.Records;
/**
@@ -64,50 +68,31 @@ import org.apache.hadoop.yarn.factory.pr
public class CompletedJob implements org.apache.hadoop.mapreduce.v2.app.job.Job {
static final Log LOG = LogFactory.getLog(CompletedJob.class);
- private final Counters counters;
private final Configuration conf;
- private final JobId jobId;
- private final List<String> diagnostics = new ArrayList<String>();
- private final JobReport report;
- private final Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
- private final Map<TaskId, Task> mapTasks = new HashMap<TaskId, Task>();
- private final Map<TaskId, Task> reduceTasks = new HashMap<TaskId, Task>();
- private final String user;
+ private final JobId jobId; //Can be picked from JobInfo with a conversion.
+ private final String user; //Can be picked up from JobInfo
private final Path confFile;
- private JobACLsManager aclsMgr;
- private List<TaskAttemptCompletionEvent> completionEvents = null;
private JobInfo jobInfo;
-
+ private JobReport report;
+ AtomicBoolean tasksLoaded = new AtomicBoolean(false);
+ private Lock tasksLock = new ReentrantLock();
+ private Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
+ private Map<TaskId, Task> mapTasks = new HashMap<TaskId, Task>();
+ private Map<TaskId, Task> reduceTasks = new HashMap<TaskId, Task>();
+ private List<TaskAttemptCompletionEvent> completionEvents = null;
+ private JobACLsManager aclsMgr;
+
+
public CompletedJob(Configuration conf, JobId jobId, Path historyFile,
boolean loadTasks, String userName, Path confFile, JobACLsManager aclsMgr)
throws IOException {
LOG.info("Loading job: " + jobId + " from file: " + historyFile);
this.conf = conf;
this.jobId = jobId;
+ this.user = userName;
this.confFile = confFile;
this.aclsMgr = aclsMgr;
-
loadFullHistoryData(loadTasks, historyFile);
- user = userName;
- counters = jobInfo.getTotalCounters();
- diagnostics.add(jobInfo.getErrorInfo());
- report =
- RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
- JobReport.class);
- report.setJobId(jobId);
- report.setJobState(JobState.valueOf(jobInfo.getJobStatus()));
- report.setSubmitTime(jobInfo.getSubmitTime());
- report.setStartTime(jobInfo.getLaunchTime());
- report.setFinishTime(jobInfo.getFinishTime());
- report.setJobName(jobInfo.getJobname());
- report.setUser(jobInfo.getUsername());
- report.setMapProgress((float) getCompletedMaps() / getTotalMaps());
- report.setReduceProgress((float) getCompletedReduces() / getTotalReduces());
- report.setJobFile(confFile.toString());
- report.setTrackingUrl(JobHistoryUtils.getHistoryUrl(conf, TypeConverter
- .toYarn(TypeConverter.fromYarn(jobId)).getAppId()));
- report.setAMInfos(getAMInfos());
- report.setIsUber(isUber());
}
@Override
@@ -122,7 +107,7 @@ public class CompletedJob implements org
@Override
public Counters getAllCounters() {
- return counters;
+ return jobInfo.getTotalCounters();
}
@Override
@@ -131,10 +116,36 @@ public class CompletedJob implements org
}
@Override
- public JobReport getReport() {
+ public synchronized JobReport getReport() {
+ if (report == null) {
+ constructJobReport();
+ }
return report;
}
+ private void constructJobReport() {
+ report = Records.newRecord(JobReport.class);
+ report.setJobId(jobId);
+ report.setJobState(JobState.valueOf(jobInfo.getJobStatus()));
+ report.setSubmitTime(jobInfo.getSubmitTime());
+ report.setStartTime(jobInfo.getLaunchTime());
+ report.setFinishTime(jobInfo.getFinishTime());
+ report.setJobName(jobInfo.getJobname());
+ report.setUser(jobInfo.getUsername());
+ report.setMapProgress((float) getCompletedMaps() / getTotalMaps());
+ report.setReduceProgress((float) getCompletedReduces() / getTotalReduces());
+ report.setJobFile(confFile.toString());
+ String historyUrl = "N/A";
+ try {
+ historyUrl = JobHistoryUtils.getHistoryUrl(conf, jobId.getAppId());
+ } catch (UnknownHostException e) {
+ //Ignore.
+ }
+ report.setTrackingUrl(historyUrl);
+ report.setAMInfos(getAMInfos());
+ report.setIsUber(isUber());
+ }
+
@Override
public float getProgress() {
return 1.0f;
@@ -142,16 +153,23 @@ public class CompletedJob implements org
@Override
public JobState getState() {
- return report.getJobState();
+ return JobState.valueOf(jobInfo.getJobStatus());
}
@Override
public Task getTask(TaskId taskId) {
- return tasks.get(taskId);
+ if (tasksLoaded.get()) {
+ return tasks.get(taskId);
+ } else {
+ TaskID oldTaskId = TypeConverter.fromYarn(taskId);
+ CompletedTask completedTask =
+ new CompletedTask(taskId, jobInfo.getAllTasks().get(oldTaskId));
+ return completedTask;
+ }
}
@Override
- public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
+ public synchronized TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
int fromEventId, int maxEvents) {
if (completionEvents == null) {
constructTaskAttemptCompletionEvents();
@@ -167,6 +185,7 @@ public class CompletedJob implements org
}
private void constructTaskAttemptCompletionEvents() {
+ loadAllTasks();
completionEvents = new LinkedList<TaskAttemptCompletionEvent>();
List<TaskAttempt> allTaskAttempts = new LinkedList<TaskAttempt>();
for (TaskId taskId : tasks.keySet()) {
@@ -205,8 +224,8 @@ public class CompletedJob implements org
int eventId = 0;
for (TaskAttempt taskAttempt : allTaskAttempts) {
- TaskAttemptCompletionEvent tace = RecordFactoryProvider.getRecordFactory(
- null).newRecordInstance(TaskAttemptCompletionEvent.class);
+ TaskAttemptCompletionEvent tace =
+ Records.newRecord(TaskAttemptCompletionEvent.class);
int attemptRunTime = -1;
if (taskAttempt.getLaunchTime() != 0 && taskAttempt.getFinishTime() != 0) {
@@ -237,15 +256,42 @@ public class CompletedJob implements org
@Override
public Map<TaskId, Task> getTasks() {
+ loadAllTasks();
return tasks;
}
+ private void loadAllTasks() {
+ if (tasksLoaded.get()) {
+ return;
+ }
+ tasksLock.lock();
+ try {
+ if (tasksLoaded.get()) {
+ return;
+ }
+ for (Map.Entry<TaskID, TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
+ TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
+ TaskInfo taskInfo = entry.getValue();
+ Task task = new CompletedTask(yarnTaskID, taskInfo);
+ tasks.put(yarnTaskID, task);
+ if (task.getType() == TaskType.MAP) {
+ mapTasks.put(task.getID(), task);
+ } else if (task.getType() == TaskType.REDUCE) {
+ reduceTasks.put(task.getID(), task);
+ }
+ }
+ tasksLoaded.set(true);
+ } finally {
+ tasksLock.unlock();
+ }
+ }
+
//History data is leisurely loaded when task level data is requested
private synchronized void loadFullHistoryData(boolean loadTasks,
Path historyFileAbsolute) throws IOException {
LOG.info("Loading history file: [" + historyFileAbsolute + "]");
- if (jobInfo != null) {
- return; //data already loaded
+ if (this.jobInfo != null) {
+ return;
}
if (historyFileAbsolute != null) {
@@ -254,7 +300,7 @@ public class CompletedJob implements org
parser =
new JobHistoryParser(historyFileAbsolute.getFileSystem(conf),
historyFileAbsolute);
- jobInfo = parser.parse();
+ this.jobInfo = parser.parse();
} catch (IOException e) {
throw new YarnException("Could not load history file "
+ historyFileAbsolute, e);
@@ -268,27 +314,15 @@ public class CompletedJob implements org
} else {
throw new IOException("History file not found");
}
-
if (loadTasks) {
- for (Map.Entry<org.apache.hadoop.mapreduce.TaskID, TaskInfo> entry : jobInfo
- .getAllTasks().entrySet()) {
- TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
- TaskInfo taskInfo = entry.getValue();
- Task task = new CompletedTask(yarnTaskID, taskInfo);
- tasks.put(yarnTaskID, task);
- if (task.getType() == TaskType.MAP) {
- mapTasks.put(task.getID(), task);
- } else if (task.getType() == TaskType.REDUCE) {
- reduceTasks.put(task.getID(), task);
- }
- }
- }
- LOG.info("TaskInfo loaded");
+ loadAllTasks();
+ LOG.info("TaskInfo loaded");
+ }
}
@Override
public List<String> getDiagnostics() {
- return diagnostics;
+ return Collections.singletonList(jobInfo.getErrorInfo());
}
@Override
@@ -318,6 +352,7 @@ public class CompletedJob implements org
@Override
public Map<TaskId, Task> getTasks(TaskType taskType) {
+ loadAllTasks();
if (TaskType.MAP.equals(taskType)) {
return mapTasks;
} else {//we have only two types of tasks
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java?rev=1294445&r1=1294444&r2=1294445&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTask.java Tue Feb 28 02:21:42 2012
@@ -20,10 +20,13 @@ package org.apache.hadoop.mapreduce.v2.h
import java.util.ArrayList;
import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
@@ -35,59 +38,24 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.Records;
public class CompletedTask implements Task {
-
- private final TaskType type;
- private Counters counters;
- private final long startTime;
- private final long finishTime;
- private TaskState state;
private final TaskId taskId;
- private final TaskReport report;
+ private final TaskInfo taskInfo;
+ private TaskReport report;
+ private TaskAttemptId successfulAttempt;
+ private List<String> reportDiagnostics = new LinkedList<String>();
+ private Lock taskAttemptsLock = new ReentrantLock();
+ private AtomicBoolean taskAttemptsLoaded = new AtomicBoolean(false);
private final Map<TaskAttemptId, TaskAttempt> attempts =
new LinkedHashMap<TaskAttemptId, TaskAttempt>();
-
- private static final Log LOG = LogFactory.getLog(CompletedTask.class);
CompletedTask(TaskId taskId, TaskInfo taskInfo) {
//TODO JobHistoryParser.handleTaskFailedAttempt should use state from the event.
- LOG.debug("HandlingTaskId: [" + taskId + "]");
+ this.taskInfo = taskInfo;
this.taskId = taskId;
- this.startTime = taskInfo.getStartTime();
- this.finishTime = taskInfo.getFinishTime();
- this.type = TypeConverter.toYarn(taskInfo.getTaskType());
- if (taskInfo.getCounters() != null)
- this.counters = taskInfo.getCounters();
- if (taskInfo.getTaskStatus() != null) {
- this.state = TaskState.valueOf(taskInfo.getTaskStatus());
- } else {
- this.state = TaskState.KILLED;
- }
- report = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskReport.class);
- for (TaskAttemptInfo attemptHistory : taskInfo.getAllTaskAttempts()
- .values()) {
- CompletedTaskAttempt attempt = new CompletedTaskAttempt(taskId,
- attemptHistory);
- report.addAllDiagnostics(attempt.getDiagnostics()); //TODO TMI?
- attempts.put(attempt.getID(), attempt);
- if (attemptHistory.getTaskStatus() != null
- && attemptHistory.getTaskStatus().equals(
- TaskState.SUCCEEDED.toString())
- && report.getSuccessfulAttempt() == null) {
- report.setSuccessfulAttempt(TypeConverter.toYarn(attemptHistory
- .getAttemptId()));
- }
- }
- report.setTaskId(taskId);
- report.setStartTime(startTime);
- report.setFinishTime(finishTime);
- report.setTaskState(state);
- report.setProgress(getProgress());
- report.setCounters(TypeConverter.toYarn(getCounters()));
- report.addAllRunningAttempts(new ArrayList<TaskAttemptId>(attempts.keySet()));
}
@Override
@@ -97,17 +65,19 @@ public class CompletedTask implements Ta
@Override
public TaskAttempt getAttempt(TaskAttemptId attemptID) {
+ loadAllTaskAttempts();
return attempts.get(attemptID);
}
@Override
public Map<TaskAttemptId, TaskAttempt> getAttempts() {
+ loadAllTaskAttempts();
return attempts;
}
@Override
public Counters getCounters() {
- return counters;
+ return taskInfo.getCounters();
}
@Override
@@ -121,13 +91,18 @@ public class CompletedTask implements Ta
}
@Override
- public TaskReport getReport() {
+ public synchronized TaskReport getReport() {
+ if (report == null) {
+ constructTaskReport();
+ }
return report;
}
+
+
@Override
public TaskType getType() {
- return type;
+ return TypeConverter.toYarn(taskInfo.getTaskType());
}
@Override
@@ -137,7 +112,54 @@ public class CompletedTask implements Ta
@Override
public TaskState getState() {
- return state;
+ return taskInfo.getTaskStatus() == null ? TaskState.KILLED : TaskState
+ .valueOf(taskInfo.getTaskStatus());
}
+ private void constructTaskReport() {
+ loadAllTaskAttempts();
+ this.report = Records.newRecord(TaskReport.class);
+ report.setTaskId(taskId);
+ report.setStartTime(taskInfo.getStartTime());
+ report.setFinishTime(taskInfo.getFinishTime());
+ report.setTaskState(getState());
+ report.setProgress(getProgress());
+ report.setCounters(TypeConverter.toYarn(getCounters()));
+ if (successfulAttempt != null) {
+ report.setSuccessfulAttempt(successfulAttempt);
+ }
+ report.addAllDiagnostics(reportDiagnostics);
+ report
+ .addAllRunningAttempts(new ArrayList<TaskAttemptId>(attempts.keySet()));
+ }
+
+ private void loadAllTaskAttempts() {
+ if (taskAttemptsLoaded.get()) {
+ return;
+ }
+ taskAttemptsLock.lock();
+ try {
+ if (taskAttemptsLoaded.get()) {
+ return;
+ }
+
+ for (TaskAttemptInfo attemptHistory : taskInfo.getAllTaskAttempts()
+ .values()) {
+ CompletedTaskAttempt attempt =
+ new CompletedTaskAttempt(taskId, attemptHistory);
+ reportDiagnostics.addAll(attempt.getDiagnostics());
+ attempts.put(attempt.getID(), attempt);
+ if (successfulAttempt == null
+ && attemptHistory.getTaskStatus() != null
+ && attemptHistory.getTaskStatus().equals(
+ TaskState.SUCCEEDED.toString())) {
+ successfulAttempt =
+ TypeConverter.toYarn(attemptHistory.getAttemptId());
+ }
+ }
+ taskAttemptsLoaded.set(true);
+ } finally {
+ taskAttemptsLock.unlock();
+ }
+ }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java?rev=1294445&r1=1294444&r2=1294445&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java Tue Feb 28 02:21:42 2012
@@ -30,25 +30,21 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.Records;
public class CompletedTaskAttempt implements TaskAttempt {
private final TaskAttemptInfo attemptInfo;
private final TaskAttemptId attemptId;
- private Counters counters;
private final TaskAttemptState state;
- private final TaskAttemptReport report;
private final List<String> diagnostics = new ArrayList<String>();
+ private TaskAttemptReport report;
private String localDiagMessage;
CompletedTaskAttempt(TaskId taskId, TaskAttemptInfo attemptInfo) {
this.attemptInfo = attemptInfo;
this.attemptId = TypeConverter.toYarn(attemptInfo.getAttemptId());
- if (attemptInfo.getCounters() != null) {
- this.counters = attemptInfo.getCounters();
- }
if (attemptInfo.getTaskStatus() != null) {
this.state = TaskAttemptState.valueOf(attemptInfo.getTaskStatus());
} else {
@@ -56,37 +52,9 @@ public class CompletedTaskAttempt implem
localDiagMessage = "Attmpt state missing from History : marked as KILLED";
diagnostics.add(localDiagMessage);
}
-
if (attemptInfo.getError() != null) {
diagnostics.add(attemptInfo.getError());
}
-
- report = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptReport.class);
-
- report.setTaskAttemptId(attemptId);
- report.setTaskAttemptState(state);
- report.setProgress(getProgress());
- report.setStartTime(attemptInfo.getStartTime());
-
- report.setFinishTime(attemptInfo.getFinishTime());
- report.setShuffleFinishTime(attemptInfo.getShuffleFinishTime());
- report.setSortFinishTime(attemptInfo.getSortFinishTime());
- if (localDiagMessage != null) {
- report.setDiagnosticInfo(attemptInfo.getError() + ", " + localDiagMessage);
- } else {
- report.setDiagnosticInfo(attemptInfo.getError());
- }
-// report.setPhase(attemptInfo.get); //TODO
- report.setStateString(attemptInfo.getState());
- report.setCounters(TypeConverter.toYarn(getCounters()));
- report.setContainerId(attemptInfo.getContainerId());
- if (attemptInfo.getHostname() == null) {
- report.setNodeManagerHost("UNKNOWN");
- } else {
- report.setNodeManagerHost(attemptInfo.getHostname());
- report.setNodeManagerPort(attemptInfo.getPort());
- }
- report.setNodeManagerHttpPort(attemptInfo.getHttpPort());
}
@Override
@@ -111,7 +79,7 @@ public class CompletedTaskAttempt implem
@Override
public Counters getCounters() {
- return counters;
+ return attemptInfo.getCounters();
}
@Override
@@ -125,7 +93,10 @@ public class CompletedTaskAttempt implem
}
@Override
- public TaskAttemptReport getReport() {
+ public synchronized TaskAttemptReport getReport() {
+ if (report == null) {
+ constructTaskAttemptReport();
+ }
return report;
}
@@ -146,26 +117,55 @@ public class CompletedTaskAttempt implem
@Override
public long getLaunchTime() {
- return report.getStartTime();
+ return attemptInfo.getStartTime();
}
@Override
public long getFinishTime() {
- return report.getFinishTime();
+ return attemptInfo.getFinishTime();
}
@Override
public long getShuffleFinishTime() {
- return report.getShuffleFinishTime();
+ return attemptInfo.getShuffleFinishTime();
}
@Override
public long getSortFinishTime() {
- return report.getSortFinishTime();
+ return attemptInfo.getSortFinishTime();
}
@Override
public int getShufflePort() {
- throw new UnsupportedOperationException("Not supported yet.");
+ return attemptInfo.getShufflePort();
+ }
+
+ private void constructTaskAttemptReport() {
+ report = Records.newRecord(TaskAttemptReport.class);
+
+ report.setTaskAttemptId(attemptId);
+ report.setTaskAttemptState(state);
+ report.setProgress(getProgress());
+ report.setStartTime(attemptInfo.getStartTime());
+ report.setFinishTime(attemptInfo.getFinishTime());
+ report.setShuffleFinishTime(attemptInfo.getShuffleFinishTime());
+ report.setSortFinishTime(attemptInfo.getSortFinishTime());
+ if (localDiagMessage != null) {
+ report
+ .setDiagnosticInfo(attemptInfo.getError() + ", " + localDiagMessage);
+ } else {
+ report.setDiagnosticInfo(attemptInfo.getError());
+ }
+ // report.setPhase(attemptInfo.get); //TODO
+ report.setStateString(attemptInfo.getState());
+ report.setCounters(TypeConverter.toYarn(getCounters()));
+ report.setContainerId(attemptInfo.getContainerId());
+ if (attemptInfo.getHostname() == null) {
+ report.setNodeManagerHost("UNKNOWN");
+ } else {
+ report.setNodeManagerHost(attemptInfo.getHostname());
+ report.setNodeManagerPort(attemptInfo.getPort());
+ }
+ report.setNodeManagerHttpPort(attemptInfo.getHttpPort());
}
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java?rev=1294445&r1=1294444&r2=1294445&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java Tue Feb 28 02:21:42 2012
@@ -24,6 +24,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -117,9 +118,8 @@ public class JobHistory extends Abstract
//Maintains a list of known done subdirectories. Not currently used.
private final Set<Path> existingDoneSubdirs = new HashSet<Path>();
-
- private final SortedMap<JobId, Job> loadedJobCache =
- new ConcurrentSkipListMap<JobId, Job>();
+
+ private Map<JobId, Job> loadedJobCache = null;
/**
* Maintains a mapping between intermediate user directories and the last
@@ -167,6 +167,7 @@ public class JobHistory extends Abstract
* .....${DONE_DIR}/VERSION_STRING/YYYY/MM/DD/HH/SERIAL_NUM/jh{index_entries}.jhist
*/
+ @SuppressWarnings("serial")
@Override
public void init(Configuration conf) throws YarnException {
LOG.info("JobHistory Init");
@@ -224,6 +225,16 @@ public class JobHistory extends Abstract
DEFAULT_MOVE_THREAD_INTERVAL);
numMoveThreads = conf.getInt(JHAdminConfig.MR_HISTORY_MOVE_THREAD_COUNT,
DEFAULT_MOVE_THREAD_COUNT);
+
+ loadedJobCache =
+ Collections.synchronizedMap(new LinkedHashMap<JobId, Job>(
+ loadedJobCacheSize + 1, 0.75f, true) {
+ @Override
+ public boolean removeEldestEntry(final Map.Entry<JobId, Job> eldest) {
+ return super.size() > loadedJobCacheSize;
+ }
+ });
+
try {
initExisting();
} catch (IOException e) {
@@ -465,9 +476,6 @@ public class JobHistory extends Abstract
LOG.debug("Adding "+job.getID()+" to loaded job cache");
}
loadedJobCache.put(job.getID(), job);
- if (loadedJobCache.size() > loadedJobCacheSize ) {
- loadedJobCache.remove(loadedJobCache.firstKey());
- }
}
@@ -655,7 +663,7 @@ public class JobHistory extends Abstract
synchronized(metaInfo) {
try {
Job job = new CompletedJob(conf, metaInfo.getJobIndexInfo().getJobId(),
- metaInfo.getHistoryFile(), true, metaInfo.getJobIndexInfo().getUser(),
+ metaInfo.getHistoryFile(), false, metaInfo.getJobIndexInfo().getUser(),
metaInfo.getConfFile(), this.aclsMgr);
addToLoadedJobCache(job);
return job;
@@ -938,7 +946,7 @@ public class JobHistory extends Abstract
LOG.debug("Called getAllJobs()");
return getAllJobsInternal();
}
-
+
static class MetaInfo {
private Path historyFile;
private Path confFile;
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/c++/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/c++:1159757-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/c++:1159757-1294443
/hadoop/core/branches/branch-0.19/mapred/src/c++:713112
/hadoop/core/trunk/src/c++:776175-784663
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib:1152502-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib:1152502-1294443
/hadoop/core/branches/branch-0.19/mapred/src/contrib:713112
/hadoop/core/trunk/src/contrib:784664-785643
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/block_forensics/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/block_forensics:1152502-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/block_forensics:1152502-1294443
/hadoop/core/branches/branch-0.19/hdfs/src/contrib/block_forensics:713112
/hadoop/core/branches/branch-0.19/mapred/src/contrib/block_forensics:713112
/hadoop/core/trunk/src/contrib/block_forensics:784664-785643
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build-contrib.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build-contrib.xml:1161333-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build-contrib.xml:1161333-1294443
/hadoop/core/branches/branch-0.19/mapred/src/contrib/build-contrib.xml:713112
/hadoop/core/trunk/src/contrib/build-contrib.xml:776175-786373
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/build.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build.xml:1161333-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/build.xml:1161333-1294443
/hadoop/core/branches/branch-0.19/mapred/src/contrib/build.xml:713112
/hadoop/core/trunk/src/contrib/build.xml:776175-786373
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/data_join/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/data_join:1159757-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/data_join:1159757-1294443
/hadoop/core/branches/branch-0.19/mapred/src/contrib/data_join:713112
/hadoop/core/trunk/src/contrib/data_join:776175-786373
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/eclipse-plugin/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/eclipse-plugin:1159757-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/eclipse-plugin:1159757-1294443
/hadoop/core/branches/branch-0.19/core/src/contrib/eclipse-plugin:713112
/hadoop/core/branches/branch-0.19/mapred/src/contrib/eclipse-plugin:713112
/hadoop/core/trunk/src/contrib/eclipse-plugin:776175-785643
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/index/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/index:1159757-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/index:1159757-1294443
/hadoop/core/branches/branch-0.19/mapred/src/contrib/index:713112
/hadoop/core/trunk/src/contrib/index:776175-786373
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/contrib/vaidya/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/vaidya:1159757-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/contrib/vaidya:1159757-1294443
/hadoop/core/branches/branch-0.19/mapred/src/contrib/vaidya:713112
/hadoop/core/trunk/src/contrib/vaidya:776175-786373
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/examples/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/examples:1152502-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/examples:1152502-1294443
/hadoop/core/branches/branch-0.19/mapred/src/examples:713112
/hadoop/core/trunk/src/examples:776175-784663
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/java:1152502-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/java:1152502-1294443
/hadoop/core/branches/branch-0.19/mapred/src/java:713112
/hadoop/core/trunk/src/mapred:776175-785643
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred:1152502-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred:1152502-1294443
/hadoop/core/branches/branch-0.19/mapred/src/test/mapred:713112
/hadoop/core/trunk/src/test/mapred:776175-785643
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs:1159757-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/fs:1159757-1294443
/hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/fs:713112
/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/fs:776175-785643
/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/fs:817878-835934
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs:1152502-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/hdfs:1152502-1294443
/hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/hdfs:713112
/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/hdfs:776175-785643
/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/hdfs:817878-835934
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc:1159757-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/test/mapred/org/apache/hadoop/ipc:1159757-1294443
/hadoop/core/branches/branch-0.19/hdfs/src/test/hdfs-with-mr/org/apache/hadoop/ipc:713112
/hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/ipc:713112
/hadoop/core/trunk/src/test/hdfs-with-mr/org/apache/hadoop/ipc:776175-784663
Propchange: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/src/webapps/job/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Feb 28 02:21:42 2012
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-mapreduce-project/src/webapps/job:1152502-1294248
+/hadoop/common/trunk/hadoop-mapreduce-project/src/webapps/job:1152502-1294443
/hadoop/core/branches/branch-0.19/mapred/src/webapps/job:713112
/hadoop/core/trunk/src/webapps/job:776175-785643