Posted to mapreduce-commits@hadoop.apache.org by su...@apache.org on 2013/01/11 20:40:35 UTC
svn commit: r1432246 [1/2] - in
/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project: ./ conf/
dev-support/ hadoop-mapreduce-client/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/
ha...
Author: suresh
Date: Fri Jan 11 19:40:23 2013
New Revision: 1432246
URL: http://svn.apache.org/viewvc?rev=1432246&view=rev
Log:
Merge r1414455:r1426018 from trunk
Added:
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexRecord.java
- copied unchanged from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IndexRecord.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapOutputCollector.java
- copied unchanged from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapOutputCollector.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/RehashPartitioner.java
- copied unchanged from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/RehashPartitioner.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/partition/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/partition/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestRehashPartitioner.java
- copied unchanged from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestRehashPartitioner.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMergeManager.java
- copied unchanged from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMergeManager.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/pom.xml
- copied unchanged from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/pom.xml
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/MapReduceTrackingUriPlugin.java
- copied unchanged from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/MapReduceTrackingUriPlugin.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/
- copied from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
- copied unchanged from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
- copied unchanged from r1426018, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMerge.java
Removed:
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/token/DelegationTokenRenewal.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/token/TestDelegationTokenRenewal.java
Modified:
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/ (props changed)
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt (contents, props changed)
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/conf/ (props changed)
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SpillRecord.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ExceptionReporter.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapHost.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (contents, props changed)
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientCluster.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRYarnClusterAdapter.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestJobOutputCommitter.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/util/TestProcfsBasedProcessTree.java
hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1419191-1426018
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt Fri Jan 11 19:40:23 2013
@@ -14,6 +14,11 @@ Trunk (Unreleased)
MAPREDUCE-4049. Experimental api to allow for alternate shuffle plugins.
(Avner BenHanoch via acmurthy)
+ MAPREDUCE-4807. Allow MapOutputBuffer to be pluggable. (masokan via tucu)
+
+ MAPREDUCE-4887. Add RehashPartitioner, to smooth distributions
+ with poor implementations of Object#hashCode(). (Radim Kolar via cutting)
+
IMPROVEMENTS
MAPREDUCE-3787. [Gridmix] Optimize job monitoring and STRESS mode for
@@ -71,8 +76,14 @@ Trunk (Unreleased)
MAPREDUCE-4735. Make arguments in TestDFSIO case insensitive.
(Brandon Li via suresh)
+ MAPREDUCE-4809. Change visibility of classes for pluggable sort changes.
+ (masokan via tucu)
+
BUG FIXES
+ MAPREDUCE-4272. SortedRanges.Range#compareTo is not spec compliant.
+ (Yu Gao via llu)
+
MAPREDUCE-4356. [Rumen] Provide access to the method
ParsedTask.obtainTaskAttempts(). (ravigummadi)
@@ -171,6 +182,16 @@ Release 2.0.3-alpha - Unreleased
MAPREDUCE-4723. Fix warnings found by findbugs 2. (Sandy Ryza via eli)
+ MAPREDUCE-4703. Add the ability to start the MiniMRClientCluster using
+ the configurations used before it was stopped. (ahmed.radwan via tucu)
+
+ MAPREDUCE-4845. ClusterStatus.getMaxMemory() and getUsedMemory() exist in
+ MR1 but not MR2. (Sandy Ryza via tomwhite)
+
+ MAPREDUCE-4899. Implemented an MR-specific plugin for tracking finished
+ applications that YARN's ResourceManager doesn't keep track of anymore
+ (Derek Dagit via vinodkv)
+
OPTIMIZATIONS
BUG FIXES
@@ -205,6 +226,12 @@ Release 2.0.3-alpha - Unreleased
MAPREDUCE-4800. Cleanup o.a.h.mapred.MapTaskStatus - remove unused
code. (kkambatl via tucu)
+ MAPREDUCE-4861. Cleanup: Remove unused mapreduce.security.token.DelegationTokenRenewal.
+ (kkambatl via tucu)
+
+ MAPREDUCE-4856. TestJobOutputCommitter uses same directory as
+ TestJobCleanup. (Sandy Ryza via tomwhite)
+
Release 2.0.2-alpha - 2012-09-07
INCOMPATIBLE CHANGES
@@ -610,6 +637,20 @@ Release 0.23.6 - UNRELEASED
MAPREDUCE-4836. Elapsed time for running tasks on AM web UI tasks page is 0
(Ravi Prakash via jeagles)
+ MAPREDUCE-4842. Shuffle race can hang reducer (Mariappan Asokan via jlowe)
+
+ MAPREDUCE-4833. Task can get stuck in FAIL_CONTAINER_CLEANUP (Robert
+ Parker via jlowe)
+
+ MAPREDUCE-4793. Problem with adding resources when using both -files and
+ -file to hadoop streaming (jlowe)
+
+ MAPREDUCE-4890. Invalid TaskImpl state transitions when task fails while
+ speculating (jlowe)
+
+ MAPREDUCE-4902. Fix typo "receievd" should be "received" in log output
+ (Albert Chu via jlowe)
+
Release 0.23.5 - UNRELEASED
INCOMPATIBLE CHANGES
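To make the MAPREDUCE-4887 entry above concrete: a rehash partitioner re-mixes each key's hashCode() before reducing it modulo the partition count, so keys whose hash codes cluster in a few bits still spread evenly across reducers. The sketch below captures the idea only; the mixing constants are the familiar HashMap-style finalizer, not necessarily those of the committed org.apache.hadoop.mapreduce.lib.partition.RehashPartitioner.

    import org.apache.hadoop.mapreduce.Partitioner;

    public class RehashPartitionerSketch<K, V> extends Partitioner<K, V> {
      @Override
      public int getPartition(K key, V value, int numPartitions) {
        int h = key.hashCode();
        h ^= (h >>> 20) ^ (h >>> 12);   // fold high bits into the low bits
        h ^= (h >>> 7) ^ (h >>> 4);
        return (h & Integer.MAX_VALUE) % numPartitions;  // non-negative index
      }
    }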
Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1419191-1426018
Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/conf:r1419191-1426018
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml Fri Jan 11 19:40:23 2013
@@ -138,11 +138,6 @@
<Method name="run" />
<Bug pattern="DM_EXIT" />
</Match>
- <Match>
- <Class name="org.apache.hadoop.mapreduce.security.token.DelegationTokenRenewal$DelegationTokenCancelThread" />
- <Method name="run" />
- <Bug pattern="DM_EXIT" />
- </Match>
<!--
We need to cast objects between old and new api objects
-->
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java Fri Jan 11 19:40:23 2013
@@ -231,7 +231,12 @@ public abstract class TaskImpl implement
// Transitions from FAILED state
.addTransition(TaskStateInternal.FAILED, TaskStateInternal.FAILED,
EnumSet.of(TaskEventType.T_KILL,
- TaskEventType.T_ADD_SPEC_ATTEMPT))
+ TaskEventType.T_ADD_SPEC_ATTEMPT,
+ TaskEventType.T_ATTEMPT_COMMIT_PENDING,
+ TaskEventType.T_ATTEMPT_FAILED,
+ TaskEventType.T_ATTEMPT_KILLED,
+ TaskEventType.T_ATTEMPT_LAUNCHED,
+ TaskEventType.T_ATTEMPT_SUCCEEDED))
// Transitions from KILLED state
.addTransition(TaskStateInternal.KILLED, TaskStateInternal.KILLED,
@@ -941,6 +946,13 @@ public abstract class TaskImpl implement
task.handleTaskAttemptCompletion(
taskAttemptId,
TaskAttemptCompletionEventStatus.TIPFAILED);
+
+ // issue kill to all non finished attempts
+ for (TaskAttempt taskAttempt : task.attempts.values()) {
+ task.killUnfinishedAttempt
+ (taskAttempt, "Task has failed. Killing attempt!");
+ }
+ task.inProgressAttempts.clear();
if (task.historyTaskStartGenerated) {
TaskFailedEvent taskFailedEvent = createTaskFailedEvent(task, attempt.getDiagnostics(),
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java Fri Jan 11 19:40:23 2013
@@ -191,12 +191,9 @@ public class ContainerLauncherImpl exten
@SuppressWarnings("unchecked")
public synchronized void kill() {
- if(isCompletelyDone()) {
- return;
- }
if(this.state == ContainerState.PREP) {
this.state = ContainerState.KILLED_BEFORE_LAUNCH;
- } else {
+ } else if (!isCompletelyDone()) {
LOG.info("KILLING " + taskAttemptID);
ContainerManager proxy = null;
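The reordering above is the MAPREDUCE-4833 fix noted in CHANGES.txt: a kill that races ahead of the launch must still record KILLED_BEFORE_LAUNCH, which the old early return on isCompletelyDone() could skip. A self-contained sketch of the resulting control flow, with hypothetical names standing in for the real container plumbing:

    // Hypothetical stand-ins for the real ContainerLauncherImpl state handling.
    enum State { PREP, RUNNING, DONE, KILLED_BEFORE_LAUNCH }

    class ContainerSketch {
      State state = State.PREP;

      boolean isCompletelyDone() { return state == State.DONE; }

      synchronized void kill() {
        if (state == State.PREP) {
          state = State.KILLED_BEFORE_LAUNCH;  // a pre-launch kill always wins
        } else if (!isCompletelyDone()) {
          state = State.DONE;  // stand-in for the remote stopContainer call
        }
      }
    }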
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java Fri Jan 11 19:40:23 2013
@@ -602,4 +602,73 @@ public class TestTaskImpl {
assertTaskScheduledState();
assertEquals(3, taskAttempts.size());
}
+
+ @Test
+ public void testFailedTransitions() {
+ mockTask = new MockTaskImpl(jobId, partition, dispatcher.getEventHandler(),
+ remoteJobConfFile, conf, taskAttemptListener, committer, jobToken,
+ credentials, clock,
+ completedTasksFromPreviousRun, startCount,
+ metrics, appContext, TaskType.MAP) {
+ @Override
+ protected int getMaxAttempts() {
+ return 1;
+ }
+ };
+ TaskId taskId = getNewTaskID();
+ scheduleTaskAttempt(taskId);
+ launchTaskAttempt(getLastAttempt().getAttemptId());
+
+ // add three more speculative attempts
+ mockTask.handle(new TaskTAttemptEvent(getLastAttempt().getAttemptId(),
+ TaskEventType.T_ADD_SPEC_ATTEMPT));
+ launchTaskAttempt(getLastAttempt().getAttemptId());
+ mockTask.handle(new TaskTAttemptEvent(getLastAttempt().getAttemptId(),
+ TaskEventType.T_ADD_SPEC_ATTEMPT));
+ launchTaskAttempt(getLastAttempt().getAttemptId());
+ mockTask.handle(new TaskTAttemptEvent(getLastAttempt().getAttemptId(),
+ TaskEventType.T_ADD_SPEC_ATTEMPT));
+ launchTaskAttempt(getLastAttempt().getAttemptId());
+ assertEquals(4, taskAttempts.size());
+
+ // have the first attempt fail, verify task failed due to no retries
+ MockTaskAttemptImpl taskAttempt = taskAttempts.get(0);
+ taskAttempt.setState(TaskAttemptState.FAILED);
+ mockTask.handle(new TaskTAttemptEvent(taskAttempt.getAttemptId(),
+ TaskEventType.T_ATTEMPT_FAILED));
+ assertEquals(TaskState.FAILED, mockTask.getState());
+
+ // verify task can no longer be killed
+ mockTask.handle(new TaskEvent(taskId, TaskEventType.T_KILL));
+ assertEquals(TaskState.FAILED, mockTask.getState());
+
+ // verify speculative doesn't launch new tasks
+ mockTask.handle(new TaskTAttemptEvent(getLastAttempt().getAttemptId(),
+ TaskEventType.T_ADD_SPEC_ATTEMPT));
+ mockTask.handle(new TaskTAttemptEvent(getLastAttempt().getAttemptId(),
+ TaskEventType.T_ATTEMPT_LAUNCHED));
+ assertEquals(TaskState.FAILED, mockTask.getState());
+ assertEquals(4, taskAttempts.size());
+
+ // verify attempt events from active tasks don't knock task out of FAILED
+ taskAttempt = taskAttempts.get(1);
+ taskAttempt.setState(TaskAttemptState.COMMIT_PENDING);
+ mockTask.handle(new TaskTAttemptEvent(taskAttempt.getAttemptId(),
+ TaskEventType.T_ATTEMPT_COMMIT_PENDING));
+ assertEquals(TaskState.FAILED, mockTask.getState());
+ taskAttempt.setState(TaskAttemptState.FAILED);
+ mockTask.handle(new TaskTAttemptEvent(taskAttempt.getAttemptId(),
+ TaskEventType.T_ATTEMPT_FAILED));
+ assertEquals(TaskState.FAILED, mockTask.getState());
+ taskAttempt = taskAttempts.get(2);
+ taskAttempt.setState(TaskAttemptState.SUCCEEDED);
+ mockTask.handle(new TaskTAttemptEvent(taskAttempt.getAttemptId(),
+ TaskEventType.T_ATTEMPT_SUCCEEDED));
+ assertEquals(TaskState.FAILED, mockTask.getState());
+ taskAttempt = taskAttempts.get(3);
+ taskAttempt.setState(TaskAttemptState.KILLED);
+ mockTask.handle(new TaskTAttemptEvent(taskAttempt.getAttemptId(),
+ TaskEventType.T_ATTEMPT_KILLED));
+ assertEquals(TaskState.FAILED, mockTask.getState());
+ }
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java Fri Jan 11 19:40:23 2013
@@ -6,8 +6,12 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.atLeast;
+import org.mockito.ArgumentCaptor;
import java.net.InetSocketAddress;
+import java.util.concurrent.BrokenBarrierException;
+import java.util.concurrent.CyclicBarrier;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -18,15 +22,21 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher.EventType;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.yarn.api.ContainerManager;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainerRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.StopContainerResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.event.Event;
import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -272,6 +282,150 @@ public class TestContainerLauncherImpl {
} finally {
ut.stop();
verify(mockCM).stopContainer(any(StopContainerRequest.class));
-}
+ }
+ }
+
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ @Test
+ public void testContainerCleaned() throws Exception {
+ LOG.info("STARTING testContainerCleaned");
+
+ CyclicBarrier startLaunchBarrier = new CyclicBarrier(2);
+ CyclicBarrier completeLaunchBarrier = new CyclicBarrier(2);
+
+ YarnRPC mockRpc = mock(YarnRPC.class);
+ AppContext mockContext = mock(AppContext.class);
+
+ EventHandler mockEventHandler = mock(EventHandler.class);
+ when(mockContext.getEventHandler()).thenReturn(mockEventHandler);
+
+ ContainerManager mockCM = new ContainerManagerForTest(startLaunchBarrier, completeLaunchBarrier);
+ when(mockRpc.getProxy(eq(ContainerManager.class),
+ any(InetSocketAddress.class), any(Configuration.class)))
+ .thenReturn(mockCM);
+
+ ContainerLauncherImplUnderTest ut =
+ new ContainerLauncherImplUnderTest(mockContext, mockRpc);
+
+ Configuration conf = new Configuration();
+ ut.init(conf);
+ ut.start();
+ try {
+ ContainerId contId = makeContainerId(0l, 0, 0, 1);
+ TaskAttemptId taskAttemptId = makeTaskAttemptId(0l, 0, 0, TaskType.MAP, 0);
+ String cmAddress = "127.0.0.1:8000";
+ StartContainerResponse startResp =
+ recordFactory.newRecordInstance(StartContainerResponse.class);
+ startResp.setServiceResponse(ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID,
+ ShuffleHandler.serializeMetaData(80));
+
+
+ LOG.info("inserting launch event");
+ ContainerRemoteLaunchEvent mockLaunchEvent =
+ mock(ContainerRemoteLaunchEvent.class);
+ when(mockLaunchEvent.getType())
+ .thenReturn(EventType.CONTAINER_REMOTE_LAUNCH);
+ when(mockLaunchEvent.getContainerID())
+ .thenReturn(contId);
+ when(mockLaunchEvent.getTaskAttemptID()).thenReturn(taskAttemptId);
+ when(mockLaunchEvent.getContainerMgrAddress()).thenReturn(cmAddress);
+ ut.handle(mockLaunchEvent);
+
+ startLaunchBarrier.await();
+
+
+ LOG.info("inserting cleanup event");
+ ContainerLauncherEvent mockCleanupEvent =
+ mock(ContainerLauncherEvent.class);
+ when(mockCleanupEvent.getType())
+ .thenReturn(EventType.CONTAINER_REMOTE_CLEANUP);
+ when(mockCleanupEvent.getContainerID())
+ .thenReturn(contId);
+ when(mockCleanupEvent.getTaskAttemptID()).thenReturn(taskAttemptId);
+ when(mockCleanupEvent.getContainerMgrAddress()).thenReturn(cmAddress);
+ ut.handle(mockCleanupEvent);
+
+ completeLaunchBarrier.await();
+
+ ut.waitForPoolToIdle();
+
+ ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class);
+ verify(mockEventHandler, atLeast(2)).handle(arg.capture());
+ boolean containerCleaned = false;
+
+ for (int i =0; i < arg.getAllValues().size(); i++) {
+ LOG.info(arg.getAllValues().get(i).toString());
+ Event currentEvent = arg.getAllValues().get(i);
+ if (currentEvent.getType() == TaskAttemptEventType.TA_CONTAINER_CLEANED) {
+ containerCleaned = true;
+ }
+ }
+ assert(containerCleaned);
+
+ } finally {
+ ut.stop();
+ }
+ }
+
+ private static class ContainerManagerForTest implements ContainerManager {
+
+ private CyclicBarrier startLaunchBarrier;
+ private CyclicBarrier completeLaunchBarrier;
+
+ ContainerManagerForTest (CyclicBarrier startLaunchBarrier, CyclicBarrier completeLaunchBarrier) {
+ this.startLaunchBarrier = startLaunchBarrier;
+ this.completeLaunchBarrier = completeLaunchBarrier;
+ }
+ @Override
+ public StartContainerResponse startContainer(StartContainerRequest request)
+ throws YarnRemoteException {
+ try {
+ startLaunchBarrier.await();
+ completeLaunchBarrier.await();
+ //To ensure the kill is started before the launch
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ } catch (BrokenBarrierException e) {
+ e.printStackTrace();
+ }
+
+ throw new ContainerException("Force fail CM");
+
+ }
+
+ @Override
+ public StopContainerResponse stopContainer(StopContainerRequest request)
+ throws YarnRemoteException {
+
+ return null;
+ }
+
+ @Override
+ public GetContainerStatusResponse getContainerStatus(
+ GetContainerStatusRequest request) throws YarnRemoteException {
+
+ return null;
+ }
}
+
+ @SuppressWarnings("serial")
+ private static class ContainerException extends YarnRemoteException {
+
+ public ContainerException(String message) {
+ super(message);
+ }
+
+ @Override
+ public String getRemoteTrace() {
+ return null;
+ }
+
+ @Override
+ public YarnRemoteException getCause() {
+ return null;
+ }
+
+ }
+
}
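The new test above drives a deterministic interleaving with two CyclicBarriers: the launch blocks at the first barrier, the test queues the cleanup event, and the second barrier releases the launch. A stripped-down, runnable illustration of that pattern (names hypothetical, not the committed test):

    import java.util.concurrent.CyclicBarrier;

    public class BarrierInterleavingDemo {
      public static void main(String[] args) throws Exception {
        // Two parties per barrier: the "launch" worker and the test's main thread.
        CyclicBarrier startLaunch = new CyclicBarrier(2);
        CyclicBarrier completeLaunch = new CyclicBarrier(2);

        Thread launcher = new Thread(() -> {
          try {
            startLaunch.await();     // 1: signal that launch has begun
            completeLaunch.await();  // 3: wait until the test has queued the kill
            System.out.println("launch proceeds only after kill is queued");
          } catch (Exception e) {
            throw new RuntimeException(e);
          }
        });
        launcher.start();

        startLaunch.await();         // 2: launch is now in flight
        System.out.println("queueing cleanup/kill while launch is blocked");
        completeLaunch.await();      // 4: release the launcher
        launcher.join();
      }
    }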
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java Fri Jan 11 19:40:23 2013
@@ -175,6 +175,8 @@ public class ClusterStatus implements Wr
}
+ public static final int UNINITIALIZED_MEMORY_VALUE = -1;
+
private int numActiveTrackers;
private Collection<String> activeTrackers = new ArrayList<String>();
private int numBlacklistedTrackers;
@@ -384,6 +386,22 @@ public class ClusterStatus implements Wr
public JobTrackerStatus getJobTrackerStatus() {
return status;
}
+
+ /**
+ * Returns UNINITIALIZED_MEMORY_VALUE (-1)
+ */
+ @Deprecated
+ public long getMaxMemory() {
+ return UNINITIALIZED_MEMORY_VALUE;
+ }
+
+ /**
+ * Returns UNINITIALIZED_MEMORY_VALUE (-1)
+ */
+ @Deprecated
+ public long getUsedMemory() {
+ return UNINITIALIZED_MEMORY_VALUE;
+ }
/**
* Gets the list of blacklisted trackers along with reasons for blacklisting.
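Caller-side, the deprecated getters above exist only so MR1 code compiles against MR2; both always return the -1 sentinel. A small sketch of how a ported caller might guard on it:

    import org.apache.hadoop.mapred.ClusterStatus;
    import org.apache.hadoop.mapred.JobClient;

    public class MemoryCheck {
      // Callers ported from MR1 should treat the -1 sentinel as "unknown".
      static boolean memoryReported(JobClient jobClient) throws java.io.IOException {
        ClusterStatus status = jobClient.getClusterStatus();
        @SuppressWarnings("deprecation")
        long maxMemory = status.getMaxMemory();  // always -1 under MR2 per the hunk above
        return maxMemory != ClusterStatus.UNINITIALIZED_MEMORY_VALUE;
      }
    }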
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java Fri Jan 11 19:40:23 2013
@@ -34,6 +34,8 @@ import java.util.concurrent.locks.Reentr
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -54,6 +56,7 @@ import org.apache.hadoop.io.serializer.S
import org.apache.hadoop.mapred.IFile.Writer;
import org.apache.hadoop.mapred.Merger.Segment;
import org.apache.hadoop.mapred.SortedRanges.SkipRangeIterator;
+import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskCounter;
@@ -71,7 +74,9 @@ import org.apache.hadoop.util.StringInte
import org.apache.hadoop.util.StringUtils;
/** A Map task. */
-class MapTask extends Task {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public class MapTask extends Task {
/**
* The size of each record in the index file for the map-outputs.
*/
@@ -338,6 +343,10 @@ class MapTask extends Task {
done(umbilical, reporter);
}
+ public Progress getSortPhase() {
+ return sortPhase;
+ }
+
@SuppressWarnings("unchecked")
private <T> T getSplitDetails(Path file, long offset)
throws IOException {
@@ -367,6 +376,22 @@ class MapTask extends Task {
}
@SuppressWarnings("unchecked")
+ private <KEY, VALUE> MapOutputCollector<KEY, VALUE>
+ createSortingCollector(JobConf job, TaskReporter reporter)
+ throws IOException, ClassNotFoundException {
+ MapOutputCollector<KEY, VALUE> collector
+ = (MapOutputCollector<KEY, VALUE>)
+ ReflectionUtils.newInstance(
+ job.getClass(JobContext.MAP_OUTPUT_COLLECTOR_CLASS_ATTR,
+ MapOutputBuffer.class, MapOutputCollector.class), job);
+ LOG.info("Map output collector class = " + collector.getClass().getName());
+ MapOutputCollector.Context context =
+ new MapOutputCollector.Context(this, job, reporter);
+ collector.init(context);
+ return collector;
+ }
+
+ @SuppressWarnings("unchecked")
private <INKEY,INVALUE,OUTKEY,OUTVALUE>
void runOldMapper(final JobConf job,
final TaskSplitIndex splitIndex,
@@ -388,11 +413,14 @@ class MapTask extends Task {
int numReduceTasks = conf.getNumReduceTasks();
LOG.info("numReduceTasks: " + numReduceTasks);
- MapOutputCollector collector = null;
+ MapOutputCollector<OUTKEY, OUTVALUE> collector = null;
if (numReduceTasks > 0) {
- collector = new MapOutputBuffer(umbilical, job, reporter);
+ collector = createSortingCollector(job, reporter);
} else {
- collector = new DirectMapOutputCollector(umbilical, job, reporter);
+ collector = new DirectMapOutputCollector<OUTKEY, OUTVALUE>();
+ MapOutputCollector.Context context =
+ new MapOutputCollector.Context(this, job, reporter);
+ collector.init(context);
}
MapRunnable<INKEY,INVALUE,OUTKEY,OUTVALUE> runner =
ReflectionUtils.newInstance(job.getMapRunnerClass(), job);
@@ -638,7 +666,7 @@ class MapTask extends Task {
TaskUmbilicalProtocol umbilical,
TaskReporter reporter
) throws IOException, ClassNotFoundException {
- collector = new MapOutputBuffer<K,V>(umbilical, job, reporter);
+ collector = createSortingCollector(job, reporter);
partitions = jobContext.getNumReduceTasks();
if (partitions > 1) {
partitioner = (org.apache.hadoop.mapreduce.Partitioner<K,V>)
@@ -734,17 +762,6 @@ class MapTask extends Task {
output.close(mapperContext);
}
- interface MapOutputCollector<K, V> {
-
- public void collect(K key, V value, int partition
- ) throws IOException, InterruptedException;
- public void close() throws IOException, InterruptedException;
-
- public void flush() throws IOException, InterruptedException,
- ClassNotFoundException;
-
- }
-
class DirectMapOutputCollector<K, V>
implements MapOutputCollector<K, V> {
@@ -752,14 +769,18 @@ class MapTask extends Task {
private TaskReporter reporter = null;
- private final Counters.Counter mapOutputRecordCounter;
- private final Counters.Counter fileOutputByteCounter;
- private final List<Statistics> fsStats;
+ private Counters.Counter mapOutputRecordCounter;
+ private Counters.Counter fileOutputByteCounter;
+ private List<Statistics> fsStats;
+
+ public DirectMapOutputCollector() {
+ }
@SuppressWarnings("unchecked")
- public DirectMapOutputCollector(TaskUmbilicalProtocol umbilical,
- JobConf job, TaskReporter reporter) throws IOException {
- this.reporter = reporter;
+ public void init(MapOutputCollector.Context context
+ ) throws IOException, ClassNotFoundException {
+ this.reporter = context.getReporter();
+ JobConf job = context.getJobConf();
String finalName = getOutputName(getPartition());
FileSystem fs = FileSystem.get(job);
@@ -814,25 +835,27 @@ class MapTask extends Task {
}
}
- private class MapOutputBuffer<K extends Object, V extends Object>
+ @InterfaceAudience.LimitedPrivate({"MapReduce"})
+ @InterfaceStability.Unstable
+ public static class MapOutputBuffer<K extends Object, V extends Object>
implements MapOutputCollector<K, V>, IndexedSortable {
- final int partitions;
- final JobConf job;
- final TaskReporter reporter;
- final Class<K> keyClass;
- final Class<V> valClass;
- final RawComparator<K> comparator;
- final SerializationFactory serializationFactory;
- final Serializer<K> keySerializer;
- final Serializer<V> valSerializer;
- final CombinerRunner<K,V> combinerRunner;
- final CombineOutputCollector<K, V> combineCollector;
+ private int partitions;
+ private JobConf job;
+ private TaskReporter reporter;
+ private Class<K> keyClass;
+ private Class<V> valClass;
+ private RawComparator<K> comparator;
+ private SerializationFactory serializationFactory;
+ private Serializer<K> keySerializer;
+ private Serializer<V> valSerializer;
+ private CombinerRunner<K,V> combinerRunner;
+ private CombineOutputCollector<K, V> combineCollector;
// Compression for map-outputs
- final CompressionCodec codec;
+ private CompressionCodec codec;
// k/v accounting
- final IntBuffer kvmeta; // metadata overlay on backing store
+ private IntBuffer kvmeta; // metadata overlay on backing store
int kvstart; // marks origin of spill metadata
int kvend; // marks end of spill metadata
int kvindex; // marks end of fully serialized records
@@ -856,15 +879,15 @@ class MapTask extends Task {
private static final int METASIZE = NMETA * 4; // size in bytes
// spill accounting
- final int maxRec;
- final int softLimit;
+ private int maxRec;
+ private int softLimit;
boolean spillInProgress;;
int bufferRemaining;
volatile Throwable sortSpillException = null;
int numSpills = 0;
- final int minSpillsForCombine;
- final IndexedSorter sorter;
+ private int minSpillsForCombine;
+ private IndexedSorter sorter;
final ReentrantLock spillLock = new ReentrantLock();
final Condition spillDone = spillLock.newCondition();
final Condition spillReady = spillLock.newCondition();
@@ -872,12 +895,12 @@ class MapTask extends Task {
volatile boolean spillThreadRunning = false;
final SpillThread spillThread = new SpillThread();
- final FileSystem rfs;
+ private FileSystem rfs;
// Counters
- final Counters.Counter mapOutputByteCounter;
- final Counters.Counter mapOutputRecordCounter;
- final Counters.Counter fileOutputByteCounter;
+ private Counters.Counter mapOutputByteCounter;
+ private Counters.Counter mapOutputRecordCounter;
+ private Counters.Counter fileOutputByteCounter;
final ArrayList<SpillRecord> indexCacheList =
new ArrayList<SpillRecord>();
@@ -885,12 +908,23 @@ class MapTask extends Task {
private int indexCacheMemoryLimit;
private static final int INDEX_CACHE_MEMORY_LIMIT_DEFAULT = 1024 * 1024;
+ private MapTask mapTask;
+ private MapOutputFile mapOutputFile;
+ private Progress sortPhase;
+ private Counters.Counter spilledRecordsCounter;
+
+ public MapOutputBuffer() {
+ }
+
@SuppressWarnings("unchecked")
- public MapOutputBuffer(TaskUmbilicalProtocol umbilical, JobConf job,
- TaskReporter reporter
- ) throws IOException, ClassNotFoundException {
- this.job = job;
- this.reporter = reporter;
+ public void init(MapOutputCollector.Context context
+ ) throws IOException, ClassNotFoundException {
+ job = context.getJobConf();
+ reporter = context.getReporter();
+ mapTask = context.getMapTask();
+ mapOutputFile = mapTask.getMapOutputFile();
+ sortPhase = mapTask.getSortPhase();
+ spilledRecordsCounter = reporter.getCounter(TaskCounter.SPILLED_RECORDS);
partitions = job.getNumReduceTasks();
rfs = ((LocalFileSystem)FileSystem.getLocal(job)).getRaw();
@@ -967,7 +1001,7 @@ class MapTask extends Task {
if (combinerRunner != null) {
final Counters.Counter combineOutputCounter =
reporter.getCounter(TaskCounter.COMBINE_OUTPUT_RECORDS);
- combineCollector= new CombineOutputCollector<K,V>(combineOutputCounter, reporter, conf);
+ combineCollector= new CombineOutputCollector<K,V>(combineOutputCounter, reporter, job);
} else {
combineCollector = null;
}
@@ -1118,6 +1152,10 @@ class MapTask extends Task {
}
}
+ private TaskAttemptID getTaskID() {
+ return mapTask.getTaskID();
+ }
+
/**
* Set the point from which meta and serialization data expand. The meta
* indices are aligned with the buffer, so metadata never spans the ends of
@@ -1490,7 +1528,7 @@ class MapTask extends Task {
if (lspillException instanceof Error) {
final String logMsg = "Task " + getTaskID() + " failed : " +
StringUtils.stringifyException(lspillException);
- reportFatalError(getTaskID(), lspillException, logMsg);
+ mapTask.reportFatalError(getTaskID(), lspillException, logMsg);
}
throw new IOException("Spill failed", lspillException);
}
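The hunks above turn MapOutputBuffer into one implementation of the now top-level org.apache.hadoop.mapred.MapOutputCollector (added earlier in this commit), instantiated reflectively by createSortingCollector() and initialized through a Context carrying the map task, job conf and reporter. A minimal do-nothing implementation, sketched for illustration only; a real collector must also partition, sort and spill the way MapOutputBuffer does:

    import java.io.IOException;
    import org.apache.hadoop.mapred.MapOutputCollector;

    public class CountingCollector<K, V> implements MapOutputCollector<K, V> {
      private long records;

      @Override
      public void init(MapOutputCollector.Context context)
          throws IOException, ClassNotFoundException {
        // context exposes the map task, job conf and reporter (see the hunk above)
      }

      @Override
      public void collect(K key, V value, int partition)
          throws IOException, InterruptedException {
        records++;
      }

      @Override
      public void flush() throws IOException, InterruptedException,
          ClassNotFoundException {
      }

      @Override
      public void close() throws IOException, InterruptedException {
        System.out.println("collected " + records + " records");
      }
    }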
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java Fri Jan 11 19:40:23 2013
@@ -271,7 +271,7 @@ class SortedRanges implements Writable{
}
public boolean equals(Object o) {
- if(o!=null && o instanceof Range) {
+ if (o instanceof Range) {
Range range = (Range)o;
return startIndex==range.startIndex &&
length==range.length;
@@ -285,10 +285,11 @@ class SortedRanges implements Writable{
}
public int compareTo(Range o) {
- if(this.equals(o)) {
- return 0;
- }
- return (this.startIndex > o.startIndex) ? 1:-1;
+ // Ensure sgn(x.compareTo(y)) == -sgn(y.compareTo(x))
+ return this.startIndex < o.startIndex ? -1 :
+ (this.startIndex > o.startIndex ? 1 :
+ (this.length < o.length ? -1 :
+ (this.length > o.length ? 1 : 0)));
}
public void readFields(DataInput in) throws IOException {
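To make the MAPREDUCE-4272 contract violation concrete: equals() checks both startIndex and length, but the old compareTo() only looked at startIndex, so two distinct ranges sharing a startIndex each reported "less than" the other, breaking sgn(x.compareTo(y)) == -sgn(y.compareTo(x)) and with it TreeSet/sort behavior. A minimal reproduction of the pattern with a hypothetical class (SortedRanges.Range itself is package-private):

    final class R implements Comparable<R> {
      final long start, length;
      R(long start, long length) { this.start = start; this.length = length; }

      @Override public boolean equals(Object o) {
        return o instanceof R && start == ((R) o).start && length == ((R) o).length;
      }
      @Override public int hashCode() { return (int) (start * 31 + length); }

      int oldCompareTo(R o) {                 // the buggy version
        if (equals(o)) return 0;
        return start > o.start ? 1 : -1;      // ties on start both return -1
      }

      @Override public int compareTo(R o) {   // the fixed version: start, then length
        return start < o.start ? -1 : start > o.start ? 1
             : Long.compare(length, o.length);
      }
    }

    // R a = new R(5, 1), b = new R(5, 3);
    // a.oldCompareTo(b) == -1 and b.oldCompareTo(a) == -1  -> contract violated
    // a.compareTo(b) == -1 and b.compareTo(a) == +1        -> consistent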
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SpillRecord.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SpillRecord.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SpillRecord.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SpillRecord.java Fri Jan 11 19:40:23 2013
@@ -26,6 +26,8 @@ import java.util.zip.CheckedInputStream;
import java.util.zip.CheckedOutputStream;
import java.util.zip.Checksum;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -34,7 +36,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.PureJavaCrc32;
-class SpillRecord {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public class SpillRecord {
/** Backing store */
private final ByteBuffer buf;
@@ -143,17 +147,3 @@ class SpillRecord {
}
}
-
-class IndexRecord {
- long startOffset;
- long rawLength;
- long partLength;
-
- public IndexRecord() { }
-
- public IndexRecord(long startOffset, long rawLength, long partLength) {
- this.startOffset = startOffset;
- this.rawLength = rawLength;
- this.partLength = partLength;
- }
-}
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java Fri Jan 11 19:40:23 2013
@@ -61,8 +61,7 @@ import org.apache.hadoop.mapreduce.MRCon
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer;
import org.apache.hadoop.mapreduce.task.ReduceContextImpl;
-import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
-import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin.*;
+import org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.Progress;
import org.apache.hadoop.util.Progressable;
@@ -169,7 +168,7 @@ abstract public class Task implements Wr
private Iterator<Long> currentRecIndexIterator =
skipRanges.skipRangeIterator();
- private ResourceCalculatorPlugin resourceCalculator = null;
+ private ResourceCalculatorProcessTree pTree;
private long initCpuCumulativeTime = 0;
protected JobConf conf;
@@ -372,7 +371,7 @@ abstract public class Task implements Wr
* Return current state of the task.
* needs to be synchronized as communication thread
* sends the state every second
- * @return
+ * @return task state
*/
synchronized TaskStatus.State getState(){
return this.taskStatus.getRunState();
@@ -558,15 +557,15 @@ abstract public class Task implements Wr
}
}
committer.setupTask(taskContext);
- Class<? extends ResourceCalculatorPlugin> clazz =
- conf.getClass(MRConfig.RESOURCE_CALCULATOR_PLUGIN,
- null, ResourceCalculatorPlugin.class);
- resourceCalculator = ResourceCalculatorPlugin
- .getResourceCalculatorPlugin(clazz, conf);
- LOG.info(" Using ResourceCalculatorPlugin : " + resourceCalculator);
- if (resourceCalculator != null) {
- initCpuCumulativeTime =
- resourceCalculator.getProcResourceValues().getCumulativeCpuTime();
+ Class<? extends ResourceCalculatorProcessTree> clazz =
+ conf.getClass(MRConfig.RESOURCE_CALCULATOR_PROCESS_TREE,
+ null, ResourceCalculatorProcessTree.class);
+ pTree = ResourceCalculatorProcessTree
+ .getResourceCalculatorProcessTree(System.getenv().get("JVM_PID"), clazz, conf);
+ LOG.info(" Using ResourceCalculatorProcessTree : " + pTree);
+ if (pTree != null) {
+ pTree.updateProcessTree();
+ initCpuCumulativeTime = pTree.getCumulativeCpuTime();
}
}
@@ -584,9 +583,9 @@ abstract public class Task implements Wr
return status;
}
- @InterfaceAudience.Private
+ @InterfaceAudience.LimitedPrivate({"MapReduce"})
@InterfaceStability.Unstable
- protected class TaskReporter
+ public class TaskReporter
extends org.apache.hadoop.mapreduce.StatusReporter
implements Runnable, Reporter {
private TaskUmbilicalProtocol umbilical;
@@ -817,14 +816,14 @@ abstract public class Task implements Wr
// Update generic resource counters
updateHeapUsageCounter();
- // Updating resources specified in ResourceCalculatorPlugin
- if (resourceCalculator == null) {
+ // Updating resources specified in ResourceCalculatorProcessTree
+ if (pTree == null) {
return;
}
- ProcResourceValues res = resourceCalculator.getProcResourceValues();
- long cpuTime = res.getCumulativeCpuTime();
- long pMem = res.getPhysicalMemorySize();
- long vMem = res.getVirtualMemorySize();
+ pTree.updateProcessTree();
+ long cpuTime = pTree.getCumulativeCpuTime();
+ long pMem = pTree.getCumulativeRssmem();
+ long vMem = pTree.getCumulativeVmem();
// Remove the CPU time consumed previously by JVM reuse
cpuTime -= initCpuCumulativeTime;
counters.findCounter(TaskCounter.CPU_MILLISECONDS).setValue(cpuTime);
@@ -1466,9 +1465,9 @@ abstract public class Task implements Wr
return reducerContext;
}
- @InterfaceAudience.Private
+ @InterfaceAudience.LimitedPrivate({"MapReduce"})
@InterfaceStability.Unstable
- protected static abstract class CombinerRunner<K,V> {
+ public static abstract class CombinerRunner<K,V> {
protected final Counters.Counter inputCounter;
protected final JobConf job;
protected final TaskReporter reporter;
@@ -1486,13 +1485,13 @@ abstract public class Task implements Wr
* @param iterator the key/value pairs to use as input
* @param collector the output collector
*/
- abstract void combine(RawKeyValueIterator iterator,
+ public abstract void combine(RawKeyValueIterator iterator,
OutputCollector<K,V> collector
) throws IOException, InterruptedException,
ClassNotFoundException;
@SuppressWarnings("unchecked")
- static <K,V>
+ public static <K,V>
CombinerRunner<K,V> create(JobConf job,
TaskAttemptID taskId,
Counters.Counter inputCounter,
@@ -1542,7 +1541,7 @@ abstract public class Task implements Wr
}
@SuppressWarnings("unchecked")
- protected void combine(RawKeyValueIterator kvIter,
+ public void combine(RawKeyValueIterator kvIter,
OutputCollector<K,V> combineCollector
) throws IOException {
Reducer<K,V,K,V> combiner =
@@ -1611,7 +1610,7 @@ abstract public class Task implements Wr
@SuppressWarnings("unchecked")
@Override
- void combine(RawKeyValueIterator iterator,
+ public void combine(RawKeyValueIterator iterator,
OutputCollector<K,V> collector
) throws IOException, InterruptedException,
ClassNotFoundException {
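Taken together, the Task.java hunks above swap the old ResourceCalculatorPlugin for a ResourceCalculatorProcessTree that is resolved reflectively from the new config key and refreshed in place before each counter read. A minimal sketch of that lifecycle, using only the calls visible in this diff; the import path for ResourceCalculatorProcessTree is an assumption to be matched against your tree:

    import org.apache.hadoop.conf.Configuration;
    // Package assumed; adjust to wherever ResourceCalculatorProcessTree
    // lives on your classpath.
    import org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree;

    public class ProcessTreeSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Resolve the configured implementation; a null default lets the
        // factory pick a platform-appropriate tree (or return null).
        Class<? extends ResourceCalculatorProcessTree> clazz =
            conf.getClass("mapreduce.job.process-tree.class",
                          null, ResourceCalculatorProcessTree.class);
        ResourceCalculatorProcessTree pTree = ResourceCalculatorProcessTree
            .getResourceCalculatorProcessTree(
                System.getenv("JVM_PID"), clazz, conf);
        if (pTree == null) {
          return; // no process-tree support on this platform
        }
        // Snapshot CPU once at startup so JVM reuse does not inflate it.
        pTree.updateProcessTree();
        long initCpuCumulativeTime = pTree.getCumulativeCpuTime();

        // Later, on each status update: refresh, then read the totals.
        pTree.updateProcessTree();
        long cpuMillis = pTree.getCumulativeCpuTime() - initCpuCumulativeTime;
        long physicalBytes = pTree.getCumulativeRssmem();
        long virtualBytes = pTree.getCumulativeVmem();
        System.out.println(cpuMillis + " " + physicalBytes + " " + virtualBytes);
      }
    }

The MRConfig hunk below renames the key itself; jobs that still set mapreduce.job.resourcecalculatorplugin will silently fall back to the default.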
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRConfig.java Fri Jan 11 19:40:23 2013
@@ -55,8 +55,8 @@ public interface MRConfig {
public static final long DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT =
7*24*60*60*1000; // 7 days
- public static final String RESOURCE_CALCULATOR_PLUGIN =
- "mapreduce.job.resourcecalculatorplugin";
+ public static final String RESOURCE_CALCULATOR_PROCESS_TREE =
+ "mapreduce.job.process-tree.class";
public static final String STATIC_RESOLUTIONS =
"mapreduce.job.net.static.resolutions";
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java Fri Jan 11 19:40:23 2013
@@ -30,6 +30,9 @@ public interface MRJobConfig {
public static final String MAP_CLASS_ATTR = "mapreduce.job.map.class";
+ public static final String MAP_OUTPUT_COLLECTOR_CLASS_ATTR
+ = "mapreduce.job.map.output.collector.class";
+
public static final String COMBINE_CLASS_ATTR = "mapreduce.job.combine.class";
public static final String REDUCE_CLASS_ATTR = "mapreduce.job.reduce.class";
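The new MAP_OUTPUT_COLLECTOR_CLASS_ATTR makes the map-side collector pluggable. A hedged fragment of job-setup code selecting a custom collector; com.example.shuffle.MyCollector is hypothetical and would have to implement the MapOutputCollector interface added in this merge:

    import org.apache.hadoop.mapred.JobConf;

    JobConf job = new JobConf();
    // Hypothetical custom collector; the default remains
    // org.apache.hadoop.mapred.MapTask$MapOutputBuffer (see the
    // mapred-default.xml hunk later in this diff).
    job.set("mapreduce.job.map.output.collector.class",
        "com.example.shuffle.MyCollector");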
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ExceptionReporter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ExceptionReporter.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ExceptionReporter.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ExceptionReporter.java Fri Jan 11 19:40:23 2013
@@ -17,9 +17,14 @@
*/
package org.apache.hadoop.mapreduce.task.reduce;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
/**
* An interface for reporting exceptions to other threads
*/
-interface ExceptionReporter {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public interface ExceptionReporter {
void reportException(Throwable t);
}
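With ExceptionReporter now public, shuffle code outside the package can hand fatal errors back to the task. A minimal implementation sketch (how the owning thread consumes the failure is an assumption, not part of the interface):

    import org.apache.hadoop.mapreduce.task.reduce.ExceptionReporter;

    public class InterruptingReporter implements ExceptionReporter {
      private final Thread owner = Thread.currentThread();
      private volatile Throwable first;

      @Override
      public void reportException(Throwable t) {
        if (first == null) {
          first = t;         // keep only the first failure
          owner.interrupt(); // wake the owner so it can rethrow
        }
      }

      public Throwable getFirst() {
        return first;
      }
    }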
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java Fri Jan 11 19:40:23 2013
@@ -282,7 +282,7 @@ class Fetcher<K,V> extends Thread {
LOG.debug("url="+msgToEncode+";encHash="+encHash+";replyHash="+replyHash);
// verify that replyHash is HMac of encHash
SecureShuffleUtils.verifyReply(replyHash, encHash, jobTokenSecret);
- LOG.info("for url="+msgToEncode+" sent hash and receievd reply");
+ LOG.info("for url="+msgToEncode+" sent hash and received reply");
} catch (IOException ie) {
boolean connectExcpt = ie instanceof ConnectException;
ioErrs.increment(1);
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapHost.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapHost.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapHost.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapHost.java Fri Jan 11 19:40:23 2013
@@ -20,9 +20,14 @@ package org.apache.hadoop.mapreduce.task
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
import org.apache.hadoop.mapreduce.TaskAttemptID;
-class MapHost {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public class MapHost {
public static enum State {
IDLE, // No map outputs available
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapOutput.java Fri Jan 11 19:40:23 2013
@@ -24,6 +24,8 @@ import java.util.concurrent.atomic.Atomi
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalDirAllocator;
@@ -33,7 +35,9 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapOutputFile;
import org.apache.hadoop.mapreduce.TaskAttemptID;
-class MapOutput<K,V> {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public class MapOutput<K,V> {
private static final Log LOG = LogFactory.getLog(MapOutput.class);
private static AtomicInteger ID = new AtomicInteger(0);
@@ -62,7 +66,7 @@ class MapOutput<K,V> {
private final boolean primaryMapOutput;
- MapOutput(TaskAttemptID mapId, MergeManager<K,V> merger, long size,
+ public MapOutput(TaskAttemptID mapId, MergeManager<K,V> merger, long size,
JobConf conf, LocalDirAllocator localDirAllocator,
int fetcher, boolean primaryMapOutput, MapOutputFile mapOutputFile)
throws IOException {
@@ -87,7 +91,7 @@ class MapOutput<K,V> {
this.primaryMapOutput = primaryMapOutput;
}
- MapOutput(TaskAttemptID mapId, MergeManager<K,V> merger, int size,
+ public MapOutput(TaskAttemptID mapId, MergeManager<K,V> merger, int size,
boolean primaryMapOutput) {
this.id = ID.incrementAndGet();
this.mapId = mapId;
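Making the MapOutput constructors public lets an external shuffle plugin build MapOutput instances for the MergeManager itself. A hypothetical helper using the shorter constructor shown above:

    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.task.reduce.MapOutput;
    import org.apache.hadoop.mapreduce.task.reduce.MergeManager;

    class MapOutputFactory {
      // Hypothetical convenience wrapper for a custom shuffle plugin.
      static <K, V> MapOutput<K, V> wrapFetchedOutput(
          TaskAttemptID mapId, MergeManager<K, V> merger, int length) {
        // true = primary map output, not a speculative attempt's duplicate
        return new MapOutput<K, V>(mapId, merger, length, true);
      }
    }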
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java Fri Jan 11 19:40:23 2013
@@ -58,8 +58,10 @@ import org.apache.hadoop.mapreduce.task.
import org.apache.hadoop.util.Progress;
import org.apache.hadoop.util.ReflectionUtils;
-@SuppressWarnings(value={"unchecked", "deprecation"})
-@InterfaceAudience.Private
+import com.google.common.annotations.VisibleForTesting;
+
+@SuppressWarnings(value={"unchecked"})
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
@InterfaceStability.Unstable
public class MergeManager<K, V> {
@@ -85,7 +87,7 @@ public class MergeManager<K, V> {
Set<MapOutput<K, V>> inMemoryMapOutputs =
new TreeSet<MapOutput<K,V>>(new MapOutputComparator<K, V>());
- private final InMemoryMerger inMemoryMerger;
+ private final MergeThread<MapOutput<K,V>, K,V> inMemoryMerger;
Set<Path> onDiskMapOutputs = new TreeSet<Path>();
private final OnDiskMerger onDiskMerger;
@@ -179,6 +181,8 @@ public class MergeManager<K, V> {
+ singleShuffleMemoryLimitPercent);
}
+ usedMemory = 0L;
+ commitMemory = 0L;
this.maxSingleShuffleLimit =
(long)(memoryLimit * singleShuffleMemoryLimitPercent);
this.memToMemMergeOutputsThreshold =
@@ -210,7 +214,7 @@ public class MergeManager<K, V> {
this.memToMemMerger = null;
}
- this.inMemoryMerger = new InMemoryMerger(this);
+ this.inMemoryMerger = createInMemoryMerger();
this.inMemoryMerger.start();
this.onDiskMerger = new OnDiskMerger(this);
@@ -219,11 +223,19 @@ public class MergeManager<K, V> {
this.mergePhase = mergePhase;
}
+ protected MergeThread<MapOutput<K,V>, K,V> createInMemoryMerger() {
+ return new InMemoryMerger(this);
+ }
TaskAttemptID getReduceId() {
return reduceId;
}
+ @VisibleForTesting
+ ExceptionReporter getExceptionReporter() {
+ return exceptionReporter;
+ }
+
public void waitForInMemoryMerge() throws InterruptedException {
inMemoryMerger.waitForMerge();
}
@@ -288,7 +300,6 @@ public class MergeManager<K, V> {
}
synchronized void unreserve(long size) {
- commitMemory -= size;
usedMemory -= size;
}
@@ -300,24 +311,20 @@ public class MergeManager<K, V> {
commitMemory+= mapOutput.getSize();
- synchronized (inMemoryMerger) {
- // Can hang if mergeThreshold is really low.
- if (!inMemoryMerger.isInProgress() && commitMemory >= mergeThreshold) {
- LOG.info("Starting inMemoryMerger's merge since commitMemory=" +
- commitMemory + " > mergeThreshold=" + mergeThreshold +
- ". Current usedMemory=" + usedMemory);
- inMemoryMapOutputs.addAll(inMemoryMergedMapOutputs);
- inMemoryMergedMapOutputs.clear();
- inMemoryMerger.startMerge(inMemoryMapOutputs);
- }
+ // Can hang if mergeThreshold is really low.
+ if (commitMemory >= mergeThreshold) {
+ LOG.info("Starting inMemoryMerger's merge since commitMemory=" +
+ commitMemory + " >= mergeThreshold=" + mergeThreshold +
+ ". Current usedMemory=" + usedMemory);
+ inMemoryMapOutputs.addAll(inMemoryMergedMapOutputs);
+ inMemoryMergedMapOutputs.clear();
+ inMemoryMerger.startMerge(inMemoryMapOutputs);
+ commitMemory = 0L; // Reset commitMemory.
}
if (memToMemMerger != null) {
- synchronized (memToMemMerger) {
- if (!memToMemMerger.isInProgress() &&
- inMemoryMapOutputs.size() >= memToMemMergeOutputsThreshold) {
- memToMemMerger.startMerge(inMemoryMapOutputs);
- }
+ if (inMemoryMapOutputs.size() >= memToMemMergeOutputsThreshold) {
+ memToMemMerger.startMerge(inMemoryMapOutputs);
}
}
}
@@ -333,11 +340,8 @@ public class MergeManager<K, V> {
public synchronized void closeOnDiskFile(Path file) {
onDiskMapOutputs.add(file);
- synchronized (onDiskMerger) {
- if (!onDiskMerger.isInProgress() &&
- onDiskMapOutputs.size() >= (2 * ioSortFactor - 1)) {
- onDiskMerger.startMerge(onDiskMapOutputs);
- }
+ if (onDiskMapOutputs.size() >= (2 * ioSortFactor - 1)) {
+ onDiskMerger.startMerge(onDiskMapOutputs);
}
}
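The closeInMemoryFile/closeOnDiskFile rewrites drop the old isInProgress() guard: once a threshold trips, the batch is queued unconditionally and commitMemory is reset so the next batch accumulates from zero (unreserve correspondingly stops decrementing it). A toy model of the new trigger, independent of the Hadoop classes:

    // Toy model: queue a merge whenever committed bytes cross the
    // threshold, then reset the accumulator. All names are illustrative.
    class MergeTrigger {
      private long commitMemory = 0L;
      private final long mergeThreshold;

      MergeTrigger(long mergeThreshold) {
        this.mergeThreshold = mergeThreshold;
      }

      synchronized void commit(long size) {
        commitMemory += size;
        if (commitMemory >= mergeThreshold) {
          startMerge();      // enqueue work; never blocks on a running merge
          commitMemory = 0L; // reset so the next batch starts fresh
        }
      }

      void startMerge() {
        System.out.println("merge queued");
      }
    }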
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java Fri Jan 11 19:40:23 2013
@@ -20,8 +20,10 @@ package org.apache.hadoop.mapreduce.task
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
+import java.util.LinkedList;
import java.util.List;
import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -30,8 +32,8 @@ abstract class MergeThread<T,K,V> extend
private static final Log LOG = LogFactory.getLog(MergeThread.class);
- private volatile boolean inProgress = false;
- private List<T> inputs = new ArrayList<T>();
+ private AtomicInteger numPending = new AtomicInteger(0);
+ private LinkedList<List<T>> pendingToBeMerged;
protected final MergeManager<K,V> manager;
private final ExceptionReporter reporter;
private boolean closed = false;
@@ -39,6 +41,7 @@ abstract class MergeThread<T,K,V> extend
public MergeThread(MergeManager<K,V> manager, int mergeFactor,
ExceptionReporter reporter) {
+ this.pendingToBeMerged = new LinkedList<List<T>>();
this.manager = manager;
this.mergeFactor = mergeFactor;
this.reporter = reporter;
@@ -50,53 +53,55 @@ abstract class MergeThread<T,K,V> extend
interrupt();
}
- public synchronized boolean isInProgress() {
- return inProgress;
- }
-
- public synchronized void startMerge(Set<T> inputs) {
+ public void startMerge(Set<T> inputs) {
if (!closed) {
- inProgress = true;
- this.inputs = new ArrayList<T>();
+ numPending.incrementAndGet();
+ List<T> toMergeInputs = new ArrayList<T>();
Iterator<T> iter=inputs.iterator();
for (int ctr = 0; iter.hasNext() && ctr < mergeFactor; ++ctr) {
- this.inputs.add(iter.next());
+ toMergeInputs.add(iter.next());
iter.remove();
}
- LOG.info(getName() + ": Starting merge with " + this.inputs.size() +
+ LOG.info(getName() + ": Starting merge with " + toMergeInputs.size() +
" segments, while ignoring " + inputs.size() + " segments");
- notifyAll();
+ synchronized(pendingToBeMerged) {
+ pendingToBeMerged.addLast(toMergeInputs);
+ pendingToBeMerged.notifyAll();
+ }
}
}
public synchronized void waitForMerge() throws InterruptedException {
- while (inProgress) {
+ while (numPending.get() > 0) {
wait();
}
}
public void run() {
while (true) {
+ List<T> inputs = null;
try {
// Wait for notification to start the merge...
- synchronized (this) {
- while (!inProgress) {
- wait();
+ synchronized (pendingToBeMerged) {
+ while(pendingToBeMerged.size() <= 0) {
+ pendingToBeMerged.wait();
}
+ // Pickup the inputs to merge.
+ inputs = pendingToBeMerged.removeFirst();
}
// Merge
merge(inputs);
} catch (InterruptedException ie) {
+ numPending.set(0);
return;
} catch(Throwable t) {
+ numPending.set(0);
reporter.reportException(t);
return;
} finally {
synchronized (this) {
- // Clear inputs
- inputs = null;
- inProgress = false;
+ numPending.decrementAndGet();
notifyAll();
}
}
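MergeThread thus moves from a single in-flight slot guarded by an inProgress flag to a small producer/consumer queue: startMerge() appends a batch, run() drains them in order, and the numPending counter backs waitForMerge(). A self-contained sketch of the same pattern (not the Hadoop class itself):

    import java.util.LinkedList;
    import java.util.List;
    import java.util.concurrent.atomic.AtomicInteger;

    class BatchWorker<T> extends Thread {
      private final LinkedList<List<T>> pending = new LinkedList<List<T>>();
      private final AtomicInteger numPending = new AtomicInteger(0);

      void submit(List<T> batch) {
        numPending.incrementAndGet();
        synchronized (pending) {
          pending.addLast(batch); // queue instead of overwriting one slot
          pending.notifyAll();
        }
      }

      synchronized void waitForAll() throws InterruptedException {
        while (numPending.get() > 0) {
          wait();
        }
      }

      @Override
      public void run() {
        while (true) {
          List<T> batch;
          try {
            synchronized (pending) {
              while (pending.isEmpty()) {
                pending.wait(); // idle until submit() queues a batch
              }
              batch = pending.removeFirst();
            }
          } catch (InterruptedException ie) {
            return; // shutdown path
          }
          process(batch);
          synchronized (this) {
            numPending.decrementAndGet();
            notifyAll(); // release anyone blocked in waitForAll()
          }
        }
      }

      void process(List<T> batch) {
        System.out.println("merged " + batch.size() + " segments");
      }
    }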
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Shuffle.java Fri Jan 11 19:40:23 2013
@@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.MRJob
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.util.Progress;
-@InterfaceAudience.LimitedPrivate("mapreduce")
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
@InterfaceStability.Unstable
@SuppressWarnings({"unchecked", "rawtypes"})
public class Shuffle<K, V> implements ShuffleConsumerPlugin<K, V>, ExceptionReporter {
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleClientMetrics.java Fri Jan 11 19:40:23 2013
@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.mapreduce.task.reduce;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TaskAttemptID;
@@ -25,7 +28,9 @@ import org.apache.hadoop.metrics.Metrics
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;
-class ShuffleClientMetrics implements Updater {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public class ShuffleClientMetrics implements Updater {
private MetricsRecord shuffleMetrics = null;
private int numFailedFetches = 0;
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/LinuxResourceCalculatorPlugin.java Fri Jan 11 19:40:23 2013
@@ -409,7 +409,7 @@ public class LinuxResourceCalculatorPlug
@Override
public ProcResourceValues getProcResourceValues() {
- pTree = pTree.getProcessTree();
+ pTree.updateProcessTree();
long cpuTime = pTree.getCumulativeCpuTime();
long pMem = pTree.getCumulativeRssmem();
long vMem = pTree.getCumulativeVmem();
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ProcfsBasedProcessTree.java Fri Jan 11 19:40:23 2013
@@ -166,12 +166,10 @@ public class ProcfsBasedProcessTree exte
}
/**
- * Get the process-tree with latest state. If the root-process is not alive,
- * an empty tree will be returned.
- *
- * @return the process-tree with latest state.
+ * Update the process-tree with latest state. If the root-process is not alive,
+ * the tree becomes empty.
*/
- public ProcfsBasedProcessTree getProcessTree() {
+ public void updateProcessTree() {
if (!pid.equals(deadPid)) {
// Get the list of processes
List<String> processList = getProcessList();
@@ -197,7 +195,7 @@ public class ProcfsBasedProcessTree exte
}
if (me == null) {
- return this;
+ return;
}
// Add each process to its parent.
@@ -239,7 +237,6 @@ public class ProcfsBasedProcessTree exte
LOG.debug(this.toString());
}
}
- return this;
}
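The rename from getProcessTree() to updateProcessTree() turns an accessor into a mutator, so every caller switches from reassignment to refresh-then-read, exactly as the LinuxResourceCalculatorPlugin hunk above does. A before/after sketch; constructing from a literal pid string is illustrative only:

    import org.apache.hadoop.mapreduce.util.ProcfsBasedProcessTree;

    public class ProcfsSketch {
      public static void main(String[] args) {
        ProcfsBasedProcessTree pTree = new ProcfsBasedProcessTree("12345");
        // Before: pTree = pTree.getProcessTree();
        // After: refresh in place, then read the aggregated values.
        pTree.updateProcessTree();
        long cpu = pTree.getCumulativeCpuTime();
        long rss = pTree.getCumulativeRssmem();
        long vmem = pTree.getCumulativeVmem();
        System.out.println(cpu + " " + rss + " " + vmem);
      }
    }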
/**
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml Fri Jan 11 19:40:23 2013
@@ -938,4 +938,12 @@
<value>jhs/_HOST@REALM.TLD</value>
</property>
+<property>
+ <name>mapreduce.job.map.output.collector.class</name>
+ <value>org.apache.hadoop.mapred.MapTask$MapOutputBuffer</value>
+ <description>
+ The MapOutputCollector implementation to use.
+ </description>
+</property>
+
</configuration>
Propchange: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:r1419191-1426018
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java?rev=1432246&r1=1432245&r2=1432246&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java Fri Jan 11 19:40:23 2013
@@ -78,12 +78,12 @@ public class HsJobsBlock extends HtmlBlo
.append(dateFormat.format(new Date(job.getFinishTime()))).append("\",\"")
.append("<a href='").append(url("job", job.getId())).append("'>")
.append(job.getId()).append("</a>\",\"")
- .append(StringEscapeUtils.escapeHtml(job.getName()))
- .append("\",\"")
- .append(StringEscapeUtils.escapeHtml(job.getUserName()))
- .append("\",\"")
- .append(StringEscapeUtils.escapeHtml(job.getQueueName()))
- .append("\",\"")
+ .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+ job.getName()))).append("\",\"")
+ .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+ job.getUserName()))).append("\",\"")
+ .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+ job.getQueueName()))).append("\",\"")
.append(job.getState()).append("\",\"")
.append(String.valueOf(job.getMapsTotal())).append("\",\"")
.append(String.valueOf(job.getMapsCompleted())).append("\",\"")
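HsJobsBlock double-escapes because these strings land inside a JavaScript array literal that is itself embedded in the HTML page: escapeHtml alone still lets a quote terminate the JS string early. A small demonstration with the commons-lang methods used in the diff:

    import org.apache.commons.lang.StringEscapeUtils;

    public class EscapeDemo {
      public static void main(String[] args) {
        String jobName = "evil\",\"<script>alert(1)</script>";
        // HTML-escape first, then JS-escape the result, so neither the
        // markup nor the JS string literal can be broken out of.
        String safe = StringEscapeUtils.escapeJavaScript(
            StringEscapeUtils.escapeHtml(jobName));
        System.out.println("[\"" + safe + "\"]");
      }
    }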