You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by su...@apache.org on 2013/01/03 22:24:08 UTC
svn commit: r1428601 [1/2] - in
/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project: ./ conf/
hadoop-mapreduce-client/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/
hadoop-mapreduce-client/had...
Author: suresh
Date: Thu Jan 3 21:23:58 2013
New Revision: 1428601
URL: http://svn.apache.org/viewvc?rev=1428601&view=rev
Log:
Merging trunk changes to branch-trunk-win
Added:
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventType.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventType.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterJobAbortEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterJobAbortEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterJobCommitEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterJobCommitEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterJobSetupEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterJobSetupEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterTaskAbortEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterTaskAbortEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/package-info.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/package-info.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobAbortCompletedEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobAbortCompletedEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobCommitCompletedEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobCommitCompletedEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobCommitFailedEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobCommitFailedEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobSetupCompletedEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobSetupCompletedEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobSetupFailedEvent.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobSetupFailedEvent.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/RehashPartitioner.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/RehashPartitioner.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/partition/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/partition/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestRehashPartitioner.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestRehashPartitioner.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMergeManager.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMergeManager.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/pom.xml
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/pom.xml
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/MapReduceTrackingUriPlugin.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/MapReduceTrackingUriPlugin.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/
- copied from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
- copied unchanged from r1428155, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
Removed:
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/taskclean/
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/ (props changed)
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/CHANGES.txt (contents, props changed)
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/conf/ (props changed)
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/JobStateInternal.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobEventType.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/MapTaskImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ReduceTaskImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttemptContainerRequest.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/ClusterStatus.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeManager.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MergeThread.java
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (contents, props changed)
hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1423068-1428155
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/CHANGES.txt?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/CHANGES.txt Thu Jan 3 21:23:58 2013
@@ -16,6 +16,9 @@ Trunk (Unreleased)
MAPREDUCE-4807. Allow MapOutputBuffer to be pluggable. (masokan via tucu)
+ MAPREDUCE-4887. Add RehashPartitioner, to smooth distributions
+ with poor implementations of Object#hashCode(). (Radim Kolar via cutting)
+
IMPROVEMENTS
MAPREDUCE-3787. [Gridmix] Optimize job monitoring and STRESS mode for
@@ -78,6 +81,9 @@ Trunk (Unreleased)
BUG FIXES
+ MAPREDUCE-4272. SortedRanges.Range#compareTo is not spec compliant.
+ (Yu Gao via llu)
+
MAPREDUCE-4356. [Rumen] Provide access to the method
ParsedTask.obtainTaskAttempts(). (ravigummadi)
@@ -154,6 +160,9 @@ Trunk (Unreleased)
MAPREDUCE-4574. Fix TotalOrderPartitioner to work with
non-WritableComparable key types. (harsh)
+ MAPREDUCE-4884. Streaming tests fail to start MiniMRCluster due to missing
+ queue configuration. (Chris Nauroth via suresh)
+
Release 2.0.3-alpha - Unreleased
INCOMPATIBLE CHANGES
@@ -179,6 +188,13 @@ Release 2.0.3-alpha - Unreleased
MAPREDUCE-4703. Add the ability to start the MiniMRClientCluster using
the configurations used before it is being stopped. (ahmed.radwan via tucu)
+ MAPREDUCE-4845. ClusterStatus.getMaxMemory() and getUsedMemory() exist in
+ MR1 but not MR2. (Sandy Ryza via tomwhite)
+
+ MAPREDUCE-4899. Implemented a MR specific plugin for tracking finished
+ applications that YARN's ResourceManager doesn't keep track of anymore
+ (Derek Dagit via vinodkv)
+
OPTIMIZATIONS
BUG FIXES
@@ -624,6 +640,22 @@ Release 0.23.6 - UNRELEASED
MAPREDUCE-4836. Elapsed time for running tasks on AM web UI tasks page is 0
(Ravi Prakash via jeagles)
+ MAPREDUCE-4842. Shuffle race can hang reducer (Mariappan Asokan via jlowe)
+
+ MAPREDUCE-4833. Task can get stuck in FAIL_CONTAINER_CLEANUP (Robert
+ Parker via jlowe)
+
+ MAPREDUCE-4793. Problem with adding resources when using both -files and
+ -file to hadoop streaming (jlowe)
+
+ MAPREDUCE-4890. Invalid TaskImpl state transitions when task fails while
+ speculating (jlowe)
+
+ MAPREDUCE-4902. Fix typo "receievd" should be "received" in log output
+ (Albert Chu via jlowe)
+
+ MAPREDUCE-4813. AM timing out during job commit (jlowe via bobby)
+
Release 0.23.5 - UNRELEASED
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1423068-1428155
Propchange: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/conf:r1423068-1428155
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java Thu Jan 3 21:23:58 2013
@@ -20,7 +20,6 @@ package org.apache.hadoop.mapred;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
@@ -42,12 +41,12 @@ public class MapTaskAttemptImpl extends
EventHandler eventHandler, Path jobFile,
int partition, TaskSplitMetaInfo splitInfo, JobConf conf,
TaskAttemptListener taskAttemptListener,
- OutputCommitter committer, Token<JobTokenIdentifier> jobToken,
+ Token<JobTokenIdentifier> jobToken,
Credentials credentials, Clock clock,
AppContext appContext) {
super(taskId, attempt, eventHandler,
taskAttemptListener, jobFile, partition, conf, splitInfo.getLocations(),
- committer, jobToken, credentials, clock, appContext);
+ jobToken, credentials, clock, appContext);
this.splitInfo = splitInfo;
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java Thu Jan 3 21:23:58 2013
@@ -20,7 +20,6 @@ package org.apache.hadoop.mapred;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
@@ -40,12 +39,12 @@ public class ReduceTaskAttemptImpl exten
public ReduceTaskAttemptImpl(TaskId id, int attempt,
EventHandler eventHandler, Path jobFile, int partition,
int numMapTasks, JobConf conf,
- TaskAttemptListener taskAttemptListener, OutputCommitter committer,
+ TaskAttemptListener taskAttemptListener,
Token<JobTokenIdentifier> jobToken,
Credentials credentials, Clock clock,
AppContext appContext) {
super(id, attempt, eventHandler, taskAttemptListener, jobFile, partition,
- conf, new String[] {}, committer, jobToken, credentials, clock,
+ conf, new String[] {}, jobToken, credentials, clock,
appContext);
this.numMapTasks = numMapTasks;
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java Thu Jan 3 21:23:58 2013
@@ -62,6 +62,9 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEvent;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventHandler;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventType;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -87,8 +90,6 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.speculate.DefaultSpeculator;
import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
-import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleaner;
-import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanerImpl;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.security.Credentials;
@@ -162,7 +163,7 @@ public class MRAppMaster extends Composi
private Recovery recoveryServ;
private ContainerAllocator containerAllocator;
private ContainerLauncher containerLauncher;
- private TaskCleaner taskCleaner;
+ private EventHandler<CommitterEvent> committerEventHandler;
private Speculator speculator;
private TaskAttemptListener taskAttemptListener;
private JobTokenSecretManager jobTokenSecretManager =
@@ -268,8 +269,8 @@ public class MRAppMaster extends Composi
addIfService(taskAttemptListener);
//service to do the task cleanup
- taskCleaner = createTaskCleaner(context);
- addIfService(taskCleaner);
+ committerEventHandler = createCommitterEventHandler(context, committer);
+ addIfService(committerEventHandler);
//service to handle requests from JobClient
clientService = createClientService(context);
@@ -288,7 +289,7 @@ public class MRAppMaster extends Composi
dispatcher.register(TaskEventType.class, new TaskEventDispatcher());
dispatcher.register(TaskAttemptEventType.class,
new TaskAttemptEventDispatcher());
- dispatcher.register(TaskCleaner.EventType.class, taskCleaner);
+ dispatcher.register(CommitterEventType.class, committerEventHandler);
if (conf.getBoolean(MRJobConfig.MAP_SPECULATIVE, false)
|| conf.getBoolean(MRJobConfig.REDUCE_SPECULATIVE, false)) {
@@ -493,7 +494,7 @@ public class MRAppMaster extends Composi
Job newJob =
new JobImpl(jobId, appAttemptID, conf, dispatcher.getEventHandler(),
taskAttemptListener, jobTokenSecretManager, fsTokens, clock,
- completedTasksFromPreviousRun, metrics, committer, newApiCommitter,
+ completedTasksFromPreviousRun, metrics, newApiCommitter,
currentUser.getUserName(), appSubmitTime, amInfos, context);
((RunningAppContext) context).jobs.put(newJob.getID(), newJob);
@@ -585,8 +586,9 @@ public class MRAppMaster extends Composi
return lis;
}
- protected TaskCleaner createTaskCleaner(AppContext context) {
- return new TaskCleanerImpl(context);
+ protected EventHandler<CommitterEvent> createCommitterEventHandler(
+ AppContext context, OutputCommitter committer) {
+ return new CommitterEventHandler(context, committer);
}
protected ContainerAllocator createContainerAllocator(
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/JobStateInternal.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/JobStateInternal.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/JobStateInternal.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/JobStateInternal.java Thu Jan 3 21:23:58 2013
@@ -20,11 +20,15 @@ package org.apache.hadoop.mapreduce.v2.a
public enum JobStateInternal {
NEW,
+ SETUP,
INITED,
RUNNING,
+ COMMITTING,
SUCCEEDED,
+ FAIL_ABORT,
FAILED,
KILL_WAIT,
+ KILL_ABORT,
KILLED,
ERROR
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobEventType.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobEventType.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobEventType.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/event/JobEventType.java Thu Jan 3 21:23:58 2013
@@ -35,6 +35,13 @@ public enum JobEventType {
JOB_MAP_TASK_RESCHEDULED,
JOB_TASK_ATTEMPT_COMPLETED,
+ //Producer:CommitterEventHandler
+ JOB_SETUP_COMPLETED,
+ JOB_SETUP_FAILED,
+ JOB_COMMIT_COMPLETED,
+ JOB_COMMIT_FAILED,
+ JOB_ABORT_COMPLETED,
+
//Producer:Job
JOB_COMPLETED,
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java Thu Jan 3 21:23:58 2013
@@ -47,7 +47,6 @@ import org.apache.hadoop.mapreduce.Count
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobFinishedEvent;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
@@ -77,14 +76,20 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterJobAbortEvent;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterJobCommitEvent;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterJobSetupEvent;
import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.job.event.JobAbortCompletedEvent;
+import org.apache.hadoop.mapreduce.v2.app.job.event.JobCommitFailedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobDiagnosticsUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent;
+import org.apache.hadoop.mapreduce.v2.app.job.event.JobSetupFailedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptCompletedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptFetchFailureEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent;
@@ -138,7 +143,6 @@ public class JobImpl implements org.apac
private final Clock clock;
private final JobACLsManager aclsManager;
private final String username;
- private final OutputCommitter committer;
private final Map<JobACL, AccessControlList> jobACLs;
private float setupWeight = 0.05f;
private float cleanupWeight = 0.05f;
@@ -176,6 +180,7 @@ public class JobImpl implements org.apac
private Counters fullCounters = null;
private Counters finalMapCounters = null;
private Counters finalReduceCounters = null;
+
// FIXME:
//
// Can then replace task-level uber counters (MR-2424) with job-level ones
@@ -245,7 +250,7 @@ public class JobImpl implements org.apac
DIAGNOSTIC_UPDATE_TRANSITION)
.addTransition(JobStateInternal.INITED, JobStateInternal.INITED,
JobEventType.JOB_COUNTER_UPDATE, COUNTER_UPDATE_TRANSITION)
- .addTransition(JobStateInternal.INITED, JobStateInternal.RUNNING,
+ .addTransition(JobStateInternal.INITED, JobStateInternal.SETUP,
JobEventType.JOB_START,
new StartTransition())
.addTransition(JobStateInternal.INITED, JobStateInternal.KILLED,
@@ -257,19 +262,43 @@ public class JobImpl implements org.apac
// Ignore-able events
.addTransition(JobStateInternal.INITED, JobStateInternal.INITED,
JobEventType.JOB_UPDATED_NODES)
-
+
+ // Transitions from SETUP state
+ .addTransition(JobStateInternal.SETUP, JobStateInternal.SETUP,
+ JobEventType.JOB_DIAGNOSTIC_UPDATE,
+ DIAGNOSTIC_UPDATE_TRANSITION)
+ .addTransition(JobStateInternal.SETUP, JobStateInternal.SETUP,
+ JobEventType.JOB_COUNTER_UPDATE, COUNTER_UPDATE_TRANSITION)
+ .addTransition(JobStateInternal.SETUP, JobStateInternal.RUNNING,
+ JobEventType.JOB_SETUP_COMPLETED,
+ new SetupCompletedTransition())
+ .addTransition(JobStateInternal.SETUP, JobStateInternal.FAIL_ABORT,
+ JobEventType.JOB_SETUP_FAILED,
+ new SetupFailedTransition())
+ .addTransition(JobStateInternal.SETUP, JobStateInternal.KILL_ABORT,
+ JobEventType.JOB_KILL,
+ new KilledDuringSetupTransition())
+ .addTransition(JobStateInternal.SETUP, JobStateInternal.ERROR,
+ JobEventType.INTERNAL_ERROR,
+ INTERNAL_ERROR_TRANSITION)
+ // Ignore-able events
+ .addTransition(JobStateInternal.SETUP, JobStateInternal.SETUP,
+ JobEventType.JOB_UPDATED_NODES)
+
// Transitions from RUNNING state
.addTransition(JobStateInternal.RUNNING, JobStateInternal.RUNNING,
JobEventType.JOB_TASK_ATTEMPT_COMPLETED,
TASK_ATTEMPT_COMPLETED_EVENT_TRANSITION)
.addTransition
(JobStateInternal.RUNNING,
- EnumSet.of(JobStateInternal.RUNNING, JobStateInternal.SUCCEEDED, JobStateInternal.FAILED),
+ EnumSet.of(JobStateInternal.RUNNING,
+ JobStateInternal.COMMITTING, JobStateInternal.FAIL_ABORT),
JobEventType.JOB_TASK_COMPLETED,
new TaskCompletedTransition())
.addTransition
(JobStateInternal.RUNNING,
- EnumSet.of(JobStateInternal.RUNNING, JobStateInternal.SUCCEEDED, JobStateInternal.FAILED),
+ EnumSet.of(JobStateInternal.RUNNING,
+ JobStateInternal.COMMITTING),
JobEventType.JOB_COMPLETED,
new JobNoTasksCompletedTransition())
.addTransition(JobStateInternal.RUNNING, JobStateInternal.KILL_WAIT,
@@ -296,7 +325,8 @@ public class JobImpl implements org.apac
// Transitions from KILL_WAIT state.
.addTransition
(JobStateInternal.KILL_WAIT,
- EnumSet.of(JobStateInternal.KILL_WAIT, JobStateInternal.KILLED),
+ EnumSet.of(JobStateInternal.KILL_WAIT,
+ JobStateInternal.KILL_ABORT),
JobEventType.JOB_TASK_COMPLETED,
new KillWaitTaskCompletedTransition())
.addTransition(JobStateInternal.KILL_WAIT, JobStateInternal.KILL_WAIT,
@@ -318,6 +348,35 @@ public class JobImpl implements org.apac
JobEventType.JOB_MAP_TASK_RESCHEDULED,
JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE))
+ // Transitions from COMMITTING state
+ .addTransition(JobStateInternal.COMMITTING,
+ JobStateInternal.SUCCEEDED,
+ JobEventType.JOB_COMMIT_COMPLETED,
+ new CommitSucceededTransition())
+ .addTransition(JobStateInternal.COMMITTING,
+ JobStateInternal.FAIL_ABORT,
+ JobEventType.JOB_COMMIT_FAILED,
+ new CommitFailedTransition())
+ .addTransition(JobStateInternal.COMMITTING,
+ JobStateInternal.KILL_ABORT,
+ JobEventType.JOB_KILL,
+ new KilledDuringCommitTransition())
+ .addTransition(JobStateInternal.COMMITTING,
+ JobStateInternal.COMMITTING,
+ JobEventType.JOB_DIAGNOSTIC_UPDATE,
+ DIAGNOSTIC_UPDATE_TRANSITION)
+ .addTransition(JobStateInternal.COMMITTING,
+ JobStateInternal.COMMITTING,
+ JobEventType.JOB_COUNTER_UPDATE, COUNTER_UPDATE_TRANSITION)
+ .addTransition(JobStateInternal.COMMITTING,
+ JobStateInternal.ERROR, JobEventType.INTERNAL_ERROR,
+ INTERNAL_ERROR_TRANSITION)
+ // Ignore-able events
+ .addTransition(JobStateInternal.COMMITTING,
+ JobStateInternal.COMMITTING,
+ EnumSet.of(JobEventType.JOB_UPDATED_NODES,
+ JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE))
+
// Transitions from SUCCEEDED state
.addTransition(JobStateInternal.SUCCEEDED, JobStateInternal.SUCCEEDED,
JobEventType.JOB_DIAGNOSTIC_UPDATE,
@@ -334,6 +393,61 @@ public class JobImpl implements org.apac
JobEventType.JOB_UPDATED_NODES,
JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE))
+ // Transitions from FAIL_ABORT state
+ .addTransition(JobStateInternal.FAIL_ABORT,
+ JobStateInternal.FAIL_ABORT,
+ JobEventType.JOB_DIAGNOSTIC_UPDATE,
+ DIAGNOSTIC_UPDATE_TRANSITION)
+ .addTransition(JobStateInternal.FAIL_ABORT,
+ JobStateInternal.FAIL_ABORT,
+ JobEventType.JOB_COUNTER_UPDATE, COUNTER_UPDATE_TRANSITION)
+ .addTransition(JobStateInternal.FAIL_ABORT, JobStateInternal.FAILED,
+ JobEventType.JOB_ABORT_COMPLETED,
+ new JobAbortCompletedTransition())
+ .addTransition(JobStateInternal.FAIL_ABORT, JobStateInternal.KILLED,
+ JobEventType.JOB_KILL,
+ new KilledDuringAbortTransition())
+ .addTransition(JobStateInternal.FAIL_ABORT,
+ JobStateInternal.ERROR, JobEventType.INTERNAL_ERROR,
+ INTERNAL_ERROR_TRANSITION)
+ // Ignore-able events
+ .addTransition(JobStateInternal.FAIL_ABORT,
+ JobStateInternal.FAIL_ABORT,
+ EnumSet.of(JobEventType.JOB_UPDATED_NODES,
+ JobEventType.JOB_TASK_COMPLETED,
+ JobEventType.JOB_TASK_ATTEMPT_COMPLETED,
+ JobEventType.JOB_MAP_TASK_RESCHEDULED,
+ JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE,
+ JobEventType.JOB_COMMIT_COMPLETED,
+ JobEventType.JOB_COMMIT_FAILED))
+
+ // Transitions from KILL_ABORT state
+ .addTransition(JobStateInternal.KILL_ABORT,
+ JobStateInternal.KILL_ABORT,
+ JobEventType.JOB_DIAGNOSTIC_UPDATE,
+ DIAGNOSTIC_UPDATE_TRANSITION)
+ .addTransition(JobStateInternal.KILL_ABORT,
+ JobStateInternal.KILL_ABORT,
+ JobEventType.JOB_COUNTER_UPDATE, COUNTER_UPDATE_TRANSITION)
+ .addTransition(JobStateInternal.KILL_ABORT, JobStateInternal.KILLED,
+ JobEventType.JOB_ABORT_COMPLETED,
+ new JobAbortCompletedTransition())
+ .addTransition(JobStateInternal.KILL_ABORT, JobStateInternal.KILLED,
+ JobEventType.JOB_KILL,
+ new KilledDuringAbortTransition())
+ .addTransition(JobStateInternal.KILL_ABORT,
+ JobStateInternal.ERROR, JobEventType.INTERNAL_ERROR,
+ INTERNAL_ERROR_TRANSITION)
+ // Ignore-able events
+ .addTransition(JobStateInternal.KILL_ABORT,
+ JobStateInternal.KILL_ABORT,
+ EnumSet.of(JobEventType.JOB_UPDATED_NODES,
+ JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE,
+ JobEventType.JOB_SETUP_COMPLETED,
+ JobEventType.JOB_SETUP_FAILED,
+ JobEventType.JOB_COMMIT_COMPLETED,
+ JobEventType.JOB_COMMIT_FAILED))
+
// Transitions from FAILED state
.addTransition(JobStateInternal.FAILED, JobStateInternal.FAILED,
JobEventType.JOB_DIAGNOSTIC_UPDATE,
@@ -351,7 +465,12 @@ public class JobImpl implements org.apac
JobEventType.JOB_TASK_COMPLETED,
JobEventType.JOB_TASK_ATTEMPT_COMPLETED,
JobEventType.JOB_MAP_TASK_RESCHEDULED,
- JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE))
+ JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE,
+ JobEventType.JOB_SETUP_COMPLETED,
+ JobEventType.JOB_SETUP_FAILED,
+ JobEventType.JOB_COMMIT_COMPLETED,
+ JobEventType.JOB_COMMIT_FAILED,
+ JobEventType.JOB_ABORT_COMPLETED))
// Transitions from KILLED state
.addTransition(JobStateInternal.KILLED, JobStateInternal.KILLED,
@@ -366,8 +485,14 @@ public class JobImpl implements org.apac
// Ignore-able events
.addTransition(JobStateInternal.KILLED, JobStateInternal.KILLED,
EnumSet.of(JobEventType.JOB_KILL,
+ JobEventType.JOB_START,
JobEventType.JOB_UPDATED_NODES,
- JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE))
+ JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE,
+ JobEventType.JOB_SETUP_COMPLETED,
+ JobEventType.JOB_SETUP_FAILED,
+ JobEventType.JOB_COMMIT_COMPLETED,
+ JobEventType.JOB_COMMIT_FAILED,
+ JobEventType.JOB_ABORT_COMPLETED))
// No transitions from INTERNAL_ERROR state. Ignore all.
.addTransition(
@@ -381,6 +506,11 @@ public class JobImpl implements org.apac
JobEventType.JOB_DIAGNOSTIC_UPDATE,
JobEventType.JOB_UPDATED_NODES,
JobEventType.JOB_TASK_ATTEMPT_FETCH_FAILURE,
+ JobEventType.JOB_SETUP_COMPLETED,
+ JobEventType.JOB_SETUP_FAILED,
+ JobEventType.JOB_COMMIT_COMPLETED,
+ JobEventType.JOB_COMMIT_FAILED,
+ JobEventType.JOB_ABORT_COMPLETED,
JobEventType.INTERNAL_ERROR))
.addTransition(JobStateInternal.ERROR, JobStateInternal.ERROR,
JobEventType.JOB_COUNTER_UPDATE, COUNTER_UPDATE_TRANSITION)
@@ -417,7 +547,7 @@ public class JobImpl implements org.apac
JobTokenSecretManager jobTokenSecretManager,
Credentials fsTokenCredentials, Clock clock,
Map<TaskId, TaskInfo> completedTasksFromPreviousRun, MRAppMetrics metrics,
- OutputCommitter committer, boolean newApiCommitter, String userName,
+ boolean newApiCommitter, String userName,
long appSubmitTime, List<AMInfo> amInfos, AppContext appContext) {
this.applicationAttemptId = applicationAttemptId;
this.jobId = jobId;
@@ -442,7 +572,6 @@ public class JobImpl implements org.apac
this.fsTokens = fsTokenCredentials;
this.jobTokenSecretManager = jobTokenSecretManager;
- this.committer = committer;
this.aclsManager = new JobACLsManager(conf);
this.username = System.getProperty("user.name");
@@ -461,11 +590,6 @@ public class JobImpl implements org.apac
return jobId;
}
- // Getter methods that make unit testing easier (package-scoped)
- OutputCommitter getCommitter() {
- return this.committer;
- }
-
EventHandler getEventHandler() {
return this.eventHandler;
}
@@ -751,9 +875,16 @@ public class JobImpl implements org.apac
}
private static JobState getExternalState(JobStateInternal smState) {
- if (smState == JobStateInternal.KILL_WAIT) {
+ switch (smState) {
+ case KILL_WAIT:
+ case KILL_ABORT:
return JobState.KILLED;
- } else {
+ case SETUP:
+ case COMMITTING:
+ return JobState.RUNNING;
+ case FAIL_ABORT:
+ return JobState.FAILED;
+ default:
return JobState.valueOf(smState.name());
}
}
@@ -799,22 +930,15 @@ public class JobImpl implements org.apac
return FileSystem.get(conf);
}
- static JobStateInternal checkJobCompleteSuccess(JobImpl job) {
- // check for Job success
- if (job.completedTaskCount == job.tasks.size()) {
- try {
- // Commit job & do cleanup
- job.getCommitter().commitJob(job.getJobContext());
- } catch (IOException e) {
- LOG.error("Could not do commit for Job", e);
- job.addDiagnostic("Job commit failed: " + e.getMessage());
- job.abortJob(org.apache.hadoop.mapreduce.JobStatus.State.FAILED);
- return job.finished(JobStateInternal.FAILED);
- }
- job.logJobHistoryFinishedEvent();
- return job.finished(JobStateInternal.SUCCEEDED);
+ protected JobStateInternal checkReadyForCommit() {
+ JobStateInternal currentState = getInternalState();
+ if (completedTaskCount == tasks.size()
+ && currentState == JobStateInternal.RUNNING) {
+ eventHandler.handle(new CommitterJobCommitEvent(jobId, getJobContext()));
+ return JobStateInternal.COMMITTING;
}
- return null;
+ // return the current state as job not ready to commit yet
+ return getInternalState();
}
JobStateInternal finished(JobStateInternal finalState) {
@@ -1104,25 +1228,21 @@ public class JobImpl implements org.apac
job.allowedReduceFailuresPercent =
job.conf.getInt(MRJobConfig.REDUCE_FAILURES_MAXPERCENT, 0);
- // do the setup
- job.committer.setupJob(job.jobContext);
- job.setupProgress = 1.0f;
-
// create the Tasks but don't start them yet
createMapTasks(job, inputLength, taskSplitMetaInfo);
createReduceTasks(job);
job.metrics.endPreparingJob(job);
return JobStateInternal.INITED;
- //TODO XXX Should JobInitedEvent be generated here (instead of in StartTransition)
-
} catch (IOException e) {
LOG.warn("Job init failed", e);
+ job.metrics.endPreparingJob(job);
job.addDiagnostic("Job init failed : "
+ StringUtils.stringifyException(e));
- job.abortJob(org.apache.hadoop.mapreduce.JobStatus.State.FAILED);
- job.metrics.endPreparingJob(job);
- return job.finished(JobStateInternal.FAILED);
+ job.eventHandler.handle(new CommitterJobAbortEvent(job.jobId,
+ job.jobContext,
+ org.apache.hadoop.mapreduce.JobStatus.State.FAILED));
+ return JobStateInternal.FAILED;
}
}
@@ -1174,7 +1294,7 @@ public class JobImpl implements org.apac
job.remoteJobConfFile,
job.conf, splits[i],
job.taskAttemptListener,
- job.committer, job.jobToken, job.fsTokens,
+ job.jobToken, job.fsTokens,
job.clock, job.completedTasksFromPreviousRun,
job.applicationAttemptId.getAttemptId(),
job.metrics, job.appContext);
@@ -1191,7 +1311,7 @@ public class JobImpl implements org.apac
job.eventHandler,
job.remoteJobConfFile,
job.conf, job.numMapTasks,
- job.taskAttemptListener, job.committer, job.jobToken,
+ job.taskAttemptListener, job.jobToken,
job.fsTokens, job.clock,
job.completedTasksFromPreviousRun,
job.applicationAttemptId.getAttemptId(),
@@ -1224,6 +1344,35 @@ public class JobImpl implements org.apac
}
} // end of InitTransition
+ private static class SetupCompletedTransition
+ implements SingleArcTransition<JobImpl, JobEvent> {
+ @Override
+ public void transition(JobImpl job, JobEvent event) {
+ job.setupProgress = 1.0f;
+ job.scheduleTasks(job.mapTasks); // schedule (i.e., start) the maps
+ job.scheduleTasks(job.reduceTasks);
+
+ // If we have no tasks, just transition to job completed
+ if (job.numReduceTasks == 0 && job.numMapTasks == 0) {
+ job.eventHandler.handle(new JobEvent(job.jobId,
+ JobEventType.JOB_COMPLETED));
+ }
+ }
+ }
+
+ private static class SetupFailedTransition
+ implements SingleArcTransition<JobImpl, JobEvent> {
+ @Override
+ public void transition(JobImpl job, JobEvent event) {
+ job.metrics.endRunningJob(job);
+ job.addDiagnostic("Job setup failed : "
+ + ((JobSetupFailedEvent) event).getMessage());
+ job.eventHandler.handle(new CommitterJobAbortEvent(job.jobId,
+ job.jobContext,
+ org.apache.hadoop.mapreduce.JobStatus.State.FAILED));
+ }
+ }
+
public static class StartTransition
implements SingleArcTransition<JobImpl, JobEvent> {
/**
@@ -1233,43 +1382,45 @@ public class JobImpl implements org.apac
@Override
public void transition(JobImpl job, JobEvent event) {
job.startTime = job.clock.getTime();
- job.scheduleTasks(job.mapTasks); // schedule (i.e., start) the maps
- job.scheduleTasks(job.reduceTasks);
JobInitedEvent jie =
new JobInitedEvent(job.oldJobId,
job.startTime,
job.numMapTasks, job.numReduceTasks,
job.getState().toString(),
- job.isUber()); //Will transition to state running. Currently in INITED
+ job.isUber());
job.eventHandler.handle(new JobHistoryEvent(job.jobId, jie));
JobInfoChangeEvent jice = new JobInfoChangeEvent(job.oldJobId,
job.appSubmitTime, job.startTime);
job.eventHandler.handle(new JobHistoryEvent(job.jobId, jice));
job.metrics.runningJob(job);
- // If we have no tasks, just transition to job completed
- if (job.numReduceTasks == 0 && job.numMapTasks == 0) {
- job.eventHandler.handle(new JobEvent(job.jobId, JobEventType.JOB_COMPLETED));
- }
+ job.eventHandler.handle(new CommitterJobSetupEvent(
+ job.jobId, job.jobContext));
}
}
- protected void abortJob(
- org.apache.hadoop.mapreduce.JobStatus.State finalState) {
- try {
- committer.abortJob(jobContext, finalState);
- } catch (IOException e) {
- LOG.warn("Could not abortJob", e);
+ private void unsuccessfulFinish(JobStateInternal finalState) {
+ if (finishTime == 0) setFinishTime();
+ cleanupProgress = 1.0f;
+ JobUnsuccessfulCompletionEvent unsuccessfulJobEvent =
+ new JobUnsuccessfulCompletionEvent(oldJobId,
+ finishTime,
+ succeededMapTaskCount,
+ succeededReduceTaskCount,
+ finalState.toString());
+ eventHandler.handle(new JobHistoryEvent(jobId,
+ unsuccessfulJobEvent));
+ finished(finalState);
+ }
+
+ private static class JobAbortCompletedTransition
+ implements SingleArcTransition<JobImpl, JobEvent> {
+ @Override
+ public void transition(JobImpl job, JobEvent event) {
+ JobStateInternal finalState = JobStateInternal.valueOf(
+ ((JobAbortCompletedEvent) event).getFinalState().name());
+ job.unsuccessfulFinish(finalState);
}
- if (finishTime == 0) setFinishTime();
- cleanupProgress = 1.0f;
- JobUnsuccessfulCompletionEvent unsuccessfulJobEvent =
- new JobUnsuccessfulCompletionEvent(oldJobId,
- finishTime,
- succeededMapTaskCount,
- succeededReduceTaskCount,
- finalState.toString());
- eventHandler.handle(new JobHistoryEvent(jobId, unsuccessfulJobEvent));
}
// JobFinishedEvent triggers the move of the history file out of the staging
@@ -1343,9 +1494,22 @@ public class JobImpl implements org.apac
implements SingleArcTransition<JobImpl, JobEvent> {
@Override
public void transition(JobImpl job, JobEvent event) {
- job.abortJob(org.apache.hadoop.mapreduce.JobStatus.State.KILLED);
job.addDiagnostic("Job received Kill in INITED state.");
- job.finished(JobStateInternal.KILLED);
+ job.eventHandler.handle(new CommitterJobAbortEvent(job.jobId,
+ job.jobContext,
+ org.apache.hadoop.mapreduce.JobStatus.State.KILLED));
+ }
+ }
+
+ private static class KilledDuringSetupTransition
+ implements SingleArcTransition<JobImpl, JobEvent> {
+ @Override
+ public void transition(JobImpl job, JobEvent event) {
+ job.metrics.endRunningJob(job);
+ job.addDiagnostic("Job received kill in SETUP state.");
+ job.eventHandler.handle(new CommitterJobAbortEvent(job.jobId,
+ job.jobContext,
+ org.apache.hadoop.mapreduce.JobStatus.State.KILLED));
}
}
@@ -1470,10 +1634,10 @@ public class JobImpl implements org.apac
taskKilled(job, task);
}
- return checkJobForCompletion(job);
+ return checkJobAfterTaskCompletion(job);
}
- protected JobStateInternal checkJobForCompletion(JobImpl job) {
+ protected JobStateInternal checkJobAfterTaskCompletion(JobImpl job) {
//check for Job failure
if (job.failedMapTaskCount*100 >
job.allowedMapFailuresPercent*job.numMapTasks ||
@@ -1486,17 +1650,13 @@ public class JobImpl implements org.apac
" failedReduces:" + job.failedReduceTaskCount;
LOG.info(diagnosticMsg);
job.addDiagnostic(diagnosticMsg);
- job.abortJob(org.apache.hadoop.mapreduce.JobStatus.State.FAILED);
- return job.finished(JobStateInternal.FAILED);
- }
-
- JobStateInternal jobCompleteSuccess = JobImpl.checkJobCompleteSuccess(job);
- if (jobCompleteSuccess != null) {
- return jobCompleteSuccess;
+ job.eventHandler.handle(new CommitterJobAbortEvent(job.jobId,
+ job.jobContext,
+ org.apache.hadoop.mapreduce.JobStatus.State.FAILED));
+ return JobStateInternal.FAIL_ABORT;
}
- //return the current state, Job not finished yet
- return job.getInternalState();
+ return job.checkReadyForCommit();
}
private void taskSucceeded(JobImpl job, Task task) {
@@ -1529,18 +1689,52 @@ public class JobImpl implements org.apac
}
// Transition class for handling jobs with no tasks
- static class JobNoTasksCompletedTransition implements
+ private static class JobNoTasksCompletedTransition implements
MultipleArcTransition<JobImpl, JobEvent, JobStateInternal> {
@Override
public JobStateInternal transition(JobImpl job, JobEvent event) {
- JobStateInternal jobCompleteSuccess = JobImpl.checkJobCompleteSuccess(job);
- if (jobCompleteSuccess != null) {
- return jobCompleteSuccess;
- }
-
- // Return the current state, Job not finished yet
- return job.getInternalState();
+ return job.checkReadyForCommit();
+ }
+ }
+
+ private static class CommitSucceededTransition implements
+ SingleArcTransition<JobImpl, JobEvent> {
+ @Override
+ public void transition(JobImpl job, JobEvent event) {
+ job.logJobHistoryFinishedEvent();
+ job.finished(JobStateInternal.SUCCEEDED);
+ }
+ }
+
+ private static class CommitFailedTransition implements
+ SingleArcTransition<JobImpl, JobEvent> {
+ @Override
+ public void transition(JobImpl job, JobEvent event) {
+ JobCommitFailedEvent jcfe = (JobCommitFailedEvent)event;
+ job.addDiagnostic("Job commit failed: " + jcfe.getMessage());
+ job.eventHandler.handle(new CommitterJobAbortEvent(job.jobId,
+ job.jobContext,
+ org.apache.hadoop.mapreduce.JobStatus.State.FAILED));
+ }
+ }
+
+ private static class KilledDuringCommitTransition implements
+ SingleArcTransition<JobImpl, JobEvent> {
+ @Override
+ public void transition(JobImpl job, JobEvent event) {
+ job.setFinishTime();
+ job.eventHandler.handle(new CommitterJobAbortEvent(job.jobId,
+ job.jobContext,
+ org.apache.hadoop.mapreduce.JobStatus.State.KILLED));
+ }
+ }
+
+ private static class KilledDuringAbortTransition implements
+ SingleArcTransition<JobImpl, JobEvent> {
+ @Override
+ public void transition(JobImpl job, JobEvent event) {
+ job.unsuccessfulFinish(JobStateInternal.KILLED);
}
}
@@ -1557,11 +1751,13 @@ public class JobImpl implements org.apac
private static class KillWaitTaskCompletedTransition extends
TaskCompletedTransition {
@Override
- protected JobStateInternal checkJobForCompletion(JobImpl job) {
+ protected JobStateInternal checkJobAfterTaskCompletion(JobImpl job) {
if (job.completedTaskCount == job.tasks.size()) {
job.setFinishTime();
- job.abortJob(org.apache.hadoop.mapreduce.JobStatus.State.KILLED);
- return job.finished(JobStateInternal.KILLED);
+ job.eventHandler.handle(new CommitterJobAbortEvent(job.jobId,
+ job.jobContext,
+ org.apache.hadoop.mapreduce.JobStatus.State.KILLED));
+ return JobStateInternal.KILL_ABORT;
}
//return the current state, Job not finished yet
return job.getInternalState();
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/MapTaskImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/MapTaskImpl.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/MapTaskImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/MapTaskImpl.java Thu Jan 3 21:23:58 2013
@@ -24,7 +24,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapTaskAttemptImpl;
import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
@@ -47,13 +46,13 @@ public class MapTaskImpl extends TaskImp
public MapTaskImpl(JobId jobId, int partition, EventHandler eventHandler,
Path remoteJobConfFile, JobConf conf,
TaskSplitMetaInfo taskSplitMetaInfo,
- TaskAttemptListener taskAttemptListener, OutputCommitter committer,
+ TaskAttemptListener taskAttemptListener,
Token<JobTokenIdentifier> jobToken,
Credentials credentials, Clock clock,
Map<TaskId, TaskInfo> completedTasksFromPreviousRun, int startCount,
MRAppMetrics metrics, AppContext appContext) {
super(jobId, TaskType.MAP, partition, eventHandler, remoteJobConfFile,
- conf, taskAttemptListener, committer, jobToken, credentials, clock,
+ conf, taskAttemptListener, jobToken, credentials, clock,
completedTasksFromPreviousRun, startCount, metrics, appContext);
this.taskSplitMetaInfo = taskSplitMetaInfo;
}
@@ -68,7 +67,7 @@ public class MapTaskImpl extends TaskImp
return new MapTaskAttemptImpl(getID(), nextAttemptNumber,
eventHandler, jobFile,
partition, taskSplitMetaInfo, conf, taskAttemptListener,
- committer, jobToken, credentials, clock, appContext);
+ jobToken, credentials, clock, appContext);
}
@Override
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ReduceTaskImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ReduceTaskImpl.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ReduceTaskImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ReduceTaskImpl.java Thu Jan 3 21:23:58 2013
@@ -24,7 +24,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.ReduceTaskAttemptImpl;
import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -46,12 +45,12 @@ public class ReduceTaskImpl extends Task
public ReduceTaskImpl(JobId jobId, int partition,
EventHandler eventHandler, Path jobFile, JobConf conf,
int numMapTasks, TaskAttemptListener taskAttemptListener,
- OutputCommitter committer, Token<JobTokenIdentifier> jobToken,
+ Token<JobTokenIdentifier> jobToken,
Credentials credentials, Clock clock,
Map<TaskId, TaskInfo> completedTasksFromPreviousRun, int startCount,
MRAppMetrics metrics, AppContext appContext) {
super(jobId, TaskType.REDUCE, partition, eventHandler, jobFile, conf,
- taskAttemptListener, committer, jobToken, credentials, clock,
+ taskAttemptListener, jobToken, credentials, clock,
completedTasksFromPreviousRun, startCount, metrics, appContext);
this.numMapTasks = numMapTasks;
}
@@ -66,7 +65,7 @@ public class ReduceTaskImpl extends Task
return new ReduceTaskAttemptImpl(getID(), nextAttemptNumber,
eventHandler, jobFile,
partition, numMapTasks, conf, taskAttemptListener,
- committer, jobToken, credentials, clock, appContext);
+ jobToken, credentials, clock, appContext);
}
@Override
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java Thu Jan 3 21:23:58 2013
@@ -39,7 +39,6 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -57,7 +56,6 @@ import org.apache.hadoop.mapreduce.Count
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.MRJobConfig;
-import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.TypeConverter;
@@ -76,6 +74,7 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterTaskAbortEvent;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttemptStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobDiagnosticsUpdateEvent;
@@ -99,7 +98,6 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent;
import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
-import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanupEvent;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
@@ -157,7 +155,6 @@ public abstract class TaskAttemptImpl im
private final Clock clock;
private final org.apache.hadoop.mapred.JobID oldJobId;
private final TaskAttemptListener taskAttemptListener;
- private final OutputCommitter committer;
private final Resource resourceCapability;
private final String[] dataLocalHosts;
private final List<String> diagnostics = new ArrayList<String>();
@@ -501,7 +498,7 @@ public abstract class TaskAttemptImpl im
public TaskAttemptImpl(TaskId taskId, int i,
EventHandler eventHandler,
TaskAttemptListener taskAttemptListener, Path jobFile, int partition,
- JobConf conf, String[] dataLocalHosts, OutputCommitter committer,
+ JobConf conf, String[] dataLocalHosts,
Token<JobTokenIdentifier> jobToken,
Credentials credentials, Clock clock,
AppContext appContext) {
@@ -525,7 +522,6 @@ public abstract class TaskAttemptImpl im
this.credentials = credentials;
this.jobToken = jobToken;
this.eventHandler = eventHandler;
- this.committer = committer;
this.jobFile = jobFile;
this.partition = partition;
@@ -1436,10 +1432,8 @@ public abstract class TaskAttemptImpl im
TaskAttemptContext taskContext =
new TaskAttemptContextImpl(taskAttempt.conf,
TypeConverter.fromYarn(taskAttempt.attemptId));
- taskAttempt.eventHandler.handle(new TaskCleanupEvent(
- taskAttempt.attemptId,
- taskAttempt.committer,
- taskContext));
+ taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
+ taskAttempt.attemptId, taskContext));
}
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java Thu Jan 3 21:23:58 2013
@@ -37,7 +37,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.MRConfig;
-import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
@@ -100,7 +99,6 @@ public abstract class TaskImpl implement
protected final JobConf conf;
protected final Path jobFile;
- protected final OutputCommitter committer;
protected final int partition;
protected final TaskAttemptListener taskAttemptListener;
protected final EventHandler eventHandler;
@@ -231,7 +229,12 @@ public abstract class TaskImpl implement
// Transitions from FAILED state
.addTransition(TaskStateInternal.FAILED, TaskStateInternal.FAILED,
EnumSet.of(TaskEventType.T_KILL,
- TaskEventType.T_ADD_SPEC_ATTEMPT))
+ TaskEventType.T_ADD_SPEC_ATTEMPT,
+ TaskEventType.T_ATTEMPT_COMMIT_PENDING,
+ TaskEventType.T_ATTEMPT_FAILED,
+ TaskEventType.T_ATTEMPT_KILLED,
+ TaskEventType.T_ATTEMPT_LAUNCHED,
+ TaskEventType.T_ATTEMPT_SUCCEEDED))
// Transitions from KILLED state
.addTransition(TaskStateInternal.KILLED, TaskStateInternal.KILLED,
@@ -273,7 +276,7 @@ public abstract class TaskImpl implement
public TaskImpl(JobId jobId, TaskType taskType, int partition,
EventHandler eventHandler, Path remoteJobConfFile, JobConf conf,
- TaskAttemptListener taskAttemptListener, OutputCommitter committer,
+ TaskAttemptListener taskAttemptListener,
Token<JobTokenIdentifier> jobToken,
Credentials credentials, Clock clock,
Map<TaskId, TaskInfo> completedTasksFromPreviousRun, int startCount,
@@ -296,7 +299,6 @@ public abstract class TaskImpl implement
this.partition = partition;
this.taskAttemptListener = taskAttemptListener;
this.eventHandler = eventHandler;
- this.committer = committer;
this.credentials = credentials;
this.jobToken = jobToken;
this.metrics = metrics;
@@ -941,6 +943,13 @@ public abstract class TaskImpl implement
task.handleTaskAttemptCompletion(
taskAttemptId,
TaskAttemptCompletionEventStatus.TIPFAILED);
+
+ // issue kill to all non finished attempts
+ for (TaskAttempt taskAttempt : task.attempts.values()) {
+ task.killUnfinishedAttempt
+ (taskAttempt, "Task has failed. Killing attempt!");
+ }
+ task.inProgressAttempts.clear();
if (task.historyTaskStartGenerated) {
TaskFailedEvent taskFailedEvent = createTaskFailedEvent(task, attempt.getDiagnostics(),
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java Thu Jan 3 21:23:58 2013
@@ -191,12 +191,9 @@ public class ContainerLauncherImpl exten
@SuppressWarnings("unchecked")
public synchronized void kill() {
- if(isCompletelyDone()) {
- return;
- }
if(this.state == ContainerState.PREP) {
this.state = ContainerState.KILLED_BEFORE_LAUNCH;
- } else {
+ } else if (!isCompletelyDone()) {
LOG.info("KILLING " + taskAttemptID);
ContainerManager proxy = null;
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java Thu Jan 3 21:23:58 2013
@@ -48,6 +48,8 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.app.ControlledClock;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterTaskAbortEvent;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobDiagnosticsUpdateEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
@@ -65,8 +67,6 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
-import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleaner;
-import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanupEvent;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.yarn.Clock;
@@ -339,8 +339,8 @@ public class RecoveryService extends Com
return;
}
- else if (event.getType() == TaskCleaner.EventType.TASK_CLEAN) {
- TaskAttemptId aId = ((TaskCleanupEvent) event).getAttemptID();
+ else if (event.getType() == CommitterEventType.TASK_ABORT) {
+ TaskAttemptId aId = ((CommitterTaskAbortEvent) event).getAttemptID();
LOG.debug("TASK_CLEAN");
actualHandler.handle(new TaskAttemptEvent(aId,
TaskAttemptEventType.TA_CLEANUP_DONE));
Modified: hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java Thu Jan 3 21:23:58 2013
@@ -32,9 +32,12 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.WrappedJvmID;
+import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.JobStatus.State;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.OutputCommitter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
import org.apache.hadoop.mapreduce.jobhistory.NormalizedResourceEvent;
@@ -49,6 +52,8 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEvent;
+import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventHandler;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
@@ -69,8 +74,6 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
-import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleaner;
-import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanupEvent;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.net.NetUtils;
@@ -394,8 +397,7 @@ public class MRApp extends MRAppMaster {
Job newJob = new TestJob(getJobId(), getAttemptID(), conf,
getDispatcher().getEventHandler(),
getTaskAttemptListener(), getContext().getClock(),
- getCommitter(), isNewApiCommitter(),
- currentUser.getUserName(), getContext());
+ isNewApiCommitter(), currentUser.getUserName(), getContext());
((AppContext) getContext()).getAllJobs().put(newJob.getID(), newJob);
getDispatcher().register(JobFinishEvent.Type.class,
@@ -515,16 +517,56 @@ public class MRApp extends MRAppMaster {
}
@Override
- protected TaskCleaner createTaskCleaner(AppContext context) {
- return new TaskCleaner() {
+ protected EventHandler<CommitterEvent> createCommitterEventHandler(
+ AppContext context, final OutputCommitter committer) {
+ // create an output committer with the task methods stubbed out
+ OutputCommitter stubbedCommitter = new OutputCommitter() {
@Override
- public void handle(TaskCleanupEvent event) {
- //send the cleanup done event
- getContext().getEventHandler().handle(
- new TaskAttemptEvent(event.getAttemptID(),
- TaskAttemptEventType.TA_CLEANUP_DONE));
+ public void setupJob(JobContext jobContext) throws IOException {
+ committer.setupJob(jobContext);
+ }
+ @SuppressWarnings("deprecation")
+ @Override
+ public void cleanupJob(JobContext jobContext) throws IOException {
+ committer.cleanupJob(jobContext);
+ }
+ @Override
+ public void commitJob(JobContext jobContext) throws IOException {
+ committer.commitJob(jobContext);
+ }
+ @Override
+ public void abortJob(JobContext jobContext, State state)
+ throws IOException {
+ committer.abortJob(jobContext, state);
+ }
+ @Override
+ public boolean isRecoverySupported() {
+ return committer.isRecoverySupported();
+ }
+ @Override
+ public void setupTask(TaskAttemptContext taskContext)
+ throws IOException {
+ }
+ @Override
+ public boolean needsTaskCommit(TaskAttemptContext taskContext)
+ throws IOException {
+ return false;
+ }
+ @Override
+ public void commitTask(TaskAttemptContext taskContext)
+ throws IOException {
+ }
+ @Override
+ public void abortTask(TaskAttemptContext taskContext)
+ throws IOException {
+ }
+ @Override
+ public void recoverTask(TaskAttemptContext taskContext)
+ throws IOException {
}
};
+
+ return new CommitterEventHandler(context, stubbedCommitter);
}
@Override
@@ -576,12 +618,11 @@ public class MRApp extends MRAppMaster {
public TestJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
Configuration conf, EventHandler eventHandler,
TaskAttemptListener taskAttemptListener, Clock clock,
- OutputCommitter committer, boolean newApiCommitter, String user,
- AppContext appContext) {
+ boolean newApiCommitter, String user, AppContext appContext) {
super(jobId, getApplicationAttemptId(applicationId, getStartCount()),
conf, eventHandler, taskAttemptListener,
new JobTokenSecretManager(), new Credentials(), clock,
- getCompletedTaskFromPreviousRun(), metrics, committer,
+ getCompletedTaskFromPreviousRun(), metrics,
newApiCommitter, user, System.currentTimeMillis(), getAllAMInfos(),
appContext);