Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2015/12/17 00:12:58 UTC

hadoop git commit: Revert "MAPREDUCE-6566. Add retry support to mapreduce CLI tool. Contributed by Varun Vasudev"

Repository: hadoop
Updated Branches:
  refs/heads/branch-2 36781a3a5 -> 74a011d73


Revert "MAPREDUCE-6566. Add retry support to mapreduce CLI tool. Contributed by Varun Vasudev"

This reverts commit 3b10ff6a6f7becea181506c306a52973602b0481.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/74a011d7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/74a011d7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/74a011d7

Branch: refs/heads/branch-2
Commit: 74a011d73abef1d33bbcd8e9203fc6aafea6d3ea
Parents: 36781a3
Author: Vinod Kumar Vavilapalli <vi...@apache.org>
Authored: Wed Dec 16 15:12:24 2015 -0800
Committer: Vinod Kumar Vavilapalli <vi...@apache.org>
Committed: Wed Dec 16 15:12:24 2015 -0800

----------------------------------------------------------------------
 hadoop-mapreduce-project/CHANGES.txt            |  3 --
 .../org/apache/hadoop/mapreduce/tools/CLI.java  | 41 ++++----------------
 .../apache/hadoop/mapreduce/tools/TestCLI.java  | 31 +--------------
 3 files changed, 10 insertions(+), 65 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/74a011d7/hadoop-mapreduce-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 499840b..36d95f9 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -380,7 +380,6 @@ Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
 
-
   NEW FEATURES
 
   IMPROVEMENTS
@@ -456,8 +455,6 @@ Release 2.7.2 - UNRELEASED
     MAPREDUCE-6451. DistCp has incorrect chunkFilePath for multiple jobs when
     strategy is dynamic (Kuhu Shukla via kihwal)
 
-    MAPREDUCE-6566. Add retry support to mapreduce CLI tool. (Varun Vasudev via xgong)
-
 Release 2.7.1 - 2015-07-06 
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/74a011d7/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
index 79fea2c..86fc3c8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
@@ -26,7 +26,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -44,7 +43,6 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.JobPriority;
 import org.apache.hadoop.mapreduce.JobStatus;
-import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskCompletionEvent;
 import org.apache.hadoop.mapreduce.TaskReport;
@@ -270,7 +268,7 @@ public class CLI extends Configured implements Tool {
         System.out.println("Created job " + job.getJobID());
         exitCode = 0;
       } else if (getStatus) {
-        Job job = getJob(JobID.forName(jobid));
+        Job job = cluster.getJob(JobID.forName(jobid));
         if (job == null) {
           System.out.println("Could not find job " + jobid);
         } else {
@@ -285,7 +283,7 @@ public class CLI extends Configured implements Tool {
           exitCode = 0;
         }
       } else if (getCounter) {
-        Job job = getJob(JobID.forName(jobid));
+        Job job = cluster.getJob(JobID.forName(jobid));
         if (job == null) {
           System.out.println("Could not find job " + jobid);
         } else {
@@ -301,7 +299,7 @@ public class CLI extends Configured implements Tool {
           }
         }
       } else if (killJob) {
-        Job job = getJob(JobID.forName(jobid));
+        Job job = cluster.getJob(JobID.forName(jobid));
         if (job == null) {
           System.out.println("Could not find job " + jobid);
         } else {
@@ -325,7 +323,7 @@ public class CLI extends Configured implements Tool {
           }
         }
       } else if (setJobPriority) {
-        Job job = getJob(JobID.forName(jobid));
+        Job job = cluster.getJob(JobID.forName(jobid));
         if (job == null) {
           System.out.println("Could not find job " + jobid);
         } else {
@@ -341,7 +339,7 @@ public class CLI extends Configured implements Tool {
         viewHistory(historyFile, viewAllHistory);
         exitCode = 0;
       } else if (listEvents) {
-        listEvents(getJob(JobID.forName(jobid)), fromEvent, nEvents);
+        listEvents(cluster.getJob(JobID.forName(jobid)), fromEvent, nEvents);
         exitCode = 0;
       } else if (listJobs) {
         listJobs(cluster);
@@ -356,11 +354,11 @@ public class CLI extends Configured implements Tool {
         listBlacklistedTrackers(cluster);
         exitCode = 0;
       } else if (displayTasks) {
-        displayTasks(getJob(JobID.forName(jobid)), taskType, taskState);
+        displayTasks(cluster.getJob(JobID.forName(jobid)), taskType, taskState);
         exitCode = 0;
       } else if(killTask) {
         TaskAttemptID taskID = TaskAttemptID.forName(taskid);
-        Job job = getJob(taskID.getJobID());
+        Job job = cluster.getJob(taskID.getJobID());
         if (job == null) {
           System.out.println("Could not find job " + jobid);
         } else if (job.killTask(taskID, false)) {
@@ -372,7 +370,7 @@ public class CLI extends Configured implements Tool {
         }
       } else if(failTask) {
         TaskAttemptID taskID = TaskAttemptID.forName(taskid);
-        Job job = getJob(taskID.getJobID());
+        Job job = cluster.getJob(taskID.getJobID());
         if (job == null) {
             System.out.println("Could not find job " + jobid);
         } else if(job.killTask(taskID, true)) {
@@ -533,29 +531,6 @@ public class CLI extends Configured implements Tool {
   protected static String getTaskLogURL(TaskAttemptID taskId, String baseUrl) {
     return (baseUrl + "/tasklog?plaintext=true&attemptid=" + taskId); 
   }
-
-  @VisibleForTesting
-  Job getJob(JobID jobid) throws IOException, InterruptedException {
-
-    int maxRetry = getConf().getInt(MRJobConfig.MR_CLIENT_JOB_MAX_RETRIES,
-        MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES);
-    long retryInterval = getConf()
-        .getLong(MRJobConfig.MR_CLIENT_JOB_RETRY_INTERVAL,
-            MRJobConfig.DEFAULT_MR_CLIENT_JOB_RETRY_INTERVAL);
-    Job job = cluster.getJob(jobid);
-
-    for (int i = 0; i < maxRetry; ++i) {
-      if (job != null) {
-        return job;
-      }
-      LOG.info("Could not obtain job info after " + String.valueOf(i + 1)
-          + " attempt(s). Sleeping for " + String.valueOf(retryInterval / 1000)
-          + " seconds and retrying.");
-      Thread.sleep(retryInterval);
-      job = cluster.getJob(jobid);
-    }
-    return job;
-  }
   
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/74a011d7/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java
index 73f57d5..fdc916e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java
@@ -20,19 +20,14 @@ package org.apache.hadoop.mapreduce.tools;
 import static org.junit.Assert.*;
 
 import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskReport;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.JobPriority;
 import org.apache.hadoop.mapreduce.JobStatus;
 import org.apache.hadoop.mapreduce.JobStatus.State;
-import org.apache.hadoop.util.Time;
-import org.junit.Assert;
 import org.junit.Test;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -49,7 +44,7 @@ public class TestCLI {
     JobID jobId = JobID.forName(jobIdStr);
     Cluster mockCluster = mock(Cluster.class);
     Job job = mock(Job.class);
-    CLI cli = spy(new CLI(new Configuration()));
+    CLI cli = spy(new CLI());
 
     doReturn(mockCluster).when(cli).createCluster();
     when(job.getTaskReports(TaskType.MAP)).thenReturn(
@@ -117,7 +112,7 @@ public class TestCLI {
   @Test
   public void testJobKIll() throws Exception {
     Cluster mockCluster = mock(Cluster.class);
-    CLI cli = spy(new CLI(new Configuration()));
+    CLI cli = spy(new CLI());
     doReturn(mockCluster).when(cli).createCluster();
     String jobId1 = "job_1234654654_001";
     String jobId2 = "job_1234654654_002";
@@ -154,26 +149,4 @@ public class TestCLI {
     when(mockJob.getStatus()).thenReturn(status);
     return mockJob;
   }
-
-  @Test
-  public void testGetJob() throws Exception {
-    Configuration conf = new Configuration();
-    long sleepTime = 100;
-    conf.setLong(MRJobConfig.MR_CLIENT_JOB_RETRY_INTERVAL, sleepTime);
-    Cluster mockCluster = mock(Cluster.class);
-    JobID jobId1 = JobID.forName("job_1234654654_001");
-    when(mockCluster.getJob(jobId1)).thenReturn(null);
-
-    for (int i = 0; i < 2; ++i) {
-      conf.setInt(MRJobConfig.MR_CLIENT_JOB_MAX_RETRIES, i);
-      CLI cli = spy(new CLI(conf));
-      cli.cluster = mockCluster;
-      doReturn(mockCluster).when(cli).createCluster();
-      long start = Time.monotonicNow();
-      cli.getJob(jobId1);
-      long end = Time.monotonicNow();
-      Assert.assertTrue(end - start > (i * sleepTime));
-      Assert.assertTrue(end - start < ((i + 1) * sleepTime));
-    }
-  }
 }
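
For reference, the retry wrapper that this commit reverts can be reconstructed from the deleted lines in CLI.java above. A minimal sketch of that pattern follows, assuming the surrounding CLI class supplies getConf(), the cluster field, and LOG exactly as shown in the diff; it is not part of branch-2 after this revert.

  // Sketch reconstructed from the removed lines of CLI.java above.
  // Assumes the MRJobConfig retry constants and the CLI class's
  // getConf(), cluster, and LOG members shown in the diff.
  @VisibleForTesting
  Job getJob(JobID jobid) throws IOException, InterruptedException {
    int maxRetry = getConf().getInt(MRJobConfig.MR_CLIENT_JOB_MAX_RETRIES,
        MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES);
    long retryInterval = getConf().getLong(
        MRJobConfig.MR_CLIENT_JOB_RETRY_INTERVAL,
        MRJobConfig.DEFAULT_MR_CLIENT_JOB_RETRY_INTERVAL);
    Job job = cluster.getJob(jobid);

    // Re-query the cluster up to maxRetry times, sleeping between attempts,
    // then return whatever was last fetched (possibly null).
    for (int i = 0; i < maxRetry; ++i) {
      if (job != null) {
        return job;
      }
      LOG.info("Could not obtain job info after " + (i + 1)
          + " attempt(s). Sleeping for " + (retryInterval / 1000)
          + " seconds and retrying.");
      Thread.sleep(retryInterval);
      job = cluster.getJob(jobid);
    }
    return job;
  }

With the revert applied, callers go back to calling cluster.getJob(JobID.forName(jobid)) directly, as the CLI.java hunks above show.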