Posted to common-commits@hadoop.apache.org by ge...@apache.org on 2015/04/21 23:06:14 UTC

[1/3] hadoop git commit: MAPREDUCE-6293. Set job classloader on uber-job's LocalContainerLauncher event thread. (Sangjin Lee via gera)

Repository: hadoop
Updated Branches:
  refs/heads/trunk 105afd547 -> 89ded89e8


MAPREDUCE-6293. Set job classloader on uber-job's LocalContainerLauncher event thread. (Sangjin Lee via gera)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/725eb52d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/725eb52d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/725eb52d

Branch: refs/heads/trunk
Commit: 725eb52ddc647074f0bf1cc73c3029f1352f51d5
Parents: 105afd5
Author: Gera Shegalov <ge...@apache.org>
Authored: Tue Apr 21 11:46:35 2015 -0700
Committer: Gera Shegalov <ge...@apache.org>
Committed: Tue Apr 21 13:57:22 2015 -0700

----------------------------------------------------------------------
 hadoop-mapreduce-project/CHANGES.txt            |  3 +++
 .../hadoop/mapred/LocalContainerLauncher.java   | 20 ++++++++++++++++++++
 .../hadoop/mapreduce/v2/app/MRAppMaster.java    |  2 +-
 .../apache/hadoop/mapreduce/v2/util/MRApps.java |  2 +-
 .../apache/hadoop/mapreduce/v2/TestMRJobs.java  |  9 +++++++++
 5 files changed, 34 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
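
For context, a minimal sketch (not part of the commit) of the job setup this change targets: a job that runs "ubered" inside the MRAppMaster with the isolated job classloader enabled, so user classes must be resolvable from the LocalContainerLauncher event thread. The class name and the omitted mapper/reducer wiring are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;

public class UberJobClassLoaderExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // run the whole job inside the application master (uber mode)...
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, true);        // mapreduce.job.ubertask.enable
    // ...with an isolated classloader for user classes
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);  // mapreduce.job.classloader
    Job job = Job.getInstance(conf, "uber-with-job-classloader");
    job.setJarByClass(UberJobClassLoaderExample.class);
    // mapper/reducer, input/output formats and paths omitted for brevity
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}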


http://git-wip-us.apache.org/repos/asf/hadoop/blob/725eb52d/hadoop-mapreduce-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index ffa01fa..0cf5c4b 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -337,6 +337,9 @@ Release 2.8.0 - UNRELEASED
     MAPREDUCE-6238. MR2 can't run local jobs with -libjars command options
     which is a regression from MR1 (zxu via rkanter)
 
+    MAPREDUCE-6293. Set job classloader on uber-job's LocalContainerLauncher
+    event thread. (Sangjin Lee via gera)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/725eb52d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
index 218ac83..ffc5326 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
@@ -80,6 +80,7 @@ public class LocalContainerLauncher extends AbstractService implements
   private final HashSet<File> localizedFiles;
   private final AppContext context;
   private final TaskUmbilicalProtocol umbilical;
+  private final ClassLoader jobClassLoader;
   private ExecutorService taskRunner;
   private Thread eventHandler;
   private BlockingQueue<ContainerLauncherEvent> eventQueue =
@@ -87,6 +88,12 @@ public class LocalContainerLauncher extends AbstractService implements
 
   public LocalContainerLauncher(AppContext context,
                                 TaskUmbilicalProtocol umbilical) {
+    this(context, umbilical, null);
+  }
+
+  public LocalContainerLauncher(AppContext context,
+                                TaskUmbilicalProtocol umbilical,
+                                ClassLoader jobClassLoader) {
     super(LocalContainerLauncher.class.getName());
     this.context = context;
     this.umbilical = umbilical;
@@ -94,6 +101,7 @@ public class LocalContainerLauncher extends AbstractService implements
         // (TODO/FIXME:  pointless to use RPC to talk to self; should create
         // LocalTaskAttemptListener or similar:  implement umbilical protocol
         // but skip RPC stuff)
+    this.jobClassLoader = jobClassLoader;
 
     try {
       curFC = FileContext.getFileContext(curDir.toURI());
@@ -133,6 +141,18 @@ public class LocalContainerLauncher extends AbstractService implements
             setDaemon(true).setNameFormat("uber-SubtaskRunner").build());
     // create and start an event handling thread
     eventHandler = new Thread(new EventHandler(), "uber-EventHandler");
+    // if the job classloader is specified, set it onto the event handler as the
+    // thread context classloader so that it can be used by the event handler
+    // as well as the subtask runner threads
+    if (jobClassLoader != null) {
+      LOG.info("Setting " + jobClassLoader +
+          " as the context classloader of thread " + eventHandler.getName());
+      eventHandler.setContextClassLoader(jobClassLoader);
+    } else {
+      // note the current TCCL
+      LOG.info("Context classloader of thread " + eventHandler.getName() +
+          ": " + eventHandler.getContextClassLoader());
+    }
     eventHandler.start();
     super.serviceStart();
   }
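
The fix relies on standard JDK thread semantics: a context classloader set on a thread before start() is what that thread sees via Thread.currentThread().getContextClassLoader(), and threads created from it (such as the uber-SubtaskRunner pool workers spawned while handling events) inherit that loader. A minimal, self-contained sketch of that behavior; the loader and class names below are illustrative only, not part of the patch.

import java.net.URL;
import java.net.URLClassLoader;

public class TcclInheritanceDemo {
  public static void main(String[] args) throws Exception {
    // stands in for the MapReduce job classloader
    ClassLoader jobClassLoader =
        new URLClassLoader(new URL[0], TcclInheritanceDemo.class.getClassLoader());

    Thread eventHandler = new Thread(() -> {
      // the loader set below, before start(), is visible here...
      System.out.println("event thread TCCL:   "
          + Thread.currentThread().getContextClassLoader());
      // ...and is inherited by threads this thread creates
      new Thread(() -> System.out.println("subtask thread TCCL: "
          + Thread.currentThread().getContextClassLoader()), "uber-SubtaskRunner").start();
    }, "uber-EventHandler");

    eventHandler.setContextClassLoader(jobClassLoader);
    eventHandler.start();
    eventHandler.join();
  }
}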

http://git-wip-us.apache.org/repos/asf/hadoop/blob/725eb52d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
index 5d3ad5b..9908ea5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
@@ -889,7 +889,7 @@ public class MRAppMaster extends CompositeService {
     protected void serviceStart() throws Exception {
       if (job.isUber()) {
         this.containerLauncher = new LocalContainerLauncher(context,
-            (TaskUmbilicalProtocol) taskAttemptListener);
+            (TaskUmbilicalProtocol) taskAttemptListener, jobClassLoader);
       } else {
         this.containerLauncher = new ContainerLauncherImpl(context);
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/725eb52d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
index e4b43b5..6b115b3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
@@ -378,7 +378,7 @@ public class MRApps extends Apps {
   public static void setClassLoader(ClassLoader classLoader,
       Configuration conf) {
     if (classLoader != null) {
-      LOG.info("Setting classloader " + classLoader.getClass().getName() +
+      LOG.info("Setting classloader " + classLoader +
           " on the configuration and as the thread context classloader");
       conf.setClassLoader(classLoader);
       Thread.currentThread().setContextClassLoader(classLoader);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/725eb52d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
index 60e5638..2973c39 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
@@ -997,6 +997,15 @@ public class TestMRJobs {
         throws IOException, InterruptedException {
       super.setup(context);
       final Configuration conf = context.getConfiguration();
+      // check if the job classloader is enabled and verify the TCCL
+      if (conf.getBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, false)) {
+        ClassLoader tccl = Thread.currentThread().getContextClassLoader();
+        if (!(tccl instanceof ApplicationClassLoader)) {
+          throw new IOException("TCCL expected: " +
+              ApplicationClassLoader.class.getName() + ", actual: " +
+              tccl.getClass().getName());
+        }
+      }
       final String ioSortMb = conf.get(MRJobConfig.IO_SORT_MB);
       if (!TEST_IO_SORT_MB.equals(ioSortMb)) {
         throw new IOException("io.sort.mb expected: " + TEST_IO_SORT_MB


[2/3] hadoop git commit: HADOOP-11812. Implement listLocatedStatus for ViewFileSystem to speed up split calculation (gera)

Posted by ge...@apache.org.
HADOOP-11812. Implement listLocatedStatus for ViewFileSystem to speed up split calculation (gera)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6d2cf9fb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6d2cf9fb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6d2cf9fb

Branch: refs/heads/trunk
Commit: 6d2cf9fbbd02482315a091ab07af26e40cc5134f
Parents: 725eb52
Author: Gera Shegalov <ge...@apache.org>
Authored: Tue Apr 21 11:57:42 2015 -0700
Committer: Gera Shegalov <ge...@apache.org>
Committed: Tue Apr 21 13:57:23 2015 -0700

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../org/apache/hadoop/fs/LocatedFileStatus.java |   7 +-
 .../hadoop/fs/viewfs/ChRootedFileSystem.java    |  10 +-
 .../org/apache/hadoop/fs/viewfs/InodeTree.java  |   2 +-
 .../apache/hadoop/fs/viewfs/ViewFileSystem.java |  94 ++++++++++---
 .../fs/viewfs/ViewFsLocatedFileStatus.java      | 136 +++++++++++++++++++
 .../fs/viewfs/TestChRootedFileSystem.java       |  14 ++
 .../fs/viewfs/ViewFileSystemBaseTest.java       | 108 +++++++++++----
 8 files changed, 327 insertions(+), 47 deletions(-)
----------------------------------------------------------------------
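
For context, a hedged sketch of the client-side pattern this change speeds up: split calculation needs a FileStatus plus block locations for every input file, and listLocatedStatus returns both through one iterator instead of a listStatus followed by a getFileBlockLocations call per file. The path below is a placeholder and the default file system is whatever fs.defaultFS points at.

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class ListLocatedStatusExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);  // e.g. a viewfs:// mount or HDFS
    RemoteIterator<LocatedFileStatus> it = fs.listLocatedStatus(new Path("/data"));
    while (it.hasNext()) {
      LocatedFileStatus stat = it.next();
      // block locations arrive with each status; no extra getFileBlockLocations() round trip
      for (BlockLocation loc : stat.getBlockLocations()) {
        System.out.println(stat.getPath() + " @ " + Arrays.toString(loc.getHosts()));
      }
    }
  }
}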


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d2cf9fb/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index bcbffb7..9819300 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -504,6 +504,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-11785. Reduce the number of listStatus operation in distcp
     buildListing (Zoran Dimitrijevic via Colin P. McCabe)
 
+    HADOOP-11812. Implement listLocatedStatus for ViewFileSystem to speed up
+    split calculation (gera)
+
   BUG FIXES
 
     HADOOP-10027. *Compressor_deflateBytesDirect passes instance instead of

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d2cf9fb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java
index 0136894..9e920c5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocatedFileStatus.java
@@ -32,6 +32,11 @@ import org.apache.hadoop.fs.permission.FsPermission;
 public class LocatedFileStatus extends FileStatus {
   private BlockLocation[] locations;
 
+
+  public LocatedFileStatus() {
+    super();
+  }
+
   /**
    * Constructor 
    * @param stat a file status
@@ -43,7 +48,7 @@ public class LocatedFileStatus extends FileStatus {
         stat.getBlockSize(), stat.getModificationTime(),
         stat.getAccessTime(), stat.getPermission(), stat.getOwner(),
         stat.getGroup(), null, stat.getPath(), locations);
-    if (isSymlink()) {
+    if (stat.isSymlink()) {
       setSymlink(stat.getSymlink());
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d2cf9fb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
index 9650a37..18e2391 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
@@ -37,8 +37,10 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FilterFileSystem;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.FsStatus;
+import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.XAttrSetFlag;
+import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsAction;
@@ -240,7 +242,13 @@ class ChRootedFileSystem extends FilterFileSystem {
       throws IOException {
     return super.listStatus(fullPath(f));
   }
-  
+
+  @Override
+  public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)
+      throws IOException {
+    return super.listLocatedStatus(fullPath(f));
+  }
+
   @Override
   public boolean mkdirs(final Path f, final FsPermission permission)
       throws IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d2cf9fb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
index 3047851..8c42cdf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
@@ -362,7 +362,7 @@ abstract class InodeTree<T> {
       kind = k;
       targetFileSystem = targetFs;
       resolvedPath = resolveP;
-      remainingPath = remainingP;  
+      remainingPath = remainingP;
     }
     
     // isInternalDir of path resolution completed within the mount table 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d2cf9fb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
index 0f77f47..43fe23f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
@@ -45,7 +45,10 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.FsServerDefaults;
+import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.UnsupportedFileSystemException;
 import org.apache.hadoop.fs.XAttrSetFlag;
 import org.apache.hadoop.fs.permission.AclEntry;
@@ -115,8 +118,7 @@ public class ViewFileSystem extends FileSystem {
    */
   private String getUriPath(final Path p) {
     checkPath(p);
-    String s = makeAbsolute(p).toUri().getPath();
-    return s;
+    return makeAbsolute(p).toUri().getPath();
   }
   
   private Path makeAbsolute(final Path f) {
@@ -282,7 +284,7 @@ public class ViewFileSystem extends FileSystem {
     }
     assert(res.remainingPath != null);
     return res.targetFileSystem.createNonRecursive(res.remainingPath, permission,
-         flags, bufferSize, replication, blockSize, progress);
+        flags, bufferSize, replication, blockSize, progress);
   }
   
   @Override
@@ -297,7 +299,7 @@ public class ViewFileSystem extends FileSystem {
     }
     assert(res.remainingPath != null);
     return res.targetFileSystem.create(res.remainingPath, permission,
-         overwrite, bufferSize, replication, blockSize, progress);
+        overwrite, bufferSize, replication, blockSize, progress);
   }
 
   
@@ -328,7 +330,7 @@ public class ViewFileSystem extends FileSystem {
     final InodeTree.ResolveResult<FileSystem> res = 
       fsState.resolve(getUriPath(fs.getPath()), true);
     return res.targetFileSystem.getFileBlockLocations(
-          new ViewFsFileStatus(fs, res.remainingPath), start, len);
+        new ViewFsFileStatus(fs, res.remainingPath), start, len);
   }
 
   @Override
@@ -340,24 +342,42 @@ public class ViewFileSystem extends FileSystem {
     return res.targetFileSystem.getFileChecksum(res.remainingPath);
   }
 
-  @Override
-  public FileStatus getFileStatus(final Path f) throws AccessControlException,
-      FileNotFoundException, IOException {
-    InodeTree.ResolveResult<FileSystem> res = 
-      fsState.resolve(getUriPath(f), true);
-    
-    // FileStatus#getPath is a fully qualified path relative to the root of 
+
+  private static FileStatus fixFileStatus(FileStatus orig,
+      Path qualified) throws IOException {
+    // FileStatus#getPath is a fully qualified path relative to the root of
     // target file system.
     // We need to change it to viewfs URI - relative to root of mount table.
-    
+
     // The implementors of RawLocalFileSystem were trying to be very smart.
-    // They implement FileStatus#getOwener lazily -- the object
+    // They implement FileStatus#getOwner lazily -- the object
     // returned is really a RawLocalFileSystem that expect the
     // FileStatus#getPath to be unchanged so that it can get owner when needed.
-    // Hence we need to interpose a new ViewFileSystemFileStatus that 
+    // Hence we need to interpose a new ViewFileSystemFileStatus that
     // works around.
+    if ("file".equals(orig.getPath().toUri().getScheme())) {
+      orig = wrapLocalFileStatus(orig, qualified);
+    }
+
+    orig.setPath(qualified);
+    return orig;
+  }
+
+  private static FileStatus wrapLocalFileStatus(FileStatus orig,
+      Path qualified) {
+    return orig instanceof LocatedFileStatus
+        ? new ViewFsLocatedFileStatus((LocatedFileStatus)orig, qualified)
+        : new ViewFsFileStatus(orig, qualified);
+  }
+
+
+  @Override
+  public FileStatus getFileStatus(final Path f) throws AccessControlException,
+      FileNotFoundException, IOException {
+    InodeTree.ResolveResult<FileSystem> res =
+      fsState.resolve(getUriPath(f), true);
     FileStatus status =  res.targetFileSystem.getFileStatus(res.remainingPath);
-    return new ViewFsFileStatus(status, this.makeQualified(f));
+    return fixFileStatus(status, this.makeQualified(f));
   }
   
   @Override
@@ -378,19 +398,51 @@ public class ViewFileSystem extends FileSystem {
     if (!res.isInternalDir()) {
       // We need to change the name in the FileStatus as described in
       // {@link #getFileStatus }
-      ChRootedFileSystem targetFs;
-      targetFs = (ChRootedFileSystem) res.targetFileSystem;
       int i = 0;
       for (FileStatus status : statusLst) {
-          String suffix = targetFs.stripOutRoot(status.getPath());
-          statusLst[i++] = new ViewFsFileStatus(status, this.makeQualified(
-              suffix.length() == 0 ? f : new Path(res.resolvedPath, suffix)));
+          statusLst[i++] = fixFileStatus(status,
+              getChrootedPath(res, status, f));
       }
     }
     return statusLst;
   }
 
   @Override
+  public RemoteIterator<LocatedFileStatus>listLocatedStatus(final Path f,
+      final PathFilter filter) throws FileNotFoundException, IOException {
+    final InodeTree.ResolveResult<FileSystem> res = fsState
+        .resolve(getUriPath(f), true);
+    final RemoteIterator<LocatedFileStatus> statusIter = res.targetFileSystem
+        .listLocatedStatus(res.remainingPath);
+
+    if (res.isInternalDir()) {
+      return statusIter;
+    }
+
+    return new RemoteIterator<LocatedFileStatus>() {
+      @Override
+      public boolean hasNext() throws IOException {
+        return statusIter.hasNext();
+      }
+
+      @Override
+      public LocatedFileStatus next() throws IOException {
+        final LocatedFileStatus status = statusIter.next();
+        return (LocatedFileStatus)fixFileStatus(status,
+            getChrootedPath(res, status, f));
+      }
+    };
+  }
+
+  private Path getChrootedPath(InodeTree.ResolveResult<FileSystem> res,
+      FileStatus status, Path f) throws IOException {
+    final String suffix = ((ChRootedFileSystem)res.targetFileSystem)
+        .stripOutRoot(status.getPath());
+    return this.makeQualified(
+        suffix.length() == 0 ? f : new Path(res.resolvedPath, suffix));
+  }
+
+  @Override
   public boolean mkdirs(final Path dir, final FsPermission permission)
       throws IOException {
     InodeTree.ResolveResult<FileSystem> res = 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d2cf9fb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsLocatedFileStatus.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsLocatedFileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsLocatedFileStatus.java
new file mode 100644
index 0000000..347a809
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFsLocatedFileStatus.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.viewfs;
+
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+
+import java.io.IOException;
+
+class ViewFsLocatedFileStatus extends LocatedFileStatus {
+  final LocatedFileStatus myFs;
+  Path modifiedPath;
+
+  ViewFsLocatedFileStatus(LocatedFileStatus locatedFileStatus, Path path) {
+    myFs = locatedFileStatus;
+    modifiedPath = path;
+  }
+
+  @Override
+  public long getLen() {
+    return myFs.getLen();
+  }
+
+  @Override
+  public boolean isFile() {
+    return myFs.isFile();
+  }
+
+  @Override
+  public boolean isDirectory() {
+    return myFs.isDirectory();
+  }
+
+  @Override
+  @SuppressWarnings("deprecation")
+  public boolean isDir() {
+    return myFs.isDirectory();
+  }
+
+  @Override
+  public boolean isSymlink() {
+    return myFs.isSymlink();
+  }
+
+  @Override
+  public long getBlockSize() {
+    return myFs.getBlockSize();
+  }
+
+  @Override
+  public short getReplication() {
+    return myFs.getReplication();
+  }
+
+  @Override
+  public long getModificationTime() {
+    return myFs.getModificationTime();
+  }
+
+  @Override
+  public long getAccessTime() {
+    return myFs.getAccessTime();
+  }
+
+  @Override
+  public FsPermission getPermission() {
+    return myFs.getPermission();
+  }
+
+  @Override
+  public String getOwner() {
+    return myFs.getOwner();
+  }
+
+  @Override
+  public String getGroup() {
+    return myFs.getGroup();
+  }
+
+  @Override
+  public Path getPath() {
+    return modifiedPath;
+  }
+
+  @Override
+  public void setPath(final Path p) {
+    modifiedPath = p;
+  }
+
+  @Override
+  public Path getSymlink() throws IOException {
+    return myFs.getSymlink();
+  }
+
+  @Override
+  public void setSymlink(Path p) {
+    myFs.setSymlink(p);
+  }
+
+  @Override
+  public BlockLocation[] getBlockLocations() {
+    return myFs.getBlockLocations();
+  }
+
+  @Override
+  public int compareTo(Object o) {
+    return super.compareTo(o);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    return super.equals(o);
+  }
+
+  @Override
+  public int hashCode() {
+    return super.hashCode();
+  }
+}
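
A hedged, standalone illustration of the path fix-up in ViewFileSystem and the ViewFsLocatedFileStatus wrapper above: statuses listed through a viewfs mount point come back with viewfs paths rather than the target file system's paths, because fixFileStatus re-qualifies them against the mount table. The /tmp target directory and mount name are placeholders.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.viewfs.ConfigUtil;

public class ViewFsLocatedListExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path target = new Path("file:///tmp/viewfs-data");    // placeholder mount target
    FileSystem.getLocal(conf).mkdirs(target);
    ConfigUtil.addLink(conf, "/data", target.toUri());    // viewfs:///data -> local dir
    FileSystem vfs = FileSystem.get(URI.create("viewfs:///"), conf);

    RemoteIterator<LocatedFileStatus> it = vfs.listLocatedStatus(new Path("/data"));
    while (it.hasNext()) {
      // prints viewfs:/data/... paths, not file:/tmp/viewfs-data/... paths
      System.out.println(it.next().getPath());
    }
  }
}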

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d2cf9fb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
index e8d4656..a13ee8d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
@@ -395,6 +395,20 @@ public class TestChRootedFileSystem {
     verify(mockFs).getAclStatus(rawPath);
   }
 
+  @Test
+  public void testListLocatedFileStatus() throws IOException {
+    final Path mockMount = new Path("mockfs://foo/user");
+    final Path mockPath = new Path("/usermock");
+    final Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+    ConfigUtil.addLink(conf, mockPath.toString(), mockMount.toUri());
+    FileSystem vfs = FileSystem.get(URI.create("viewfs:///"), conf);
+    vfs.listLocatedStatus(mockPath);
+    final FileSystem mockFs = ((MockFileSystem)mockMount.getFileSystem(conf))
+        .getRawFileSystem();
+    verify(mockFs).listLocatedStatus(new Path(mockMount.toUri().getPath()));
+  }
+
   static class MockFileSystem extends FilterFileSystem {
     MockFileSystem() {
       super(mock(FileSystem.class));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d2cf9fb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
index a324556..18769c2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.fs.viewfs;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.List;
@@ -36,9 +37,11 @@ import static org.junit.Assert.assertFalse;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FsConstants;
+import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.AclUtil;
+import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.viewfs.ConfigUtil;
@@ -125,7 +128,7 @@ public class ViewFileSystemBaseTest {
   
   void setupMountPoints() {
     ConfigUtil.addLink(conf, "/targetRoot", targetTestRoot.toUri());
-    ConfigUtil.addLink(conf, "/user", new Path(targetTestRoot,"user").toUri());
+    ConfigUtil.addLink(conf, "/user", new Path(targetTestRoot, "user").toUri());
     ConfigUtil.addLink(conf, "/user2", new Path(targetTestRoot,"user").toUri());
     ConfigUtil.addLink(conf, "/data", new Path(targetTestRoot,"data").toUri());
     ConfigUtil.addLink(conf, "/internalDir/linkToDir2",
@@ -133,9 +136,9 @@ public class ViewFileSystemBaseTest {
     ConfigUtil.addLink(conf, "/internalDir/internalDir2/linkToDir3",
         new Path(targetTestRoot,"dir3").toUri());
     ConfigUtil.addLink(conf, "/danglingLink",
-        new Path(targetTestRoot,"missingTarget").toUri());
+        new Path(targetTestRoot, "missingTarget").toUri());
     ConfigUtil.addLink(conf, "/linkToAFile",
-        new Path(targetTestRoot,"aFile").toUri());
+        new Path(targetTestRoot, "aFile").toUri());
   }
   
   @Test
@@ -204,19 +207,28 @@ public class ViewFileSystemBaseTest {
         fsView.makeQualified(new Path("/foo/bar")));
   }
 
-  
-  /** 
-   * Test modify operations (create, mkdir, delete, etc) 
+  @Test
+  public void testLocatedOperationsThroughMountLinks() throws IOException {
+    testOperationsThroughMountLinksInternal(true);
+  }
+
+  @Test
+  public void testOperationsThroughMountLinks() throws IOException {
+    testOperationsThroughMountLinksInternal(false);
+  }
+
+  /**
+   * Test modify operations (create, mkdir, delete, etc)
    * on the mount file system where the pathname references through
    * the mount points.  Hence these operation will modify the target
    * file system.
-   * 
+   *
    * Verify the operation via mountfs (ie fSys) and *also* via the
    *  target file system (ie fSysLocal) that the mount link points-to.
    */
-  @Test
-  public void testOperationsThroughMountLinks() throws IOException {
-    // Create file 
+  private void testOperationsThroughMountLinksInternal(boolean located)
+      throws IOException {
+    // Create file
     fileSystemTestHelper.createFile(fsView, "/user/foo");
     Assert.assertTrue("Created file should be type file",
         fsView.isFile(new Path("/user/foo")));
@@ -329,7 +341,8 @@ public class ViewFileSystemBaseTest {
     fsView.mkdirs(new Path("/targetRoot/dirFoo"));
     Assert.assertTrue(fsView.exists(new Path("/targetRoot/dirFoo")));
     boolean dirFooPresent = false;
-    for (FileStatus fileStatus : fsView.listStatus(new Path("/targetRoot/"))) {
+    for (FileStatus fileStatus :
+        listStatusInternal(located, new Path("/targetRoot/"))) {
       if (fileStatus.getPath().getName().equals("dirFoo")) {
         dirFooPresent = true;
       }
@@ -394,9 +407,13 @@ public class ViewFileSystemBaseTest {
       i++;     
     } 
   }
-  
-  
-  
+
+  @Test
+  public void testLocatedListOnInternalDirsOfMountTable() throws IOException {
+    testListOnInternalDirsOfMountTableInternal(true);
+  }
+
+
   /**
    * Test "readOps" (e.g. list, listStatus) 
    * on internal dirs of mount table
@@ -406,15 +423,20 @@ public class ViewFileSystemBaseTest {
   // test list on internal dirs of mount table 
   @Test
   public void testListOnInternalDirsOfMountTable() throws IOException {
+    testListOnInternalDirsOfMountTableInternal(false);
+  }
+
+  private void testListOnInternalDirsOfMountTableInternal(boolean located)
+      throws IOException {
     
     // list on Slash
-    
-    FileStatus[] dirPaths = fsView.listStatus(new Path("/"));
+
+    FileStatus[] dirPaths = listStatusInternal(located, new Path("/"));
     FileStatus fs;
     verifyRootChildren(dirPaths);
 
     // list on internal dir
-    dirPaths = fsView.listStatus(new Path("/internalDir"));
+    dirPaths = listStatusInternal(located, new Path("/internalDir"));
     Assert.assertEquals(2, dirPaths.length);
 
     fs = fileSystemTestHelper.containsPath(fsView, "/internalDir/internalDir2", dirPaths);
@@ -452,13 +474,26 @@ public class ViewFileSystemBaseTest {
   
   @Test
   public void testListOnMountTargetDirs() throws IOException {
-    FileStatus[] dirPaths = fsView.listStatus(new Path("/data"));
+    testListOnMountTargetDirsInternal(false);
+  }
+
+  @Test
+  public void testLocatedListOnMountTargetDirs() throws IOException {
+    testListOnMountTargetDirsInternal(true);
+  }
+
+  private void testListOnMountTargetDirsInternal(boolean located)
+      throws IOException {
+    final Path dataPath = new Path("/data");
+
+    FileStatus[] dirPaths = listStatusInternal(located, dataPath);
+
     FileStatus fs;
     Assert.assertEquals(0, dirPaths.length);
     
     // add a file
     long len = fileSystemTestHelper.createFile(fsView, "/data/foo");
-    dirPaths = fsView.listStatus(new Path("/data"));
+    dirPaths = listStatusInternal(located, dataPath);
     Assert.assertEquals(1, dirPaths.length);
     fs = fileSystemTestHelper.containsPath(fsView, "/data/foo", dirPaths);
     Assert.assertNotNull(fs);
@@ -467,7 +502,7 @@ public class ViewFileSystemBaseTest {
     
     // add a dir
     fsView.mkdirs(fileSystemTestHelper.getTestRootPath(fsView, "/data/dirX"));
-    dirPaths = fsView.listStatus(new Path("/data"));
+    dirPaths = listStatusInternal(located, dataPath);
     Assert.assertEquals(2, dirPaths.length);
     fs = fileSystemTestHelper.containsPath(fsView, "/data/foo", dirPaths);
     Assert.assertNotNull(fs);
@@ -476,7 +511,23 @@ public class ViewFileSystemBaseTest {
     Assert.assertNotNull(fs);
     Assert.assertTrue("Created dir should appear as a dir", fs.isDirectory()); 
   }
-      
+
+  private FileStatus[] listStatusInternal(boolean located, Path dataPath) throws IOException {
+    FileStatus[] dirPaths = new FileStatus[0];
+    if (located) {
+      RemoteIterator<LocatedFileStatus> statIter =
+          fsView.listLocatedStatus(dataPath);
+      ArrayList<LocatedFileStatus> tmp = new ArrayList<LocatedFileStatus>(10);
+      while (statIter.hasNext()) {
+        tmp.add(statIter.next());
+      }
+      dirPaths = tmp.toArray(dirPaths);
+    } else {
+      dirPaths = fsView.listStatus(dataPath);
+    }
+    return dirPaths;
+  }
+
   @Test
   public void testFileStatusOnMountLink() throws IOException {
     Assert.assertTrue(fsView.getFileStatus(new Path("/")).isDirectory());
@@ -692,11 +743,21 @@ public class ViewFileSystemBaseTest {
     Assert.assertTrue("Created file should be type file",
         fsView.isFile(new Path("/user/foo")));
     Assert.assertTrue("Target of created file should be type file",
-        fsTarget.isFile(new Path(targetTestRoot,"user/foo")));
+        fsTarget.isFile(new Path(targetTestRoot, "user/foo")));
   }
 
   @Test
   public void testRootReadableExecutable() throws IOException {
+    testRootReadableExecutableInternal(false);
+  }
+
+  @Test
+  public void testLocatedRootReadableExecutable() throws IOException {
+    testRootReadableExecutableInternal(true);
+  }
+
+  private void testRootReadableExecutableInternal(boolean located)
+      throws IOException {
     // verify executable permission on root: cd /
     //
     Assert.assertFalse("In root before cd",
@@ -707,7 +768,8 @@ public class ViewFileSystemBaseTest {
 
     // verify readable
     //
-    verifyRootChildren(fsView.listStatus(fsView.getWorkingDirectory()));
+    verifyRootChildren(listStatusInternal(located,
+        fsView.getWorkingDirectory()));
 
     // verify permissions
     //


[3/3] hadoop git commit: MAPREDUCE-6297. Task Id of the failed task in diagnostics should link to the task page. (Siqi Li via gera)

Posted by ge...@apache.org.
MAPREDUCE-6297. Task Id of the failed task in diagnostics should link to the task page. (Siqi Li via gera)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/89ded89e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/89ded89e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/89ded89e

Branch: refs/heads/trunk
Commit: 89ded89e86e5d9a634d92a5d8a7c889744d97f94
Parents: 6d2cf9f
Author: Gera Shegalov <ge...@apache.org>
Authored: Tue Apr 21 12:36:37 2015 -0700
Committer: Gera Shegalov <ge...@apache.org>
Committed: Tue Apr 21 13:57:23 2015 -0700

----------------------------------------------------------------------
 hadoop-mapreduce-project/CHANGES.txt            |  3 ++
 .../org/apache/hadoop/mapreduce/TaskID.java     | 35 ++++++++------------
 .../mapreduce/v2/hs/webapp/HsJobBlock.java      |  8 ++++-
 .../mapreduce/v2/hs/webapp/TestBlocks.java      | 20 ++++++++++-
 .../v2/hs/webapp/TestHsWebServicesTasks.java    | 27 ++++++++++-----
 5 files changed, 60 insertions(+), 33 deletions(-)
----------------------------------------------------------------------
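
For context: this patch replaces the hand-rolled split("_") parsing in TaskID.forName with a single regular expression (TASK_ID_REGEX), which the history server then reuses to find task ids embedded in diagnostic text. A hedged sketch of the resulting forName behavior; the sample ids are illustrative.

import org.apache.hadoop.mapreduce.TaskID;

public class TaskIdParsingExample {
  public static void main(String[] args) {
    // a well-formed id parses as before
    TaskID ok = TaskID.forName("task_1429640000000_0001_m_000003");
    System.out.println(ok.getTaskType() + " #" + ok.getId());

    // a malformed id (unknown task-type character 'd') fails with a message
    // that now includes the Matcher state, as asserted in the updated tests below
    try {
      TaskID.forName("task_0_0000_d_000000");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }
}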


http://git-wip-us.apache.org/repos/asf/hadoop/blob/89ded89e/hadoop-mapreduce-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 0cf5c4b..ccdf6d6 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -340,6 +340,9 @@ Release 2.8.0 - UNRELEASED
     MAPREDUCE-6293. Set job classloader on uber-job's LocalContainerLauncher
     event thread. (Sangjin Lee via gera)
 
+    MAPREDUCE-6297. Task Id of the failed task in diagnostics should link to
+    the task page. (Siqi Li via gera)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/89ded89e/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
index 488ffcc..b9817dd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
@@ -25,6 +25,8 @@ import java.text.NumberFormat;
 import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -58,6 +60,9 @@ import org.apache.hadoop.io.WritableUtils;
 public class TaskID extends org.apache.hadoop.mapred.ID {
   protected static final String TASK = "task";
   protected static final NumberFormat idFormat = NumberFormat.getInstance();
+  public static final String TASK_ID_REGEX = TASK + "_(\\d+)_(\\d+)_" +
+      CharTaskTypeMaps.allTaskTypes + "_(\\d+)";
+  public static final Pattern taskIdPattern = Pattern.compile(TASK_ID_REGEX);
   static {
     idFormat.setGroupingUsed(false);
     idFormat.setMinimumIntegerDigits(6);
@@ -207,29 +212,15 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
     throws IllegalArgumentException {
     if(str == null)
       return null;
-    String exceptionMsg = null;
-    try {
-      String[] parts = str.split("_");
-      if(parts.length == 5) {
-        if(parts[0].equals(TASK)) {
-          String type = parts[3];
-          TaskType t = CharTaskTypeMaps.getTaskType(type.charAt(0));
-          if(t != null) {
-          
-            return new org.apache.hadoop.mapred.TaskID(parts[1], 
-                                                     Integer.parseInt(parts[2]),
-                                                     t, 
-                                                     Integer.parseInt(parts[4]));
-          } else
-            exceptionMsg = "Bad TaskType identifier. TaskId string : " + str
-                + " is not properly formed.";
-        }
-      }
-    }catch (Exception ex) {//fall below
-    }
-    if (exceptionMsg == null) {
-      exceptionMsg = "TaskId string : " + str + " is not properly formed";
+    Matcher m = taskIdPattern.matcher(str);
+    if (m.matches()) {
+      return new org.apache.hadoop.mapred.TaskID(m.group(1),
+          Integer.parseInt(m.group(2)),
+          CharTaskTypeMaps.getTaskType(m.group(3).charAt(0)),
+          Integer.parseInt(m.group(4)));
     }
+    String exceptionMsg = "TaskId string : " + str + " is not properly formed" +
+        "\nReason: " + m.toString();
     throw new IllegalArgumentException(exceptionMsg);
   }
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/89ded89e/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
index 4a13e0b..f3341a6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
@@ -27,6 +27,7 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
 import java.util.Date;
 import java.util.List;
 
+import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
@@ -98,7 +99,7 @@ public class HsJobBlock extends HtmlBlock {
     if(diagnostics != null && !diagnostics.isEmpty()) {
       StringBuffer b = new StringBuffer();
       for(String diag: diagnostics) {
-        b.append(diag);
+        b.append(addTaskLinks(diag));
       }
       infoBlock._("Diagnostics:", b.toString());
     }
@@ -203,4 +204,9 @@ public class HsJobBlock extends HtmlBlock {
        _().
      _();
   }
+
+  static String addTaskLinks(String text) {
+    return TaskID.taskIdPattern.matcher(text).replaceAll(
+        "<a href=\"/jobhistory/task/$0\">$0</a>");
+  }
 }
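
A standalone sketch of the substitution addTaskLinks performs: $0 in the replacement string refers to the entire regex match, so every task id found in the diagnostics text gets wrapped in a link to its job-history task page. The simplified pattern below stands in for TaskID.TASK_ID_REGEX (it only covers the map/reduce task-type characters).

import java.util.regex.Pattern;

public class AddTaskLinksSketch {
  // simplified stand-in for TaskID.taskIdPattern
  private static final Pattern TASK_ID =
      Pattern.compile("task_(\\d+)_(\\d+)_[mr]_(\\d+)");

  public static void main(String[] args) {
    String diag = "Task failed task_1429640000000_0001_m_000003";
    String html = TASK_ID.matcher(diag)
        .replaceAll("<a href=\"/jobhistory/task/$0\">$0</a>");
    // Task failed <a href="/jobhistory/task/task_1429640000000_0001_m_000003">...</a>
    System.out.println(html);
  }
}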

http://git-wip-us.apache.org/repos/asf/hadoop/blob/89ded89e/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
index 7231367..b82965a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
@@ -65,6 +65,7 @@ import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
 import org.apache.hadoop.yarn.webapp.view.BlockForTest;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block;
+import org.junit.Assert;
 import org.junit.Test;
 
 import static org.junit.Assert.*;
@@ -78,6 +79,23 @@ import static org.mockito.Mockito.*;
 public class TestBlocks {
   private ByteArrayOutputStream data = new ByteArrayOutputStream();
 
+  @Test
+  public void testPullTaskLink(){
+    Task task = getTask(0);
+    String taskId = task.getID().toString();
+
+    Assert.assertEquals("pull links doesn't work correctly",
+        "Task failed <a href=\"/jobhistory/task/" + taskId + "\">" +
+        taskId + "</a>"
+        , HsJobBlock.addTaskLinks("Task failed " + taskId));
+
+    Assert.assertEquals("pull links doesn't work correctly",
+        "Task failed <a href=\"/jobhistory/task/" + taskId + "\">" +
+        taskId + "</a>\n Job failed as tasks failed. failedMaps:1 failedReduces:0"
+        , HsJobBlock.addTaskLinks("Task failed " + taskId + "\n " +
+        "Job failed as tasks failed. failedMaps:1 failedReduces:0"));
+  }
+
   /**
    * test HsTasksBlock's rendering.
    */
@@ -241,7 +259,7 @@ public class TestBlocks {
     assertEquals(HsAttemptsPage.class, controller.attemptsPage());
 
     controller.set(AMParams.JOB_ID, "job_01_01");
-    controller.set(AMParams.TASK_ID, "task_01_01_m01_01");
+    controller.set(AMParams.TASK_ID, "task_01_01_m_01");
     controller.set(AMParams.TASK_TYPE, "m");
     controller.set(AMParams.ATTEMPT_STATE, "State");
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/89ded89e/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
index ee0ccc6..22fa46a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
@@ -33,6 +33,7 @@ import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
@@ -368,9 +369,11 @@ public class TestHsWebServicesTasks extends JerseyTest {
         String message = exception.getString("message");
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
-        WebServicesTestUtils.checkStringMatch("exception message",
+        WebServicesTestUtils.checkStringEqual("exception message",
             "java.lang.Exception: TaskId string : "
-                + "bogustaskid is not properly formed", message);
+                + "bogustaskid is not properly formed"
+                + "\nReason: java.util.regex.Matcher[pattern=" +
+                TaskID.TASK_ID_REGEX + " region=0,11 lastmatch=]", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -432,9 +435,11 @@ public class TestHsWebServicesTasks extends JerseyTest {
         String message = exception.getString("message");
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
-        WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: Bad TaskType identifier. TaskId string : "
-                + "task_0_0000_d_000000 is not properly formed.", message);
+        WebServicesTestUtils.checkStringEqual("exception message",
+            "java.lang.Exception: TaskId string : "
+                + "task_0_0000_d_000000 is not properly formed" +
+                "\nReason: java.util.regex.Matcher[pattern=" +
+                TaskID.TASK_ID_REGEX + " region=0,20 lastmatch=]", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -464,9 +469,11 @@ public class TestHsWebServicesTasks extends JerseyTest {
         String message = exception.getString("message");
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
-        WebServicesTestUtils.checkStringMatch("exception message",
+        WebServicesTestUtils.checkStringEqual("exception message",
             "java.lang.Exception: TaskId string : "
-                + "task_0000_m_000000 is not properly formed", message);
+                + "task_0000_m_000000 is not properly formed" +
+                "\nReason: java.util.regex.Matcher[pattern=" +
+                TaskID.TASK_ID_REGEX + " region=0,18 lastmatch=]", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -496,9 +503,11 @@ public class TestHsWebServicesTasks extends JerseyTest {
         String message = exception.getString("message");
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
-        WebServicesTestUtils.checkStringMatch("exception message",
+        WebServicesTestUtils.checkStringEqual("exception message",
             "java.lang.Exception: TaskId string : "
-                + "task_0_0000_m is not properly formed", message);
+                + "task_0_0000_m is not properly formed" +
+                "\nReason: java.util.regex.Matcher[pattern=" +
+                TaskID.TASK_ID_REGEX + " region=0,13 lastmatch=]", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",