Posted to common-commits@hadoop.apache.org by cm...@apache.org on 2015/07/02 02:57:22 UTC

hadoop git commit: HADOOP-12171. Shorten overly-long htrace span names for server (cmccabe)

Repository: hadoop
Updated Branches:
  refs/heads/trunk 0e4b06690 -> a78d5074f


HADOOP-12171. Shorten overly-long htrace span names for server (cmccabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a78d5074
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a78d5074
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a78d5074

Branch: refs/heads/trunk
Commit: a78d5074fb3da4779a6b5fd9947e60b9d755ee14
Parents: 0e4b066
Author: Colin Patrick Mccabe <cm...@cloudera.com>
Authored: Wed Jul 1 17:57:11 2015 -0700
Committer: Colin Patrick Mccabe <cm...@cloudera.com>
Committed: Wed Jul 1 17:57:11 2015 -0700

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 ++
 .../org/apache/hadoop/ipc/RpcClientUtil.java    | 24 ++++++++++++++++++++
 .../main/java/org/apache/hadoop/ipc/Server.java |  4 +++-
 .../org/apache/hadoop/tracing/TestTracing.java  | 18 +++++++--------
 4 files changed, 38 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a78d5074/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 39e2e5e..24431ba 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -673,6 +673,8 @@ Release 2.8.0 - UNRELEASED
 
     HADOOP-12124. Add HTrace support for FsShell (cmccabe)
 
+    HADOOP-12171. Shorten overly-long htrace span names for server (cmccabe)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a78d5074/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
index d9bd71b..da1e699 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
@@ -210,4 +210,28 @@ public class RpcClientUtil {
     }
     return clazz.getSimpleName() + "#" + method.getName();
   }
+
+  /**
+   * Convert a fully-qualified RPC method name to a short trace span name.
+   * The format we want is
+   * 'ClassShortName#methodName'.
+   *
+   * For example, if the full method name is:
+   *   org.apache.hadoop.hdfs.protocol.ClientProtocol.getBlockLocations
+   *
+   * the span name we want is:
+   *   ClientProtocol#getBlockLocations
+   */
+  public static String toTraceName(String fullName) {
+    int lastPeriod = fullName.lastIndexOf('.');
+    if (lastPeriod < 0) {
+      return fullName;
+    }
+    int secondLastPeriod = fullName.lastIndexOf('.', lastPeriod - 1);
+    if (secondLastPeriod < 0) {
+      return fullName;
+    }
+    return fullName.substring(secondLastPeriod + 1, lastPeriod) + "#" +
+        fullName.substring(lastPeriod + 1);
+  }
 }

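For reference, a minimal standalone sketch (not part of the patch; the demo class name and main method are illustrative only) of how the new toTraceName conversion behaves, including the pass-through cases where the input contains fewer than two periods:

    public class TraceNameDemo {
      // Same logic as the RpcClientUtil.toTraceName method added above.
      static String toTraceName(String fullName) {
        int lastPeriod = fullName.lastIndexOf('.');
        if (lastPeriod < 0) {
          return fullName;              // no period at all: leave the name unchanged
        }
        int secondLastPeriod = fullName.lastIndexOf('.', lastPeriod - 1);
        if (secondLastPeriod < 0) {
          return fullName;              // only one period: leave the name unchanged
        }
        return fullName.substring(secondLastPeriod + 1, lastPeriod) + "#" +
            fullName.substring(lastPeriod + 1);
      }

      public static void main(String[] args) {
        // Fully-qualified method name -> short span name
        System.out.println(toTraceName(
            "org.apache.hadoop.hdfs.protocol.ClientProtocol.getBlockLocations"));
        // prints: ClientProtocol#getBlockLocations

        // Names with fewer than two periods pass through unchanged
        System.out.println(toTraceName("ClientProtocol.create"));  // ClientProtocol.create
        System.out.println(toTraceName("create"));                 // create
      }
    }
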
http://git-wip-us.apache.org/repos/asf/hadoop/blob/a78d5074/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index 98fffc0..4026fe0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -1963,7 +1963,9 @@ public abstract class Server {
         // If the incoming RPC included tracing info, always continue the trace
         TraceInfo parentSpan = new TraceInfo(header.getTraceInfo().getTraceId(),
                                              header.getTraceInfo().getParentId());
-        traceSpan = Trace.startSpan(rpcRequest.toString(), parentSpan).detach();
+        traceSpan = Trace.startSpan(
+            RpcClientUtil.toTraceName(rpcRequest.toString()),
+            parentSpan).detach();
       }
 
       Call call = new Call(header.getCallId(), header.getRetryCount(),

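A quick sketch of the server-side effect (the demo class name is hypothetical; this assumes a hadoop-common build that includes this patch on the classpath, and that rpcRequest.toString() yields the fully-qualified method name, as the old span names in the test below indicate):

    import org.apache.hadoop.ipc.RpcClientUtil;

    public class SpanNameDemo {
      public static void main(String[] args) {
        // What the server previously passed to Trace.startSpan(): the raw toString()
        String rawName = "org.apache.hadoop.hdfs.protocol.ClientProtocol.create";

        // What it passes now: the shortened span name
        String spanName = RpcClientUtil.toTraceName(rawName);
        System.out.println(spanName);  // prints: ClientProtocol#create
      }
    }
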
http://git-wip-us.apache.org/repos/asf/hadoop/blob/a78d5074/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
index 58b3659..c3d2c73 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
@@ -67,18 +67,18 @@ public class TestTracing {
 
     String[] expectedSpanNames = {
       "testWriteTraceHooks",
-      "org.apache.hadoop.hdfs.protocol.ClientProtocol.create",
+      "ClientProtocol#create",
       "ClientNamenodeProtocol#create",
-      "org.apache.hadoop.hdfs.protocol.ClientProtocol.fsync",
+      "ClientProtocol#fsync",
       "ClientNamenodeProtocol#fsync",
-      "org.apache.hadoop.hdfs.protocol.ClientProtocol.complete",
+      "ClientProtocol#complete",
       "ClientNamenodeProtocol#complete",
       "newStreamForCreate",
       "DFSOutputStream#write",
       "DFSOutputStream#close",
       "dataStreamer",
       "OpWriteBlockProto",
-      "org.apache.hadoop.hdfs.protocol.ClientProtocol.addBlock",
+      "ClientProtocol#addBlock",
       "ClientNamenodeProtocol#addBlock"
     };
     SetSpanReceiver.assertSpanNamesFound(expectedSpanNames);
@@ -95,11 +95,11 @@ public class TestTracing {
     // and children of them are exception.
     String[] spansInTopTrace = {
       "testWriteTraceHooks",
-      "org.apache.hadoop.hdfs.protocol.ClientProtocol.create",
+      "ClientProtocol#create",
       "ClientNamenodeProtocol#create",
-      "org.apache.hadoop.hdfs.protocol.ClientProtocol.fsync",
+      "ClientProtocol#fsync",
       "ClientNamenodeProtocol#fsync",
-      "org.apache.hadoop.hdfs.protocol.ClientProtocol.complete",
+      "ClientProtocol#complete",
       "ClientNamenodeProtocol#complete",
       "newStreamForCreate",
       "DFSOutputStream#write",
@@ -113,7 +113,7 @@ public class TestTracing {
 
     // test for timeline annotation added by HADOOP-11242
     Assert.assertEquals("called",
-        map.get("org.apache.hadoop.hdfs.protocol.ClientProtocol.create")
+        map.get("ClientProtocol#create")
            .get(0).getTimelineAnnotations()
            .get(0).getMessage());
 
@@ -131,7 +131,7 @@ public class TestTracing {
 
     String[] expectedSpanNames = {
       "testReadTraceHooks",
-      "org.apache.hadoop.hdfs.protocol.ClientProtocol.getBlockLocations",
+      "ClientProtocol#getBlockLocations",
       "ClientNamenodeProtocol#getBlockLocations",
       "OpReadBlockProto"
     };