You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by zj...@apache.org on 2015/07/06 22:58:00 UTC
[38/48] hadoop git commit: HADOOP-12171. Shorten overly-long htrace
span names for server (cmccabe)
HADOOP-12171. Shorten overly-long htrace span names for server (cmccabe)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6f530f52
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6f530f52
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6f530f52
Branch: refs/heads/YARN-2928
Commit: 6f530f52f0a4faf7887d36e2f1a7c70b1fb4e1fc
Parents: d5192ca
Author: Colin Patrick Mccabe <cm...@cloudera.com>
Authored: Wed Jul 1 17:57:11 2015 -0700
Committer: Zhijie Shen <zj...@apache.org>
Committed: Mon Jul 6 11:32:00 2015 -0700
----------------------------------------------------------------------
hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++
.../org/apache/hadoop/ipc/RpcClientUtil.java | 24 ++++++++++++++++++++
.../main/java/org/apache/hadoop/ipc/Server.java | 4 +++-
.../org/apache/hadoop/tracing/TestTracing.java | 18 +++++++--------
4 files changed, 38 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f530f52/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 39e2e5e..24431ba 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -673,6 +673,8 @@ Release 2.8.0 - UNRELEASED
HADOOP-12124. Add HTrace support for FsShell (cmccabe)
+ HADOOP-12171. Shorten overly-long htrace span names for server (cmccabe)
+
OPTIMIZATIONS
HADOOP-11785. Reduce the number of listStatus operation in distcp
http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f530f52/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
index d9bd71b..da1e699 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
@@ -210,4 +210,28 @@ public class RpcClientUtil {
}
return clazz.getSimpleName() + "#" + method.getName();
}
+
+ /**
+ * Convert a fully-qualified RPC method name to a short trace span name.
+ *
+ * The input is expected to look like 'package.path.ClassName.methodName';
+ * the result is 'ShortClassName#methodName' — i.e. the last two
+ * dot-separated components. If the input contains fewer than two
+ * periods, it is returned unchanged.
+ *
+ * For example, if the full name is:
+ * org.apache.hadoop.hdfs.protocol.ClientProtocol.getBlockLocations
+ *
+ * the result is:
+ * ClientProtocol#getBlockLocations
+ */
+ public static String toTraceName(String fullName) {
+ int lastPeriod = fullName.lastIndexOf('.');
+ if (lastPeriod < 0) {
+ return fullName;
+ }
+ int secondLastPeriod = fullName.lastIndexOf('.', lastPeriod - 1);
+ if (secondLastPeriod < 0) {
+ return fullName;
+ }
+ return fullName.substring(secondLastPeriod + 1, lastPeriod) + "#" +
+ fullName.substring(lastPeriod + 1);
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f530f52/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index 98fffc0..4026fe0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -1963,7 +1963,9 @@ public abstract class Server {
// If the incoming RPC included tracing info, always continue the trace
TraceInfo parentSpan = new TraceInfo(header.getTraceInfo().getTraceId(),
header.getTraceInfo().getParentId());
- traceSpan = Trace.startSpan(rpcRequest.toString(), parentSpan).detach();
+ traceSpan = Trace.startSpan(
+ RpcClientUtil.toTraceName(rpcRequest.toString()),
+ parentSpan).detach();
}
Call call = new Call(header.getCallId(), header.getRetryCount(),
http://git-wip-us.apache.org/repos/asf/hadoop/blob/6f530f52/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
index 58b3659..c3d2c73 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
@@ -67,18 +67,18 @@ public class TestTracing {
String[] expectedSpanNames = {
"testWriteTraceHooks",
- "org.apache.hadoop.hdfs.protocol.ClientProtocol.create",
+ "ClientProtocol#create",
"ClientNamenodeProtocol#create",
- "org.apache.hadoop.hdfs.protocol.ClientProtocol.fsync",
+ "ClientProtocol#fsync",
"ClientNamenodeProtocol#fsync",
- "org.apache.hadoop.hdfs.protocol.ClientProtocol.complete",
+ "ClientProtocol#complete",
"ClientNamenodeProtocol#complete",
"newStreamForCreate",
"DFSOutputStream#write",
"DFSOutputStream#close",
"dataStreamer",
"OpWriteBlockProto",
- "org.apache.hadoop.hdfs.protocol.ClientProtocol.addBlock",
+ "ClientProtocol#addBlock",
"ClientNamenodeProtocol#addBlock"
};
SetSpanReceiver.assertSpanNamesFound(expectedSpanNames);
@@ -95,11 +95,11 @@ public class TestTracing {
// and children of them are exception.
String[] spansInTopTrace = {
"testWriteTraceHooks",
- "org.apache.hadoop.hdfs.protocol.ClientProtocol.create",
+ "ClientProtocol#create",
"ClientNamenodeProtocol#create",
- "org.apache.hadoop.hdfs.protocol.ClientProtocol.fsync",
+ "ClientProtocol#fsync",
"ClientNamenodeProtocol#fsync",
- "org.apache.hadoop.hdfs.protocol.ClientProtocol.complete",
+ "ClientProtocol#complete",
"ClientNamenodeProtocol#complete",
"newStreamForCreate",
"DFSOutputStream#write",
@@ -113,7 +113,7 @@ public class TestTracing {
// test for timeline annotation added by HADOOP-11242
Assert.assertEquals("called",
- map.get("org.apache.hadoop.hdfs.protocol.ClientProtocol.create")
+ map.get("ClientProtocol#create")
.get(0).getTimelineAnnotations()
.get(0).getMessage());
@@ -131,7 +131,7 @@ public class TestTracing {
String[] expectedSpanNames = {
"testReadTraceHooks",
- "org.apache.hadoop.hdfs.protocol.ClientProtocol.getBlockLocations",
+ "ClientProtocol#getBlockLocations",
"ClientNamenodeProtocol#getBlockLocations",
"OpReadBlockProto"
};