Posted to common-commits@hadoop.apache.org by ta...@apache.org on 2020/09/28 09:28:08 UTC

[hadoop] branch trunk updated: HDFS-15577. Refactor TestTracing. (#2302)

This is an automated email from the ASF dual-hosted git repository.

tasanuma pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new d89af79  HDFS-15577. Refactor TestTracing. (#2302)
d89af79 is described below

commit d89af7950732d24c839f88aa5ad6b4bb93725829
Author: Takanobu Asanuma <ta...@apache.org>
AuthorDate: Mon Sep 28 18:27:41 2020 +0900

    HDFS-15577. Refactor TestTracing. (#2302)
    
    Reviewed-by: Akira Ajisaka <aa...@apache.org>
---
 .../org/apache/hadoop/tracing/TestTracing.java     | 72 ++++++++++------------
 1 file changed, 32 insertions(+), 40 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
index 04c85a1..a8653d6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
@@ -26,28 +26,22 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsTracer;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
-import org.apache.htrace.core.Sampler;
 import org.apache.htrace.core.Span;
 import org.apache.htrace.core.TraceScope;
 import org.apache.htrace.core.Tracer;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestTracing {
   private static MiniDFSCluster cluster;
   private static DistributedFileSystem dfs;
 
-  private Tracer prevTracer;
-
   private final static Configuration TRACING_CONF;
   private final static Configuration NO_TRACING_CONF;
 
@@ -87,21 +81,21 @@ public class TestTracing {
     long endTime = System.currentTimeMillis();
     ts.close();
 
-    String[] expectedSpanNames = {
-      "testWriteTraceHooks",
-      "ClientProtocol#create",
-      "ClientNamenodeProtocol#create",
-      "ClientProtocol#fsync",
-      "ClientNamenodeProtocol#fsync",
-      "ClientProtocol#complete",
-      "ClientNamenodeProtocol#complete",
-      "newStreamForCreate",
-      "DFSOutputStream#write",
-      "DFSOutputStream#close",
-      "dataStreamer",
-      "OpWriteBlockProto",
-      "ClientProtocol#addBlock",
-      "ClientNamenodeProtocol#addBlock"
+    String[] expectedSpanNames = new String[]{
+        "testWriteTraceHooks",
+        "ClientProtocol#create",
+        "ClientNamenodeProtocol#create",
+        "ClientProtocol#fsync",
+        "ClientNamenodeProtocol#fsync",
+        "ClientProtocol#complete",
+        "ClientNamenodeProtocol#complete",
+        "newStreamForCreate",
+        "DFSOutputStream#write",
+        "DFSOutputStream#close",
+        "dataStreamer",
+        "OpWriteBlockProto",
+        "ClientProtocol#addBlock",
+        "ClientNamenodeProtocol#addBlock"
     };
     SetSpanReceiver.assertSpanNamesFound(expectedSpanNames);
 
@@ -109,23 +103,21 @@ public class TestTracing {
     Map<String, List<Span>> map = SetSpanReceiver.getMap();
     Span s = map.get("testWriteTraceHooks").get(0);
     Assert.assertNotNull(s);
-    long spanStart = s.getStartTimeMillis();
-    long spanEnd = s.getStopTimeMillis();
 
     // Spans homed in the top trace shoud have same trace id.
     // Spans having multiple parents (e.g. "dataStreamer" added by HDFS-7054)
     // and children of them are exception.
-    String[] spansInTopTrace = {
-      "testWriteTraceHooks",
-      "ClientProtocol#create",
-      "ClientNamenodeProtocol#create",
-      "ClientProtocol#fsync",
-      "ClientNamenodeProtocol#fsync",
-      "ClientProtocol#complete",
-      "ClientNamenodeProtocol#complete",
-      "newStreamForCreate",
-      "DFSOutputStream#write",
-      "DFSOutputStream#close",
+    String[] spansInTopTrace = new String[]{
+        "testWriteTraceHooks",
+        "ClientProtocol#create",
+        "ClientNamenodeProtocol#create",
+        "ClientProtocol#fsync",
+        "ClientNamenodeProtocol#fsync",
+        "ClientProtocol#complete",
+        "ClientNamenodeProtocol#complete",
+        "newStreamForCreate",
+        "DFSOutputStream#write",
+        "DFSOutputStream#close",
     };
     for (String desc : spansInTopTrace) {
       for (Span span : map.get(desc)) {
@@ -150,11 +142,11 @@ public class TestTracing {
     ts.close();
     long endTime = System.currentTimeMillis();
 
-    String[] expectedSpanNames = {
-      "testReadTraceHooks",
-      "ClientProtocol#getBlockLocations",
-      "ClientNamenodeProtocol#getBlockLocations",
-      "OpReadBlockProto"
+    String[] expectedSpanNames = new String[]{
+        "testReadTraceHooks",
+        "ClientProtocol#getBlockLocations",
+        "ClientNamenodeProtocol#getBlockLocations",
+        "OpReadBlockProto"
     };
     SetSpanReceiver.assertSpanNamesFound(expectedSpanNames);
 
@@ -221,7 +213,7 @@ public class TestTracing {
   }
 
   @After
-  public void shutDown() throws IOException {
+  public void shutDown() {
     if (cluster != null) {
       cluster.shutdown();
       cluster = null;

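A minimal illustrative sketch of the tracing pattern the refactored test exercises (not taken from the commit above; it assumes a running MiniDFSCluster, the test's TRACING_CONF configuration, and a DistributedFileSystem handle named dfs, and uses the imports already visible in the diff; the path and payload are placeholders): open an HTrace scope, perform a DFS write, close the scope, then assert that the expected span names were collected by SetSpanReceiver.

    // Obtain the process-wide HTrace Tracer from the Hadoop configuration.
    Tracer tracer = FsTracer.get(TRACING_CONF);
    // Open a trace scope; DFS calls made while it is open emit child spans.
    TraceScope ts = tracer.newScope("testWriteTraceHooks");
    try {
      FSDataOutputStream out = dfs.create(new Path("/trace-test-file"));
      out.writeBytes("hello");  // placeholder payload
      out.hsync();
      out.close();
    } finally {
      ts.close();  // closing the scope finishes the root span
    }
    // The test's span receiver should now contain the root span by name.
    SetSpanReceiver.assertSpanNamesFound(new String[]{"testWriteTraceHooks"});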

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org