Posted to commits@hbase.apache.org by st...@apache.org on 2017/11/11 18:34:24 UTC

[2/2] hbase git commit: HBASE-18601: Update Htrace to 4.2

HBASE-18601: Update Htrace to 4.2

Updated HTrace version to 4.2
Created TraceUtil class to wrap HTrace methods. Uses try-with-resources.
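
In code, the change replaces manual TraceScope start/close with the
try-with-resources idiom (a minimal sketch based on the
RecoverableZooKeeper changes below; createTrace() returns null when no
tracer is configured, which try-with-resources tolerates):

    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.delete")) {
      // ... traced work; 'scope' may be null when tracing is off ...
    }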

Signed-off-by: Balazs Meszaros <ba...@cloudera.com>
Signed-off-by: Michael Stack <st...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/377174d3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/377174d3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/377174d3

Branch: refs/heads/master
Commit: 377174d3ef95e020573a824e71c530b8391e67c3
Parents: bff619e
Author: Tamas Penzes <ta...@cloudera.com>
Authored: Thu Oct 26 23:23:42 2017 +0200
Committer: Michael Stack <st...@apache.org>
Committed: Sat Nov 11 10:34:03 2017 -0800

----------------------------------------------------------------------
 hbase-backup/pom.xml                            |  10 ++
 hbase-client/pom.xml                            |  12 +-
 .../hbase/client/AsyncRequestFutureImpl.java    |  13 +-
 .../client/ResultBoundedCompletionService.java  |   4 +-
 .../hadoop/hbase/ipc/BlockingRpcConnection.java |  22 +--
 .../java/org/apache/hadoop/hbase/ipc/Call.java  |   6 +-
 .../org/apache/hadoop/hbase/ipc/IPCUtil.java    |   8 +-
 .../hbase/zookeeper/RecoverableZooKeeper.java   |  86 +++---------
 hbase-common/pom.xml                            |  14 +-
 .../hbase/trace/HBaseHTraceConfiguration.java   |  11 +-
 .../hadoop/hbase/trace/SpanReceiverHost.java    |  20 ++-
 .../apache/hadoop/hbase/trace/TraceUtil.java    | 124 +++++++++++++++++
 hbase-endpoint/pom.xml                          |  24 ++++
 hbase-examples/pom.xml                          |  18 +++
 hbase-external-blockcache/pom.xml               |  10 +-
 .../hbase/io/hfile/MemcachedBlockCache.java     |  37 ++---
 hbase-hadoop2-compat/pom.xml                    |   6 +
 hbase-it/pom.xml                                |  20 ++-
 .../hadoop/hbase/mttr/IntegrationTestMTTR.java  |  38 +++---
 .../trace/IntegrationTestSendTraceRequests.java |  53 +++-----
 hbase-mapreduce/pom.xml                         |  32 ++++-
 .../hbase/mapreduce/TableMapReduceUtil.java     |   2 +-
 .../hadoop/hbase/PerformanceEvaluation.java     |  33 ++---
 hbase-procedure/pom.xml                         |   6 +
 hbase-protocol-shaded/pom.xml                   |   4 +
 hbase-replication/pom.xml                       |  10 ++
 hbase-rest/pom.xml                              |  16 +++
 hbase-rsgroup/pom.xml                           |  10 ++
 hbase-server/pom.xml                            |   5 +
 .../hadoop/hbase/executor/EventHandler.java     |  16 +--
 .../hadoop/hbase/io/hfile/HFileReaderImpl.java  |  28 ++--
 .../org/apache/hadoop/hbase/ipc/CallRunner.java |  19 +--
 .../apache/hadoop/hbase/ipc/NettyRpcServer.java |   2 +-
 .../hadoop/hbase/ipc/NettyServerCall.java       |   5 +-
 .../hbase/ipc/NettyServerRpcConnection.java     |   5 +-
 .../org/apache/hadoop/hbase/ipc/RpcCall.java    |   6 -
 .../org/apache/hadoop/hbase/ipc/ServerCall.java |  10 +-
 .../hadoop/hbase/ipc/ServerRpcConnection.java   |  12 +-
 .../hadoop/hbase/ipc/SimpleRpcServer.java       |   2 +-
 .../hadoop/hbase/ipc/SimpleServerCall.java      |   5 +-
 .../hbase/ipc/SimpleServerRpcConnection.java    |   7 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   2 +
 .../hadoop/hbase/master/HMasterCommandLine.java |   3 +
 .../hadoop/hbase/regionserver/HRegion.java      |  73 +++++-----
 .../hbase/regionserver/HRegionServer.java       |   2 +
 .../regionserver/HRegionServerCommandLine.java  |   2 +
 .../hbase/regionserver/MemStoreFlusher.java     |  19 ++-
 .../hbase/regionserver/wal/AbstractFSWAL.java   |  38 +++---
 .../hbase/regionserver/wal/AsyncFSWAL.java      |  72 +++++-----
 .../hadoop/hbase/regionserver/wal/FSHLog.java   |  56 ++++----
 .../hbase/regionserver/wal/FSWALEntry.java      |   2 +-
 .../hbase/regionserver/wal/RingBufferTruck.java |   2 +-
 .../hbase/regionserver/wal/SyncFuture.java      |   2 +-
 .../hadoop/hbase/HBaseTestingUtility.java       |   3 +
 .../hbase/ipc/TestSimpleRpcScheduler.java       |   2 +-
 .../hadoop/hbase/trace/TestHTraceHooks.java     | 101 ++++++--------
 .../apache/hadoop/hbase/trace/TraceTree.java    | 134 +++++++++++++++++++
 .../hbase/wal/WALPerformanceEvaluation.java     |  44 +++---
 hbase-shell/pom.xml                             |  28 ++++
 .../src/main/ruby/shell/commands/trace.rb       |   4 +-
 hbase-spark/pom.xml                             |  12 ++
 hbase-testing-util/pom.xml                      |  30 +++++
 hbase-thrift/pom.xml                            |  22 +++
 pom.xml                                         |  61 +++++++--
 src/main/asciidoc/_chapters/tracing.adoc        |   4 +-
 65 files changed, 979 insertions(+), 510 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-backup/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-backup/pom.xml b/hbase-backup/pom.xml
index 147f17f..6282471 100644
--- a/hbase-backup/pom.xml
+++ b/hbase-backup/pom.xml
@@ -175,6 +175,10 @@
           <artifactId>hadoop-common</artifactId>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>net.java.dev.jets3t</groupId>
               <artifactId>jets3t</artifactId>
             </exclusion>
@@ -287,6 +291,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
     </profile>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-client/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index 675e813..cc112d4 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -166,7 +166,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.htrace</groupId>
-      <artifactId>htrace-core</artifactId>
+      <artifactId>htrace-core4</artifactId>
     </dependency>
     <dependency>
       <groupId>org.jruby.jcodings</groupId>
@@ -259,6 +259,10 @@
           <artifactId>hadoop-common</artifactId>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>net.java.dev.jets3t</groupId>
               <artifactId>jets3t</artifactId>
             </exclusion>
@@ -326,6 +330,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
     </profile>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
index 4df1768..91225a7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.RetryImmediatelyException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.backoff.ServerStatistics;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
@@ -56,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.htrace.Trace;
+import org.apache.htrace.core.Tracer;
 
 /**
  * The context, and return value, for a single submit/submitAll call.
@@ -582,7 +583,13 @@ class AsyncRequestFutureImpl<CResult> implements AsyncRequestFuture {
       asyncProcess.incTaskCounters(multiAction.getRegions(), server);
       SingleServerRequestRunnable runnable = createSingleServerRequest(
               multiAction, numAttempt, server, callsInProgress);
-      return Collections.singletonList(Trace.wrap("AsyncProcess.sendMultiAction", runnable));
+      Tracer tracer = Tracer.curThreadTracer();
+
+      if (tracer == null) {
+        return Collections.singletonList(runnable);
+      } else {
+        return Collections.singletonList(tracer.wrap(runnable, "AsyncProcess.sendMultiAction"));
+      }
     }
 
     // group the actions by the amount of delay
@@ -618,7 +625,7 @@ class AsyncRequestFutureImpl<CResult> implements AsyncRequestFuture {
           asyncProcess.connection.getConnectionMetrics().incrNormalRunners();
         }
       }
-      runnable = Trace.wrap(traceText, runnable);
+      runnable = TraceUtil.wrap(runnable, traceText);
       toReturn.add(runnable);
 
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java
index ccfe6ba..b05ad64 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultBoundedCompletionService.java
@@ -28,9 +28,9 @@ import java.util.concurrent.TimeoutException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.htrace.Trace;
 
 /**
  * A completion service for the RpcRetryingCallerFactory.
@@ -168,7 +168,7 @@ public class ResultBoundedCompletionService<V> {
 
   public void submit(RetryingCallable<V> task, int callTimeout, int id) {
     QueueingFuture<V> newFuture = new QueueingFuture<>(task, callTimeout, id);
-    executor.execute(Trace.wrap(newFuture));
+    executor.execute(TraceUtil.wrap(newFuture, "ResultBoundedCompletionService.submit"));
     tasks[id] = newFuture;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
index 0524336..fcc6f7c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
@@ -24,10 +24,6 @@ import static org.apache.hadoop.hbase.ipc.IPCUtil.isFatalConnectionException;
 import static org.apache.hadoop.hbase.ipc.IPCUtil.setCancelled;
 import static org.apache.hadoop.hbase.ipc.IPCUtil.write;
 
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.DataInputStream;
@@ -55,10 +51,15 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
 import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController.CancellationCallback;
+import org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
+import org.apache.hadoop.hbase.security.SaslUtil;
+import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
@@ -66,17 +67,15 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHea
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
-import org.apache.hadoop.hbase.security.HBaseSaslRpcClient;
-import org.apache.hadoop.hbase.security.SaslUtil;
-import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
+import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.ExceptionUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
+import org.apache.htrace.core.TraceScope;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thread that reads responses and notifies callers. Each connection owns a socket connected to a
@@ -574,7 +573,8 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
   }
 
   private void tracedWriteRequest(Call call) throws IOException {
-    try (TraceScope ignored = Trace.startSpan("RpcClientImpl.tracedWriteRequest", call.span)) {
+    try (TraceScope ignored = TraceUtil.createTrace("RpcClientImpl.tracedWriteRequest",
+          call.span)) {
       writeRequest(call);
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java
index 5c0689a..72f03f9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java
@@ -30,8 +30,8 @@ import org.apache.hadoop.hbase.CellScanner;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.MetricsConnection;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.htrace.Span;
-import org.apache.htrace.Trace;
+import org.apache.htrace.core.Span;
+import org.apache.htrace.core.Tracer;
 
 /** A call waiting for a value. */
 @InterfaceAudience.Private
@@ -73,7 +73,7 @@ class Call {
     this.timeout = timeout;
     this.priority = priority;
     this.callback = callback;
-    this.span = Trace.currentSpan();
+    this.span = Tracer.getCurrentSpan();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
index 7c0ddf0..8e3e9aa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
@@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.exceptions.ConnectionClosingException;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.ipc.RemoteException;
@@ -102,10 +101,11 @@ class IPCUtil {
   static RequestHeader buildRequestHeader(Call call, CellBlockMeta cellBlockMeta) {
     RequestHeader.Builder builder = RequestHeader.newBuilder();
     builder.setCallId(call.id);
-    if (call.span != null) {
+    //TODO handle htrace API change, see HBASE-18895
+    /*if (call.span != null) {
       builder.setTraceInfo(RPCTInfo.newBuilder().setParentId(call.span.getSpanId())
-          .setTraceId(call.span.getTraceId()));
-    }
+          .setTraceId(call.span.getTracerId()));
+    }*/
     builder.setMethodName(call.md.getName());
     builder.setRequestParam(call.param != null);
     if (cellBlockMeta != null) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
index 94377c0..04f709f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
@@ -33,8 +33,9 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.RetryCounter;
 import org.apache.hadoop.hbase.util.RetryCounterFactory;
-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.htrace.core.TraceScope;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.AsyncCallback;
 import org.apache.zookeeper.CreateMode;
@@ -156,11 +157,8 @@ public class RecoverableZooKeeper {
    * This function will not throw NoNodeException if the path does not
    * exist.
    */
-  public void delete(String path, int version)
-  throws InterruptedException, KeeperException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.delete");
+  public void delete(String path, int version) throws InterruptedException, KeeperException {
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.delete")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       boolean isRetry = false; // False for first attempt, true for all retries.
       while (true) {
@@ -197,8 +195,6 @@ public class RecoverableZooKeeper {
         retryCounter.sleepUntilNextRetry();
         isRetry = true;
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -206,11 +202,8 @@ public class RecoverableZooKeeper {
    * exists is an idempotent operation. Retry before throwing exception
    * @return A Stat instance
    */
-  public Stat exists(String path, Watcher watcher)
-  throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.exists");
+  public Stat exists(String path, Watcher watcher) throws KeeperException, InterruptedException {
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.exists")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       while (true) {
         try {
@@ -236,8 +229,6 @@ public class RecoverableZooKeeper {
         }
         retryCounter.sleepUntilNextRetry();
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -245,11 +236,8 @@ public class RecoverableZooKeeper {
    * exists is an idempotent operation. Retry before throwing exception
    * @return A Stat instance
    */
-  public Stat exists(String path, boolean watch)
-  throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.exists");
+  public Stat exists(String path, boolean watch) throws KeeperException, InterruptedException {
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.exists")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       while (true) {
         try {
@@ -275,8 +263,6 @@ public class RecoverableZooKeeper {
         }
         retryCounter.sleepUntilNextRetry();
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -297,9 +283,7 @@ public class RecoverableZooKeeper {
    */
   public List<String> getChildren(String path, Watcher watcher)
     throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.getChildren");
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.getChildren")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       while (true) {
         try {
@@ -325,8 +309,6 @@ public class RecoverableZooKeeper {
         }
         retryCounter.sleepUntilNextRetry();
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -336,9 +318,7 @@ public class RecoverableZooKeeper {
    */
   public List<String> getChildren(String path, boolean watch)
   throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.getChildren");
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.getChildren")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       while (true) {
         try {
@@ -364,8 +344,6 @@ public class RecoverableZooKeeper {
         }
         retryCounter.sleepUntilNextRetry();
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -375,9 +353,7 @@ public class RecoverableZooKeeper {
    */
   public byte[] getData(String path, Watcher watcher, Stat stat)
   throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.getData");
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.getData")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       while (true) {
         try {
@@ -403,8 +379,6 @@ public class RecoverableZooKeeper {
         }
         retryCounter.sleepUntilNextRetry();
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -414,9 +388,7 @@ public class RecoverableZooKeeper {
    */
   public byte[] getData(String path, boolean watch, Stat stat)
   throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.getData");
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.getData")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       while (true) {
         try {
@@ -442,8 +414,6 @@ public class RecoverableZooKeeper {
         }
         retryCounter.sleepUntilNextRetry();
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -455,9 +425,7 @@ public class RecoverableZooKeeper {
    */
   public Stat setData(String path, byte[] data, int version)
   throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.setData");
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.setData")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       byte[] newData = appendMetaData(id, data);
       boolean isRetry = false;
@@ -505,8 +473,6 @@ public class RecoverableZooKeeper {
         retryCounter.sleepUntilNextRetry();
         isRetry = true;
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -516,9 +482,7 @@ public class RecoverableZooKeeper {
    */
   public List<ACL> getAcl(String path, Stat stat)
   throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.getAcl");
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.getAcl")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       while (true) {
         try {
@@ -544,8 +508,6 @@ public class RecoverableZooKeeper {
         }
         retryCounter.sleepUntilNextRetry();
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -555,9 +517,7 @@ public class RecoverableZooKeeper {
    */
   public Stat setAcl(String path, List<ACL> acls, int version)
   throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.setAcl");
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.setAcl")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       while (true) {
         try {
@@ -583,8 +543,6 @@ public class RecoverableZooKeeper {
         }
         retryCounter.sleepUntilNextRetry();
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -606,9 +564,7 @@ public class RecoverableZooKeeper {
   public String create(String path, byte[] data, List<ACL> acl,
       CreateMode createMode)
   throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.create");
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.create")) {
       byte[] newData = appendMetaData(id, data);
       switch (createMode) {
         case EPHEMERAL:
@@ -623,8 +579,6 @@ public class RecoverableZooKeeper {
           throw new IllegalArgumentException("Unrecognized CreateMode: " +
               createMode);
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
 
@@ -753,9 +707,7 @@ public class RecoverableZooKeeper {
    */
   public List<OpResult> multi(Iterable<Op> ops)
   throws KeeperException, InterruptedException {
-    TraceScope traceScope = null;
-    try {
-      traceScope = Trace.startSpan("RecoverableZookeeper.multi");
+    try (TraceScope scope = TraceUtil.createTrace("RecoverableZookeeper.multi")) {
       RetryCounter retryCounter = retryCounterFactory.create();
       Iterable<Op> multiOps = prepareZKMulti(ops);
       while (true) {
@@ -782,8 +734,6 @@ public class RecoverableZooKeeper {
         }
         retryCounter.sleepUntilNextRetry();
       }
-    } finally {
-      if (traceScope != null) traceScope.close();
     }
   }
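
Worth noting about these conversions: TraceUtil.createTrace() returns null
when tracing is disabled, and Java's try-with-resources only invokes close()
on a non-null resource, so the rewritten methods need no explicit null
check. A self-contained sketch of that language behavior (the Resource
class here is hypothetical):

    public class NullResourceDemo {
      static class Resource implements AutoCloseable {
        @Override
        public void close() {
          System.out.println("closed");
        }
      }

      public static void main(String[] args) {
        // Legal Java: the body still runs, and close() is simply
        // skipped because the resource reference is null.
        try (Resource r = null) {
          System.out.println("body ran; no NullPointerException");
        }
      }
    }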
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-common/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index b732bbe..8c5d40c 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -244,7 +244,7 @@
     <!-- tracing Dependencies -->
     <dependency>
       <groupId>org.apache.htrace</groupId>
-      <artifactId>htrace-core</artifactId>
+      <artifactId>htrace-core4</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
@@ -344,6 +344,12 @@
           <artifactId>hadoop-common</artifactId>
           <!--FYI This pulls in hadoop's guava. Its needed for Configuration
                at least-->
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
       <build>
@@ -390,6 +396,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
       <build>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
index 55e53e3..b31a4f6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseHTraceConfiguration.java
@@ -18,16 +18,15 @@
 
 package org.apache.hadoop.hbase.trace;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.htrace.HTraceConfiguration;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.htrace.core.HTraceConfiguration;
+import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
 public class HBaseHTraceConfiguration extends HTraceConfiguration {
-  private static final Log LOG =
-    LogFactory.getLog(HBaseHTraceConfiguration.class);
+  private static final Log LOG = LogFactory.getLog(HBaseHTraceConfiguration.class);
 
   public static final String KEY_PREFIX = "hbase.htrace.";
 
@@ -65,7 +64,7 @@ public class HBaseHTraceConfiguration extends HTraceConfiguration {
 
   @Override
   public String get(String key) {
-    return conf.get(KEY_PREFIX +key);
+    return conf.get(KEY_PREFIX + key);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
index cb65f09..93a5fff 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
@@ -24,10 +24,8 @@ import java.util.HashSet;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.htrace.core.SpanReceiver;
 import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.htrace.SpanReceiver;
-import org.apache.htrace.SpanReceiverBuilder;
-import org.apache.htrace.Trace;
 
 /**
  * This class provides functions for reading the names of SpanReceivers from
@@ -62,6 +60,16 @@ public class SpanReceiverHost {
 
   }
 
+  public static Configuration getConfiguration(){
+    synchronized (SingletonHolder.INSTANCE.lock) {
+      if (SingletonHolder.INSTANCE.host == null || SingletonHolder.INSTANCE.host.conf == null) {
+        return null;
+      }
+
+      return SingletonHolder.INSTANCE.host.conf;
+    }
+  }
+
   SpanReceiverHost(Configuration conf) {
     receivers = new HashSet<>();
     this.conf = conf;
@@ -78,18 +86,18 @@ public class SpanReceiverHost {
       return;
     }
 
-    SpanReceiverBuilder builder = new SpanReceiverBuilder(new HBaseHTraceConfiguration(conf));
+    SpanReceiver.Builder builder = new SpanReceiver.Builder(new HBaseHTraceConfiguration(conf));
     for (String className : receiverNames) {
       className = className.trim();
 
-      SpanReceiver receiver = builder.spanReceiverClass(className).build();
+      SpanReceiver receiver = builder.className(className).build();
       if (receiver != null) {
         receivers.add(receiver);
         LOG.info("SpanReceiver " + className + " was loaded successfully.");
       }
     }
     for (SpanReceiver rcvr : receivers) {
-      Trace.addReceiver(rcvr);
+      TraceUtil.addReceiver(rcvr);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
new file mode 100644
index 0000000..d52c67d
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/TraceUtil.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.trace;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.htrace.core.HTraceConfiguration;
+import org.apache.htrace.core.Sampler;
+import org.apache.htrace.core.Span;
+import org.apache.htrace.core.SpanReceiver;
+import org.apache.htrace.core.TraceScope;
+import org.apache.htrace.core.Tracer;
+
+/**
+ * This wrapper class provides functions for accessing htrace 4+ functionality in a simplified way.
+ */
+public final class TraceUtil {
+  private static HTraceConfiguration conf;
+  private static Tracer tracer;
+
+  private TraceUtil() {
+  }
+
+  public static void initTracer(Configuration c) {
+    if(c != null) {
+      conf = new HBaseHTraceConfiguration(c);
+    }
+
+    if (tracer == null && conf != null) {
+      tracer = new Tracer.Builder("Tracer").conf(conf).build();
+    }
+  }
+
+  /**
+   * Wrapper method to create new TraceScope with the given description
+   * @return TraceScope or null when not tracing
+   */
+  public static TraceScope createTrace(String description) {
+    return (tracer == null) ? null : tracer.newScope(description);
+  }
+
+  /**
+   * Wrapper method to create new child TraceScope with the given description
+   * and parent scope's spanId
+   * @param span parent span
+   * @return TraceScope or null when not tracing
+   */
+  public static TraceScope createTrace(String description, Span span) {
+    if(span == null) return createTrace(description);
+
+    return (tracer == null) ? null : tracer.newScope(description, span.getSpanId());
+  }
+
+  /**
+   * Wrapper method to add new sampler to the default tracer
+   * @return true if added, false if it was already added
+   */
+  public static boolean addSampler(Sampler sampler) {
+    if (sampler == null) {
+      return false;
+    }
+
+    return (tracer == null) ? false : tracer.addSampler(sampler);
+  }
+
+  /**
+   * Wrapper method to add key-value pair to TraceInfo of the current span
+   */
+  public static void addKVAnnotation(String key, String value){
+    Span span = Tracer.getCurrentSpan();
+    if (span != null) {
+      span.addKVAnnotation(key, value);
+    }
+  }
+
+  /**
+   * Wrapper method to add receiver to the current tracer pool
+   * @return true if successful, false if it was already added
+   */
+  public static boolean addReceiver(SpanReceiver rcvr) {
+    return (tracer == null) ? false : tracer.getTracerPool().addReceiver(rcvr);
+  }
+
+  /**
+   * Wrapper method to remove receiver from the current tracer pool
+   * @return true if removed, false if it doesn't exist
+   */
+  public static boolean removeReceiver(SpanReceiver rcvr) {
+    return (tracer == null) ? false : tracer.getTracerPool().removeReceiver(rcvr);
+  }
+
+  /**
+   * Wrapper method to add timeline annotation to current span with given message
+   */
+  public static void addTimelineAnnotation(String msg) {
+    Span span = Tracer.getCurrentSpan();
+    if (span != null) {
+      span.addTimelineAnnotation(msg);
+    }
+  }
+
+  /**
+   * Wrap runnable with current tracer and description
+   * @param runnable to wrap
+   * @return wrapped runnable or original runnable when not tracing
+   */
+  public static Runnable wrap(Runnable runnable, String description) {
+    return (tracer == null) ? runnable : tracer.wrap(runnable, description);
+  }
+}
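
Taken together, the new wrapper is exercised elsewhere in this patch
roughly as follows (a sketch, not part of the diff; it assumes
TraceUtil.initTracer(conf) was called at process startup):

    import org.apache.hadoop.hbase.trace.TraceUtil;
    import org.apache.htrace.core.Sampler;
    import org.apache.htrace.core.TraceScope;

    public class TraceUtilUsageSketch {
      public static void demo(Runnable work) {
        TraceUtil.addSampler(Sampler.ALWAYS); // sample everything, as the integration tests do

        try (TraceScope scope = TraceUtil.createTrace("demo.operation")) {
          TraceUtil.addTimelineAnnotation("starting work"); // no-op when not tracing
          work.run();
        }

        // Carry the current span to another thread, as
        // ResultBoundedCompletionService does with its futures.
        new Thread(TraceUtil.wrap(work, "demo.backgroundWork")).start();
      }
    }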

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-endpoint/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 101bfdb..2a135c2 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -254,6 +254,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -286,6 +292,10 @@
           <scope>test</scope>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>
@@ -297,6 +307,10 @@
           <scope>test</scope>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>
@@ -330,12 +344,22 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minicluster</artifactId>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-examples/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index e706283..49f71e3 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -245,6 +245,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
       <build>
@@ -290,10 +296,22 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minicluster</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
       <build>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-external-blockcache/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-external-blockcache/pom.xml b/hbase-external-blockcache/pom.xml
index 53708d8..845e8f3 100644
--- a/hbase-external-blockcache/pom.xml
+++ b/hbase-external-blockcache/pom.xml
@@ -173,7 +173,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.htrace</groupId>
-      <artifactId>htrace-core</artifactId>
+      <artifactId>htrace-core4</artifactId>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -245,6 +245,10 @@
           <artifactId>hadoop-common</artifactId>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>
@@ -297,6 +301,10 @@
           <artifactId>hadoop-common</artifactId>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>com.google.guava</groupId>
               <artifactId>guava</artifactId>
             </exclusion>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
----------------------------------------------------------------------
diff --git a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
index d759367..c05499c 100644
--- a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
+++ b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
@@ -1,3 +1,4 @@
+
 /**
  * Copyright The Apache Software Foundation
  *
@@ -19,32 +20,32 @@
 
 package org.apache.hadoop.hbase.io.hfile;
 
-import net.spy.memcached.CachedData;
-import net.spy.memcached.ConnectionFactoryBuilder;
-import net.spy.memcached.FailureMode;
-import net.spy.memcached.MemcachedClient;
-import net.spy.memcached.transcoders.Transcoder;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+import java.util.concurrent.ExecutionException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.nio.SingleByteBuff;
+import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.hadoop.hbase.util.Addressing;
-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
-
+import org.apache.htrace.core.TraceScope;
+import org.apache.yetus.audience.InterfaceAudience;
 
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.concurrent.ExecutionException;
+import net.spy.memcached.CachedData;
+import net.spy.memcached.ConnectionFactoryBuilder;
+import net.spy.memcached.FailureMode;
+import net.spy.memcached.MemcachedClient;
+import net.spy.memcached.transcoders.Transcoder;
 
 /**
  * Class to store blocks into memcached.
@@ -134,7 +135,7 @@ public class MemcachedBlockCache implements BlockCache {
     // Assume that nothing is the block cache
     HFileBlock result = null;
 
-    try (TraceScope traceScope = Trace.startSpan("MemcachedBlockCache.getBlock")) {
+    try (TraceScope traceScope = TraceUtil.createTrace("MemcachedBlockCache.getBlock")) {
       result = client.get(cacheKey.toString(), tc);
     } catch (Exception e) {
       // Catch a pretty broad set of exceptions to limit any changes in the memecache client

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-hadoop2-compat/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml
index c314aca..1a13979 100644
--- a/hbase-hadoop2-compat/pom.xml
+++ b/hbase-hadoop2-compat/pom.xml
@@ -170,6 +170,12 @@ limitations under the License.
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop-two.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.htrace</groupId>
+          <artifactId>htrace-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-it/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 42c8da7..0ee29e5 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -266,7 +266,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.htrace</groupId>
-      <artifactId>htrace-core</artifactId>
+      <artifactId>htrace-core4</artifactId>
     </dependency>
     <dependency>
       <groupId>javax.ws.rs</groupId>
@@ -350,6 +350,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -400,10 +406,22 @@
 	    <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minicluster</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index 71e0d0b..503d4c1 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.NamespaceNotFoundException;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.hbase.chaos.actions.Action;
 import org.apache.hadoop.hbase.chaos.actions.MoveRegionsOfTableAction;
 import org.apache.hadoop.hbase.chaos.actions.RestartActiveMasterAction;
@@ -62,20 +61,19 @@ import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.hadoop.hbase.ipc.FatalConnectionException;
 import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
+import org.apache.hadoop.hbase.testclassification.IntegrationTests;
+import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LoadTestTool;
-import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
-import org.apache.htrace.Span;
-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
-import org.apache.htrace.impl.AlwaysSampler;
+import org.apache.htrace.core.AlwaysSampler;
+import org.apache.htrace.core.Span;
+import org.apache.htrace.core.TraceScope;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects;
-
 /**
  * Integration test that should benchmark how fast HBase can recover from failures. This test starts
  * different threads:
@@ -268,7 +266,7 @@ public class IntegrationTestMTTR {
 
     loadTool = null;
   }
-  
+
   private static boolean tablesOnMaster() {
     boolean ret = true;
     String value = util.getConfiguration().get("hbase.balancer.tablesOnMaster");
@@ -369,7 +367,7 @@ public class IntegrationTestMTTR {
    */
   private static class TimingResult {
     DescriptiveStatistics stats = new DescriptiveStatistics();
-    ArrayList<Long> traces = new ArrayList<>(10);
+    ArrayList<String> traces = new ArrayList<>(10);
 
     /**
      * Add a result to this aggregate result.
@@ -377,9 +375,12 @@ public class IntegrationTestMTTR {
      * @param span Span.  To be kept if the time taken was over 1 second
      */
     public void addResult(long time, Span span) {
+      if (span == null) {
+        return;
+      }
       stats.addValue(TimeUnit.MILLISECONDS.convert(time, TimeUnit.NANOSECONDS));
       if (TimeUnit.SECONDS.convert(time, TimeUnit.NANOSECONDS) >= 1) {
-        traces.add(span.getTraceId());
+        traces.add(span.getTracerId());
       }
     }
 
@@ -419,12 +420,15 @@ public class IntegrationTestMTTR {
       final int maxIterations = 10;
       int numAfterDone = 0;
       int resetCount = 0;
+      TraceUtil.addSampler(AlwaysSampler.INSTANCE);
       // Keep trying until the rs is back up and we've gotten a put through
       while (numAfterDone < maxIterations) {
         long start = System.nanoTime();
-        TraceScope scope = null;
-        try {
-          scope = Trace.startSpan(getSpanName(), AlwaysSampler.INSTANCE);
+        Span span = null;
+        try (TraceScope scope = TraceUtil.createTrace(getSpanName())) {
+          if (scope != null) {
+            span = scope.getSpan();
+          }
           boolean actionResult = doAction();
           if (actionResult && future.isDone()) {
             numAfterDone++;
@@ -470,12 +474,8 @@ public class IntegrationTestMTTR {
             LOG.info("Too many unexpected Exceptions. Aborting.", e);
             throw e;
           }
-        } finally {
-          if (scope != null) {
-            scope.close();
-          }
         }
-        result.addResult(System.nanoTime() - start, scope.getSpan());
+        result.addResult(System.nanoTime() - start, span);
       }
       return result;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
index 327d879..780c461 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
@@ -35,9 +35,8 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.htrace.Sampler;
-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
+import org.apache.htrace.core.Sampler;
+import org.apache.htrace.core.TraceScope;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -117,13 +116,12 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
 
       for (int i = 0; i < 100; i++) {
         Runnable runnable = new Runnable() {
-          private TraceScope innerScope = null;
           private final LinkedBlockingQueue<Long> rowKeyQueue = rks;
           @Override
           public void run() {
             ResultScanner rs = null;
-            try {
-              innerScope = Trace.startSpan("Scan", Sampler.ALWAYS);
+            TraceUtil.addSampler(Sampler.ALWAYS);
+            try (TraceScope scope = TraceUtil.createTrace("Scan")){
               Table ht = util.getConnection().getTable(tableName);
               Scan s = new Scan();
               s.setStartRow(Bytes.toBytes(rowKeyQueue.take()));
@@ -137,20 +135,15 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
                 accum |= Bytes.toLong(r.getRow());
               }
 
-              innerScope.getSpan().addTimelineAnnotation("Accum result = " + accum);
+              TraceUtil.addTimelineAnnotation("Accum result = " + accum);
 
               ht.close();
               ht = null;
             } catch (IOException e) {
               e.printStackTrace();
-
-              innerScope.getSpan().addKVAnnotation(
-                  Bytes.toBytes("exception"),
-                  Bytes.toBytes(e.getClass().getSimpleName()));
-
+              TraceUtil.addKVAnnotation("exception", e.getClass().getSimpleName());
             } catch (Exception e) {
             } finally {
-              if (innerScope != null) innerScope.close();
               if (rs != null) rs.close();
             }
 
@@ -165,7 +158,6 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
       throws IOException {
     for (int i = 0; i < 100; i++) {
       Runnable runnable = new Runnable() {
-        private TraceScope innerScope = null;
         private final LinkedBlockingQueue<Long> rowKeyQueue = rowKeys;
 
         @Override
@@ -180,9 +172,9 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
           }
 
           long accum = 0;
+          TraceUtil.addSampler(Sampler.ALWAYS);
           for (int x = 0; x < 5; x++) {
-            try {
-              innerScope = Trace.startSpan("gets", Sampler.ALWAYS);
+            try (TraceScope scope = TraceUtil.createTrace("gets")) {
               long rk = rowKeyQueue.take();
               Result r1 = ht.get(new Get(Bytes.toBytes(rk)));
               if (r1 != null) {
@@ -192,14 +184,10 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
               if (r2 != null) {
                 accum |= Bytes.toLong(r2.getRow());
               }
-              innerScope.getSpan().addTimelineAnnotation("Accum = " + accum);
+              TraceUtil.addTimelineAnnotation("Accum = " + accum);
 
-            } catch (IOException e) {
+            } catch (IOException|InterruptedException ie) {
               // IGNORED
-            } catch (InterruptedException ie) {
-              // IGNORED
-            } finally {
-              if (innerScope != null) innerScope.close();
             }
           }
 
@@ -210,25 +198,18 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
   }
 
   private void createTable() throws IOException {
-    TraceScope createScope = null;
-    try {
-      createScope = Trace.startSpan("createTable", Sampler.ALWAYS);
+    TraceUtil.addSampler(Sampler.ALWAYS);
+    try (TraceScope scope = TraceUtil.createTrace("createTable")) {
       util.createTable(tableName, familyName);
-    } finally {
-      if (createScope != null) createScope.close();
     }
   }
 
   private void deleteTable() throws IOException {
-    TraceScope deleteScope = null;
-
-    try {
+    TraceUtil.addSampler(Sampler.ALWAYS);
+    try (TraceScope scope = TraceUtil.createTrace("deleteTable")) {
       if (admin.tableExists(tableName)) {
-        deleteScope = Trace.startSpan("deleteTable", Sampler.ALWAYS);
         util.deleteTable(tableName);
       }
-    } finally {
-      if (deleteScope != null) deleteScope.close();
     }
   }
 
@@ -236,9 +217,9 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
     LinkedBlockingQueue<Long> rowKeys = new LinkedBlockingQueue<>(25000);
     BufferedMutator ht = util.getConnection().getBufferedMutator(this.tableName);
     byte[] value = new byte[300];
+    TraceUtil.addSampler(Sampler.ALWAYS);
     for (int x = 0; x < 5000; x++) {
-      TraceScope traceScope = Trace.startSpan("insertData", Sampler.ALWAYS);
-      try {
+      try (TraceScope traceScope = TraceUtil.createTrace("insertData")) {
         for (int i = 0; i < 5; i++) {
           long rk = random.nextLong();
           rowKeys.add(rk);
@@ -252,8 +233,6 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
         if ((x % 1000) == 0) {
           admin.flush(tableName);
         }
-      } finally {
-        traceScope.close();
       }
     }
     admin.flush(tableName);
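
Two API shifts run through this file: annotations now go through TraceUtil, so
call sites no longer dereference innerScope.getSpan() (and need no live-span
check), and htrace-core4's addKVAnnotation takes Strings where htrace 3 took
byte[] pairs. A hedged sketch of the helpers as used above:

  import org.apache.hadoop.hbase.trace.TraceUtil;
  import org.apache.htrace.core.Sampler;
  import org.apache.htrace.core.TraceScope;

  final class AnnotationSketch {
    static void scanWithAnnotations() {
      TraceUtil.addSampler(Sampler.ALWAYS);       // sample the spans below
      try (TraceScope scope = TraceUtil.createTrace("Scan")) {
        long accum = 42L;                         // stand-in for real work
        TraceUtil.addTimelineAnnotation("Accum result = " + accum);
      } catch (RuntimeException e) {
        // String->String in htrace-core4, byte[]->byte[] in htrace 3
        TraceUtil.addKVAnnotation("exception", e.getClass().getSimpleName());
      }
    }
  }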

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-mapreduce/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/pom.xml b/hbase-mapreduce/pom.xml
index 42a50bc..883cda2 100644
--- a/hbase-mapreduce/pom.xml
+++ b/hbase-mapreduce/pom.xml
@@ -181,7 +181,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.htrace</groupId>
-      <artifactId>htrace-core</artifactId>
+      <artifactId>htrace-core4</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
@@ -247,6 +247,10 @@
       <scope>test</scope>
       <exclusions>
         <exclusion>
+          <groupId>org.apache.htrace</groupId>
+          <artifactId>htrace-core</artifactId>
+        </exclusion>
+        <exclusion>
           <groupId>com.google.guava</groupId>
           <artifactId>guava</artifactId>
         </exclusion>
@@ -332,6 +336,10 @@
           <artifactId>hadoop-common</artifactId>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>net.java.dev.jets3t</groupId>
               <artifactId>jets3t</artifactId>
             </exclusion>
@@ -378,6 +386,10 @@
           <artifactId>hadoop-hdfs</artifactId>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>javax.servlet.jsp</groupId>
               <artifactId>jsp-api</artifactId>
             </exclusion>
@@ -415,6 +427,12 @@
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minicluster</artifactId>
           <scope>test</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
 
@@ -439,11 +457,23 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
         <dependency>
           <!--maven dependency:analyze says not needed but tests fail w/o-->
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minicluster</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
     </profile>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index 40e2cb9..31d33f2 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -818,7 +818,7 @@ public class TableMapReduceUtil {
       com.google.protobuf.Message.class,
       org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations.class,
       org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists.class,
-      org.apache.htrace.Trace.class,
+      org.apache.htrace.core.Tracer.class,
       com.codahale.metrics.MetricRegistry.class,
       org.apache.commons.lang3.ArrayUtils.class,
       com.fasterxml.jackson.databind.ObjectMapper.class,
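
The single-line swap above matters because addDependencyJars ships with the
job the jar that provides each listed class; pointing at
org.apache.htrace.core.Tracer puts htrace-core4 on the task classpath instead
of the old htrace-core. A usage sketch (method name per the HBase 2.x API;
treat the wiring as illustrative):

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;

  final class DependencyJarSketch {
    static void shipTracingJar(Configuration conf) throws IOException {
      // Adds the jar containing the named class to the job's classpath --
      // after this patch, that jar is htrace-core4.
      TableMapReduceUtil.addDependencyJarsForClasses(conf,
          org.apache.htrace.core.Tracer.class);
    }
  }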

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index bc36cde..2917605 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -31,10 +31,10 @@ import java.util.Date;
 import java.util.LinkedList;
 import java.util.Locale;
 import java.util.Map;
+import java.util.NoSuchElementException;
 import java.util.Queue;
 import java.util.Random;
 import java.util.TreeMap;
-import java.util.NoSuchElementException;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
@@ -48,7 +48,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.AsyncConnection;
@@ -81,9 +80,17 @@ import org.apache.hadoop.hbase.io.hfile.RandomDistribution;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
+import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
 import org.apache.hadoop.hbase.trace.SpanReceiverHost;
-import org.apache.hadoop.hbase.util.*;
+import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.hadoop.hbase.util.ByteArrayHashKey;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Hash;
+import org.apache.hadoop.hbase.util.MurmurHash;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.YammerHistogramUtils;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -93,17 +100,15 @@ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.htrace.Sampler;
-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
-import org.apache.htrace.impl.ProbabilitySampler;
-import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
-import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.htrace.core.ProbabilitySampler;
+import org.apache.htrace.core.Sampler;
+import org.apache.htrace.core.TraceScope;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.UniformReservoir;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 /**
  * Script used to evaluate HBase performance and scalability.  Runs an HBase
@@ -1034,7 +1039,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
     protected final TestOptions opts;
 
     private final Status status;
-    private final Sampler<?> traceSampler;
+    private final Sampler traceSampler;
     private final SpanReceiverHost receiverHost;
 
     private String testName;
@@ -1182,17 +1187,15 @@ public class PerformanceEvaluation extends Configured implements Tool {
     void testTimed() throws IOException, InterruptedException {
       int startRow = getStartRow();
       int lastRow = getLastRow();
+      TraceUtil.addSampler(traceSampler);
       // Report on completion of 1/10th of total.
       for (int ii = 0; ii < opts.cycles; ii++) {
         if (opts.cycles > 1) LOG.info("Cycle=" + ii + " of " + opts.cycles);
         for (int i = startRow; i < lastRow; i++) {
           if (i % everyN != 0) continue;
           long startTime = System.nanoTime();
-          TraceScope scope = Trace.startSpan("test row", traceSampler);
-          try {
+          try (TraceScope scope = TraceUtil.createTrace("test row")) {
             testRow(i);
-          } finally {
-            scope.close();
           }
           if ( (i - startRow) > opts.measureAfter) {
             // If multiget is enabled, say set to 10, testRow() returns immediately first 9 times
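
Two PerformanceEvaluation changes are tied together here: htrace-core4's
Sampler is no longer generic (hence Sampler<?> becoming Sampler above), and
the sampler is registered once through TraceUtil.addSampler instead of being
passed to every startSpan call. A sketch of wiring a rate-based sampler,
assuming htrace-core4's ProbabilitySampler reads "sampler.fraction" from an
HTraceConfiguration:

  import org.apache.hadoop.hbase.trace.TraceUtil;
  import org.apache.htrace.core.HTraceConfiguration;
  import org.apache.htrace.core.ProbabilitySampler;
  import org.apache.htrace.core.Sampler;

  final class SamplerSketch {
    static void installSampler(double traceRate) {
      Sampler sampler;
      if (traceRate >= 1.0) {
        sampler = Sampler.ALWAYS;
      } else if (traceRate > 0.0) {
        sampler = new ProbabilitySampler(HTraceConfiguration.fromKeyValuePairs(
            "sampler.fraction", Double.toString(traceRate)));
      } else {
        sampler = Sampler.NEVER;
      }
      // Spans created via TraceUtil.createTrace afterwards consult this sampler.
      TraceUtil.addSampler(sampler);
    }
  }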

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-procedure/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml
index 764457a..bb9ce84 100644
--- a/hbase-procedure/pom.xml
+++ b/hbase-procedure/pom.xml
@@ -164,6 +164,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
     </profile>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-protocol-shaded/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml
index 4f52bba..1676691 100644
--- a/hbase-protocol-shaded/pom.xml
+++ b/hbase-protocol-shaded/pom.xml
@@ -239,6 +239,10 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.htrace</groupId>
+      <artifactId>htrace-core4</artifactId>
+    </dependency>
   </dependencies>
   <profiles>
     <!-- Skip the tests in this module -->

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-replication/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml
index a56a470..942fd8c 100644
--- a/hbase-replication/pom.xml
+++ b/hbase-replication/pom.xml
@@ -166,6 +166,10 @@
           <artifactId>hadoop-common</artifactId>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>net.java.dev.jets3t</groupId>
               <artifactId>jets3t</artifactId>
             </exclusion>
@@ -229,6 +233,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
     </profile>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-rest/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 78855df..bc2eb93 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -204,6 +204,10 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
     </dependency>
     <dependency>
@@ -387,6 +391,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
@@ -426,6 +436,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-rsgroup/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml
index ee75ef9..9b0bfe7 100644
--- a/hbase-rsgroup/pom.xml
+++ b/hbase-rsgroup/pom.xml
@@ -208,6 +208,10 @@
           <artifactId>hadoop-common</artifactId>
           <exclusions>
             <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+            <exclusion>
               <groupId>net.java.dev.jets3t</groupId>
               <artifactId>jets3t</artifactId>
             </exclusion>
@@ -270,6 +274,12 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.htrace</groupId>
+              <artifactId>htrace-core</artifactId>
+            </exclusion>
+          </exclusions>
         </dependency>
       </dependencies>
     </profile>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-server/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index 1a4689e..1e5a1f3 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -540,7 +540,12 @@
     <!-- tracing Dependencies -->
     <dependency>
       <groupId>org.apache.htrace</groupId>
+      <artifactId>htrace-core4</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
+      <version>${htrace-hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>com.lmax</groupId>

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
index cfe3d61..1056c20 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
@@ -23,11 +23,12 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.Server;
-import org.apache.htrace.Span;
-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
+import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.htrace.core.Span;
+import org.apache.htrace.core.TraceScope;
+import org.apache.htrace.core.Tracer;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Abstract base class for all HBase event handlers. Subclasses should
@@ -74,7 +75,7 @@ public abstract class EventHandler implements Runnable, Comparable<Runnable> {
    * Default base class constructor.
    */
   public EventHandler(Server server, EventType eventType) {
-    this.parent = Trace.currentSpan();
+    this.parent = Tracer.getCurrentSpan();
     this.server = server;
     this.eventType = eventType;
     seqid = seqids.incrementAndGet();
@@ -99,13 +100,10 @@ public abstract class EventHandler implements Runnable, Comparable<Runnable> {
 
   @Override
   public void run() {
-    TraceScope chunk = Trace.startSpan(this.getClass().getSimpleName(), parent);
-    try {
+    try (TraceScope scope = TraceUtil.createTrace(this.getClass().getSimpleName(), parent)) {
       process();
     } catch(Throwable t) {
       handleException(t);
-    } finally {
-      chunk.close();
     }
   }
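
EventHandler is the cross-thread case: the constructor captures
Tracer.getCurrentSpan() on the submitting thread, and run() reopens a child
scope from that parent through the two-argument TraceUtil.createTrace. A
condensed sketch of the same handoff:

  import org.apache.hadoop.hbase.trace.TraceUtil;
  import org.apache.htrace.core.Span;
  import org.apache.htrace.core.TraceScope;
  import org.apache.htrace.core.Tracer;

  final class HandlerSketch implements Runnable {
    // Captured on the submitting thread, before the executor handoff.
    private final Span parent = Tracer.getCurrentSpan();

    @Override
    public void run() {
      // Reopened on the worker thread as a child of the captured span.
      try (TraceScope scope =
          TraceUtil.createTrace(getClass().getSimpleName(), parent)) {
        // process() would run here
      }
    }
  }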
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index 9e29023..f216f42 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.ByteBufferKeyValue;
 import org.apache.hadoop.hbase.SizeCachedKeyValue;
 import org.apache.hadoop.hbase.SizeCachedNoTagsKeyValue;
+import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
@@ -59,8 +60,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.IdLock;
 import org.apache.hadoop.hbase.util.ObjectIntPair;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
+import org.apache.htrace.core.TraceScope;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
@@ -255,6 +255,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
     // Prefetch file blocks upon open if requested
     if (cacheConf.shouldPrefetchOnOpen()) {
       PrefetchExecutor.request(path, new Runnable() {
+        @Override
         public void run() {
           long offset = 0;
           long end = 0;
@@ -436,6 +437,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
    * @return the total heap size of data and meta block indexes in bytes. Does
    *         not take into account non-root blocks of a multilevel data index.
    */
+  @Override
   public long indexSize() {
     return (dataBlockIndexReader != null ? dataBlockIndexReader.heapSize() : 0)
         + ((metaBlockIndexReader != null) ? metaBlockIndexReader.heapSize()
@@ -1239,6 +1241,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
     }
   }
 
+  @Override
   public Path getPath() {
     return path;
   }
@@ -1276,10 +1279,12 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
   protected boolean decodeMemstoreTS = false;
 
 
+  @Override
   public boolean isDecodeMemStoreTS() {
     return this.decodeMemstoreTS;
   }
 
+  @Override
   public boolean shouldIncludeMemStoreTS() {
     return includesMemstoreTS;
   }
@@ -1437,8 +1442,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
 
     boolean useLock = false;
     IdLock.Entry lockEntry = null;
-    TraceScope traceScope = Trace.startSpan("HFileReaderImpl.readBlock");
-    try {
+    try (TraceScope traceScope = TraceUtil.createTrace("HFileReaderImpl.readBlock")) {
       while (true) {
         // Check cache for block. If found return.
         if (cacheConf.shouldReadBlockFromCache(expectedBlockType)) {
@@ -1453,9 +1457,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
             if (LOG.isTraceEnabled()) {
               LOG.trace("From Cache " + cachedBlock);
             }
-            if (Trace.isTracing()) {
-              traceScope.getSpan().addTimelineAnnotation("blockCacheHit");
-            }
+            TraceUtil.addTimelineAnnotation("blockCacheHit");
             assert cachedBlock.isUnpacked() : "Packed block leak.";
             if (cachedBlock.getBlockType().isData()) {
               if (updateCacheMetrics) {
@@ -1481,9 +1483,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
           // Carry on, please load.
         }
 
-        if (Trace.isTracing()) {
-          traceScope.getSpan().addTimelineAnnotation("blockCacheMiss");
-        }
+        TraceUtil.addTimelineAnnotation("blockCacheMiss");
         // Load block from filesystem.
         HFileBlock hfileBlock =
             fsBlockReader.readBlockData(dataBlockOffset, onDiskBlockSize, pread, !isCompaction);
@@ -1505,7 +1505,6 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
         return unpacked;
       }
     } finally {
-      traceScope.close();
       if (lockEntry != null) {
         offsetLock.releaseLockEntry(lockEntry);
       }
@@ -1568,6 +1567,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
     close(cacheConf.shouldEvictOnClose());
   }
 
+  @Override
   public void close(boolean evictOnClose) throws IOException {
     PrefetchExecutor.cancel(path);
     if (evictOnClose && cacheConf.isBlockCacheEnabled()) {
@@ -1580,11 +1580,13 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
     fsBlockReader.closeStreams();
   }
 
+  @Override
   public DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction) {
     return dataBlockEncoder.getEffectiveEncodingInCache(isCompaction);
   }
 
   /** For testing */
+  @Override
   public HFileBlock.FSReader getUncachedBlockReader() {
     return fsBlockReader;
   }
@@ -1612,6 +1614,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       return curBlock != null;
     }
 
+    @Override
     public void setNonSeekedState() {
       reset();
     }
@@ -1713,6 +1716,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       }
     }
 
+    @Override
     protected Cell getFirstKeyCellInBlock(HFileBlock curBlock) {
       return dataBlockEncoder.getFirstKeyCellInBlock(getEncodedBuffer(curBlock));
     }
@@ -1730,6 +1734,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       return seeker.seekToKeyInBlock(key, seekBefore);
     }
 
+    @Override
     public int compareKey(CellComparator comparator, Cell key) {
       return seeker.compareKey(comparator, key);
     }
@@ -1776,6 +1781,7 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
    * Returns false if block prefetching was requested for this file and has
    * not completed, true otherwise
    */
+  @Override
   @VisibleForTesting
   public boolean prefetchComplete() {
     return PrefetchExecutor.isCompleted(path);
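
The Trace.isTracing() guards around the blockCacheHit/blockCacheMiss
annotations can be dropped because the null check now lives once inside the
wrapper. A hypothetical sketch of what such a TraceUtil helper reduces to,
using htrace-core4's static Tracer.getCurrentSpan():

  import org.apache.htrace.core.Span;
  import org.apache.htrace.core.Tracer;

  final class GuardSketch {
    // Annotates only when a span is active, so callers such as readBlock()
    // above need no explicit isTracing()-style check of their own.
    static void addTimelineAnnotation(String msg) {
      Span span = Tracer.getCurrentSpan();
      if (span != null) {
        span.addTimelineAnnotation(msg);
      }
    }
  }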

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java
index d4fc706..141674d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java
@@ -24,6 +24,7 @@ import java.util.Optional;
 import org.apache.hadoop.hbase.CallDroppedException;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
@@ -32,8 +33,6 @@ import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.htrace.Trace;
-import org.apache.htrace.TraceScope;
 
 /**
  * The request processing logic, which is usually executed in thread pools provided by an
@@ -116,20 +115,17 @@ public class CallRunner {
       String error = null;
       Pair<Message, CellScanner> resultPair = null;
       RpcServer.CurCall.set(call);
-      TraceScope traceScope = null;
       try {
         if (!this.rpcServer.isStarted()) {
           InetSocketAddress address = rpcServer.getListenerAddress();
           throw new ServerNotRunningYetException("Server " +
               (address != null ? address : "(channel closed)") + " is not running yet");
         }
-        if (call.getTraceInfo() != null) {
-          String serviceName =
-              call.getService() != null ? call.getService().getDescriptorForType().getName() : "";
-          String methodName = (call.getMethod() != null) ? call.getMethod().getName() : "";
-          String traceString = serviceName + "." + methodName;
-          traceScope = Trace.startSpan(traceString, call.getTraceInfo());
-        }
+        String serviceName =
+            call.getService() != null ? call.getService().getDescriptorForType().getName() : "";
+        String methodName = (call.getMethod() != null) ? call.getMethod().getName() : "";
+        String traceString = serviceName + "." + methodName;
+        TraceUtil.createTrace(traceString);
         // make the call
         resultPair = this.rpcServer.call(call, this.status);
       } catch (TimeoutIOException e){
@@ -150,9 +146,6 @@ public class CallRunner {
           throw (Error)e;
         }
       } finally {
-        if (traceScope != null) {
-          traceScope.close();
-        }
         RpcServer.CurCall.set(null);
         if (resultPair != null) {
           this.rpcServer.addCallSize(call.getSize() * -1);
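
One behavioral note on the hunk above: the new code calls
TraceUtil.createTrace(traceString) without retaining the returned scope, so
nothing closes that scope when the RPC completes. If the span is meant to
bound the call, a try-with-resources form along these lines (a hypothetical
restructuring, not what this patch does; traceString, resultPair, call and
status are the hunk's own variables) would close it deterministically:

  // Hypothetical: hold the scope for the duration of the server-side call.
  try (TraceScope traceScope = TraceUtil.createTrace(traceString)) {
    resultPair = this.rpcServer.call(call, this.status);
  }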

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
index 7fd4736..f86fa77 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcServer.java
@@ -181,7 +181,7 @@ public class NettyRpcServer extends RpcServer {
       Message param, CellScanner cellScanner, long receiveTime, MonitoredRPCHandler status,
       long startTime, int timeout) throws IOException {
     NettyServerCall fakeCall = new NettyServerCall(-1, service, md, null, param, cellScanner, null,
-        -1, null, null, receiveTime, timeout, reservoir, cellBlockBuilder, null);
+        -1, null, receiveTime, timeout, reservoir, cellBlockBuilder, null);
     return call(fakeCall, status);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/377174d3/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerCall.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerCall.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerCall.java
index 7dfdc72..70b9da3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerCall.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyServerCall.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-import org.apache.htrace.TraceInfo;
 
 /**
  * Datastructure that holds all necessary to a method invocation and then afterward, carries the
@@ -40,9 +39,9 @@ class NettyServerCall extends ServerCall<NettyServerRpcConnection> {
 
   NettyServerCall(int id, BlockingService service, MethodDescriptor md, RequestHeader header,
       Message param, CellScanner cellScanner, NettyServerRpcConnection connection, long size,
-      TraceInfo tinfo, InetAddress remoteAddress, long receiveTime, int timeout,
+      InetAddress remoteAddress, long receiveTime, int timeout,
       ByteBufferPool reservoir, CellBlockBuilder cellBlockBuilder, CallCleanup reqCleanup) {
-    super(id, service, md, header, param, cellScanner, connection, size, tinfo, remoteAddress,
+    super(id, service, md, header, param, cellScanner, connection, size, remoteAddress,
         receiveTime, timeout, reservoir, cellBlockBuilder, reqCleanup);
   }