Posted to commits@phoenix.apache.org by la...@apache.org on 2016/06/02 20:22:21 UTC

[02/16] phoenix git commit: PHOENIX-1642 Make Phoenix Master Branch pointing to HBase1.0.0

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java b/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java
index cee3b95..8bd918e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java
@@ -19,7 +19,7 @@ package org.apache.phoenix.trace;
 
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
-import org.cloudera.htrace.Span;
+import org.apache.htrace.Span;
 
 /**
  * Utilities for tracing

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/main/java/org/apache/phoenix/trace/util/NullSpan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/NullSpan.java b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/NullSpan.java
index 3799fdb..b4f70b9 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/NullSpan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/NullSpan.java
@@ -21,8 +21,9 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import org.cloudera.htrace.Span;
-import org.cloudera.htrace.TimelineAnnotation;
+import org.apache.htrace.Span;
+import org.apache.htrace.TimelineAnnotation;
+import org.apache.phoenix.util.StringUtil;
 
 /**
  * Fake {@link Span} that doesn't save any state, in place of <tt>null</tt> return values, to avoid
@@ -109,4 +110,9 @@ public class NullSpan implements Span {
   public String getProcessId() {
     return null;
   }
+
+  @Override
+  public String toJson() {
+    return StringUtil.EMPTY_STRING;
+  }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java
index 7cd55e8..c9add01 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java
@@ -19,6 +19,7 @@ package org.apache.phoenix.trace.util;
 
 import static org.apache.phoenix.util.StringUtil.toBytes;
 
+import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.Callable;
@@ -28,20 +29,22 @@ import javax.annotation.Nullable;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.htrace.HTraceConfiguration;
 import org.apache.phoenix.call.CallRunner;
 import org.apache.phoenix.call.CallWrapper;
 import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.parse.TraceStatement;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
 import org.apache.phoenix.trace.TraceMetricSource;
-import org.cloudera.htrace.Sampler;
-import org.cloudera.htrace.Span;
-import org.cloudera.htrace.Trace;
-import org.cloudera.htrace.TraceScope;
-import org.cloudera.htrace.Tracer;
-import org.cloudera.htrace.impl.ProbabilitySampler;
-import org.cloudera.htrace.wrappers.TraceCallable;
-import org.cloudera.htrace.wrappers.TraceRunnable;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
+import org.apache.htrace.Tracer;
+import org.apache.htrace.impl.ProbabilitySampler;
+import org.apache.htrace.wrappers.TraceCallable;
+import org.apache.htrace.wrappers.TraceRunnable;
 
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
@@ -58,10 +61,10 @@ public class Tracing {
     // Constants for tracing across the wire
     public static final String TRACE_ID_ATTRIBUTE_KEY = "phoenix.trace.traceid";
     public static final String SPAN_ID_ATTRIBUTE_KEY = "phoenix.trace.spanid";
-    
+
     // Constants for passing into the metrics system
     private static final String TRACE_METRIC_PREFIX = "phoenix.trace.instance";
-    
+
     /**
      * Manage the types of frequencies that we support. By default, we never turn on tracing.
      */
@@ -110,11 +113,12 @@ public class Tracing {
     private static Function<ConfigurationAdapter, Sampler<?>> CREATE_PROBABILITY =
             new Function<ConfigurationAdapter, Sampler<?>>() {
                 @Override
-                public Sampler<?> apply(ConfigurationAdapter conn) {
+                public Sampler<?> apply(ConfigurationAdapter conf) {
                     // get the connection properties for the probability information
-                    String probThresholdStr = conn.get(QueryServices.TRACING_PROBABILITY_THRESHOLD_ATTRIB, null);
-                    double threshold = probThresholdStr == null ? QueryServicesOptions.DEFAULT_TRACING_PROBABILITY_THRESHOLD : Double.parseDouble(probThresholdStr);
-                    return new ProbabilitySampler(threshold);
+                    Map<String, String> items = new HashMap<String, String>();
+                    items.put(ProbabilitySampler.SAMPLER_FRACTION_CONF_KEY,
+                            conf.get(QueryServices.TRACING_PROBABILITY_THRESHOLD_ATTRIB, Double.toString(QueryServicesOptions.DEFAULT_TRACING_PROBABILITY_THRESHOLD)));
+                    return new ProbabilitySampler(HTraceConfiguration.fromMap(items));
                 }
             };
 
@@ -130,6 +134,19 @@ public class Tracing {
                 conf));
     }
 
+    public static Sampler<?> getConfiguredSampler(TraceStatement traceStatement) {
+      double samplingRate = traceStatement.getSamplingRate();
+      if (samplingRate >= 1.0) {
+          return Sampler.ALWAYS;
+      } else if (samplingRate < 1.0 && samplingRate > 0.0) {
+          Map<String, String> items = new HashMap<String, String>();
+          items.put(ProbabilitySampler.SAMPLER_FRACTION_CONF_KEY, Double.toString(samplingRate));
+          return new ProbabilitySampler(HTraceConfiguration.fromMap(items));
+      } else {
+          return Sampler.NEVER;
+      }
+    }
+
     private static Sampler<?> getSampler(String traceLevel, ConfigurationAdapter conf) {
         return Frequency.getSampler(traceLevel).builder.apply(conf);
     }
@@ -202,13 +219,13 @@ public class Tracing {
     public static CallWrapper withTracing(PhoenixConnection conn, String desc) {
         return new TracingWrapper(conn, desc);
     }
-    
+
     private static void addCustomAnnotationsToSpan(@Nullable Span span, @NotNull PhoenixConnection conn) {
         Preconditions.checkNotNull(conn);
-        
+
         if (span == null) {
         	return;
-        } 
+        }
 		Map<String, String> annotations = conn.getCustomTracingAnnotations();
 		// copy over the annotations as bytes
 		for (Map.Entry<String, String> annotation : annotations.entrySet()) {

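The hunk above reflects the HTrace 3.x API: ProbabilitySampler no longer takes a bare double; its sampling fraction is read from an HTraceConfiguration. A minimal standalone sketch of that construction, mirroring CREATE_PROBABILITY and getConfiguredSampler() above (the class and method names below are illustrative, not part of the patch):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.htrace.HTraceConfiguration;
    import org.apache.htrace.Sampler;
    import org.apache.htrace.impl.ProbabilitySampler;

    public class SamplerSketch {
        // Build a sampler that traces roughly the given fraction of requests.
        public static Sampler<?> probabilitySampler(double fraction) {
            Map<String, String> items = new HashMap<String, String>();
            // The fraction is passed via the sampler's configuration key instead of the constructor.
            items.put(ProbabilitySampler.SAMPLER_FRACTION_CONF_KEY, Double.toString(fraction));
            return new ProbabilitySampler(HTraceConfiguration.fromMap(items));
        }
    }
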
http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
index c147f91..0ab9791 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
@@ -569,6 +569,10 @@ public class IndexUtil {
                     return cell.getMvccVersion();
                 }
 
+                @Override public long getSequenceId() {
+                    return cell.getSequenceId();
+                }
+
                 @Override
                 public byte[] getValueArray() {
                     return cell.getValueArray();
@@ -595,7 +599,7 @@ public class IndexUtil {
                 }
 
                 @Override
-                public short getTagsLength() {
+                public int getTagsLength() {
                     return cell.getTagsLength();
                 }
 
@@ -618,12 +622,6 @@ public class IndexUtil {
                 public byte[] getRow() {
                     return cell.getRow();
                 }
-
-                @Override
-                @Deprecated
-                public int getTagsLengthUnsigned() {
-                    return cell.getTagsLengthUnsigned();
-                }
             };
             // Wrap cell in cell that offsets row key
             result.set(i, newCell);

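The wrapper above is adjusted for the HBase 1.0 Cell interface: getSequenceId() is added alongside the existing getMvccVersion(), getTagsLength() widens from short to int, and getTagsLengthUnsigned() is removed. A small illustrative helper exercising the changed signatures (the class and method are hypothetical, not part of the patch):

    import org.apache.hadoop.hbase.Cell;

    public class CellCompatSketch {
        // Summarize the two members whose signatures changed between HBase 0.98 and 1.0.
        public static String describe(Cell cell) {
            long seqId = cell.getSequenceId();     // new in the 1.0 Cell interface
            int tagsLength = cell.getTagsLength(); // returned short in 0.98
            return "sequenceId=" + seqId + ", tagsLength=" + tagsLength;
        }
    }
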
http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/test/java/org/apache/hadoop/hbase/ipc/PhoenixIndexRpcSchedulerTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/hadoop/hbase/ipc/PhoenixIndexRpcSchedulerTest.java b/phoenix-core/src/test/java/org/apache/hadoop/hbase/ipc/PhoenixIndexRpcSchedulerTest.java
index ec18d9b..8bd8c11 100644
--- a/phoenix-core/src/test/java/org/apache/hadoop/hbase/ipc/PhoenixIndexRpcSchedulerTest.java
+++ b/phoenix-core/src/test/java/org/apache/hadoop/hbase/ipc/PhoenixIndexRpcSchedulerTest.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.ipc;
 
 import static org.junit.Assert.assertEquals;
 
+import java.net.InetSocketAddress;
 import java.util.List;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.TimeUnit;
@@ -37,6 +38,7 @@ import org.mockito.Mockito;
 public class PhoenixIndexRpcSchedulerTest {
 
     private static final Configuration conf = HBaseConfiguration.create();
+    private static final InetSocketAddress isa = new InetSocketAddress("localhost", 0);
 
     @Test
     public void testIndexPriorityWritesToIndexHandler() throws Exception {
@@ -86,7 +88,7 @@ public class PhoenixIndexRpcSchedulerTest {
     private void dispatchCallWithPriority(RpcScheduler scheduler, int priority) throws Exception {
         CallRunner task = Mockito.mock(CallRunner.class);
         RequestHeader header = RequestHeader.newBuilder().setPriority(priority).build();
-        RpcServer server = new RpcServer(null, "test-rpcserver", null, null, conf, scheduler);
+        RpcServer server = new RpcServer(null, "test-rpcserver", null, isa, conf, scheduler);
         RpcServer.Call call =
                 server.new Call(0, null, null, header, null, null, null, null, 10, null);
         Mockito.when(task.getCall()).thenReturn(call);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/PhoenixIndexRpcSchedulerFactoryTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/PhoenixIndexRpcSchedulerFactoryTest.java b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/PhoenixIndexRpcSchedulerFactoryTest.java
index 4918bba..7d08c0d 100644
--- a/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/PhoenixIndexRpcSchedulerFactoryTest.java
+++ b/phoenix-core/src/test/java/org/apache/hadoop/hbase/regionserver/PhoenixIndexRpcSchedulerFactoryTest.java
@@ -30,13 +30,13 @@ public class PhoenixIndexRpcSchedulerFactoryTest {
     @Test
     public void ensureInstantiation() throws Exception {
         Configuration conf = new Configuration(false);
-        conf.setClass(HRegionServer.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS,
+        conf.setClass(RSRpcServices.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS,
             PhoenixIndexRpcSchedulerFactory.class, RpcSchedulerFactory.class);
         // kinda lame that we copy the copy from the regionserver to do this and can't use a static
         // method, but meh
         try {
             Class<?> rpcSchedulerFactoryClass =
-                    conf.getClass(HRegionServer.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS,
+                    conf.getClass(RSRpcServices.REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS,
                         SimpleRpcSchedulerFactory.class);
             Object o = rpcSchedulerFactoryClass.newInstance();
             assertTrue(o instanceof PhoenixIndexRpcSchedulerFactory);
@@ -63,7 +63,7 @@ public class PhoenixIndexRpcSchedulerFactoryTest {
         setMinMax(conf, 0, 4);
         factory.create(conf, null);
 
-        setMinMax(conf, 101, 102);
+        setMinMax(conf, 201, 202);
         factory.create(conf, null);
 
         setMinMax(conf, 102, 101);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/TestLocalTableState.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/TestLocalTableState.java b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/TestLocalTableState.java
index 8c15551..54db5d8 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/TestLocalTableState.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/TestLocalTableState.java
@@ -76,7 +76,7 @@ public class TestLocalTableState {
       public Boolean answer(InvocationOnMock invocation) throws Throwable {
         List<KeyValue> list = (List<KeyValue>) invocation.getArguments()[0];
         KeyValue kv = new KeyValue(row, fam, qual, ts, Type.Put, stored);
-        kv.setMvccVersion(0);
+        kv.setSequenceId(0);
         list.add(kv);
         return false;
       }
@@ -115,7 +115,7 @@ public class TestLocalTableState {
     Mockito.when(region.getScanner(Mockito.any(Scan.class))).thenReturn(scanner);
     final byte[] stored = Bytes.toBytes("stored-value");
     final KeyValue storedKv = new KeyValue(row, fam, qual, ts, Type.Put, stored);
-    storedKv.setMvccVersion(2);
+    storedKv.setSequenceId(2);
     Mockito.when(scanner.next(Mockito.any(List.class))).thenAnswer(new Answer<Boolean>() {
       @Override
       public Boolean answer(InvocationOnMock invocation) throws Throwable {
@@ -129,7 +129,7 @@ public class TestLocalTableState {
     LocalTableState table = new LocalTableState(env, state, m);
     // add the kvs from the mutation
     KeyValue kv = KeyValueUtil.ensureKeyValue(m.get(fam, qual).get(0));
-    kv.setMvccVersion(0);
+    kv.setSequenceId(0);
     table.addPendingUpdates(kv);
 
     // setup the lookup
@@ -161,7 +161,7 @@ public class TestLocalTableState {
     Mockito.when(region.getScanner(Mockito.any(Scan.class))).thenReturn(scanner);
     final KeyValue storedKv =
         new KeyValue(row, fam, qual, ts, Type.Put, Bytes.toBytes("stored-value"));
-    storedKv.setMvccVersion(2);
+    storedKv.setSequenceId(2);
     Mockito.when(scanner.next(Mockito.any(List.class))).thenAnswer(new Answer<Boolean>() {
       @Override
       public Boolean answer(InvocationOnMock invocation) throws Throwable {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/data/TestIndexMemStore.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/data/TestIndexMemStore.java b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/data/TestIndexMemStore.java
index 41e7e65..42e0b03 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/data/TestIndexMemStore.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/covered/data/TestIndexMemStore.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.phoenix.hbase.index.covered.data.IndexMemStore;
 import org.junit.Test;
 
 public class TestIndexMemStore {
@@ -40,9 +39,9 @@ public class TestIndexMemStore {
     IndexMemStore store = new IndexMemStore(IndexMemStore.COMPARATOR);
     long ts = 10;
     KeyValue kv = new KeyValue(row, family, qual, ts, Type.Put, val);
-    kv.setMvccVersion(2);
+    kv.setSequenceId(2);
     KeyValue kv2 = new KeyValue(row, family, qual, ts, Type.Put, val2);
-    kv2.setMvccVersion(0);
+    kv2.setSequenceId(0);
     store.add(kv, true);
     // adding the exact same kv shouldn't change anything stored if not overwritting
     store.add(kv2, false);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestWALRecoveryCaching.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestWALRecoveryCaching.java b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestWALRecoveryCaching.java
index 375b754..60c11d7 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestWALRecoveryCaching.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestWALRecoveryCaching.java
@@ -216,7 +216,7 @@ public class TestWALRecoveryCaching {
         LOG.info("\t== Offline: " + server.getServerName());
         continue;
       }
-      List<HRegionInfo> regions = ProtobufUtil.getOnlineRegions(server);
+      List<HRegionInfo> regions = ProtobufUtil.getOnlineRegions(server.getRSRpcServices());
       LOG.info("\t" + server.getServerName() + " regions: " + regions);
     }
 
@@ -262,9 +262,9 @@ public class TestWALRecoveryCaching {
   }
 
   /**
-   * @param miniHBaseCluster
+   * @param cluster
    * @param server
-   * @param bs
+   * @param table
    * @return
    */
   private List<HRegion> getRegionsFromServerForTable(MiniHBaseCluster cluster, ServerName server,
@@ -281,9 +281,9 @@ public class TestWALRecoveryCaching {
   }
 
   /**
-   * @param miniHBaseCluster
-   * @param indexedTableName
-   * @param tableNameString
+   * @param cluster
+   * @param indexTable
+   * @param primaryTable
    */
   private ServerName ensureTablesLiveOnSameServer(MiniHBaseCluster cluster, byte[] indexTable,
       byte[] primaryTable) throws Exception {
@@ -366,7 +366,7 @@ public class TestWALRecoveryCaching {
     List<HRegion> indexRegions = cluster.getRegions(table);
     Set<ServerName> indexServers = new HashSet<ServerName>();
     for (HRegion region : indexRegions) {
-      indexServers.add(cluster.getServerHoldingRegion(region.getRegionName()));
+      indexServers.add(cluster.getServerHoldingRegion(null, region.getRegionName()));
     }
     return indexServers;
   }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java
index a3a02ce..f42dbd7 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/recovery/TestPerRegionIndexWriteCache.java
@@ -36,10 +36,10 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.wal.HLog;
-import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.phoenix.hbase.index.table.HTableInterfaceReference;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.junit.After;
@@ -70,13 +70,14 @@ public class TestPerRegionIndexWriteCache {
   @SuppressWarnings("deprecation")
 @Before
   public void setUp() throws Exception {
-      FileSystem newFS = FileSystem.get(TEST_UTIL.getConfiguration());
       Path hbaseRootDir = TEST_UTIL.getDataTestDir();
-      
+      TEST_UTIL.getConfiguration().set("hbase.rootdir", hbaseRootDir.toString());
+
+      FileSystem newFS = FileSystem.newInstance(TEST_UTIL.getConfiguration());
       HRegionInfo hri = new HRegionInfo(tableName, null, null, false);
-      Path basedir = FSUtils.getTableDir(hbaseRootDir, tableName); 
-      HLog wal = HLogFactory.createHLog(newFS, 
-          hbaseRootDir, "logs", TEST_UTIL.getConfiguration());
+      Path basedir = FSUtils.getTableDir(hbaseRootDir, tableName);
+      WALFactory walFactory = new WALFactory(TEST_UTIL.getConfiguration(), null, "TestPerRegionIndexWriteCache");
+      WAL wal = walFactory.getWAL(Bytes.toBytes("logs"));
       HTableDescriptor htd = new HTableDescriptor(tableName);
       HColumnDescriptor a = new HColumnDescriptor(Bytes.toBytes("a"));
       htd.addFamily(a);

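The setUp() change above tracks the WAL refactor in HBase 1.0: HLog and HLogFactory are replaced by the WAL interface and WALFactory. A minimal sketch of obtaining a WAL the new way (names below are illustrative; hbase.rootdir is assumed to be set on the Configuration, as the test setup does):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.wal.WAL;
    import org.apache.hadoop.hbase.wal.WALFactory;

    public class WalSketch {
        // Obtain a WAL from a WALFactory instead of HLogFactory.createHLog().
        public static WAL openWal(Configuration conf) throws IOException {
            WALFactory walFactory = new WALFactory(conf, null, "WalSketch");
            return walFactory.getWAL(Bytes.toBytes("logs"));
        }
    }
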
http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-core/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java b/phoenix-core/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java
index 4622959..eabcaca 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java
@@ -24,8 +24,8 @@ import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
-import org.cloudera.htrace.Span;
-import org.cloudera.htrace.impl.MilliSpan;
+import org.apache.htrace.Span;
+import org.apache.htrace.impl.MilliSpan;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.mockito.Mockito;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/phoenix-flume/pom.xml
----------------------------------------------------------------------
diff --git a/phoenix-flume/pom.xml b/phoenix-flume/pom.xml
index af01f6b..1777aa8 100644
--- a/phoenix-flume/pom.xml
+++ b/phoenix-flume/pom.xml
@@ -71,12 +71,12 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.cloudera.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
     </dependency>
     <dependency>
       <groupId>io.netty</groupId>
-      <artifactId>netty</artifactId>
+      <artifactId>netty-all</artifactId>
     </dependency>
     <dependency>
       <groupId>commons-codec</groupId>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/a29e163f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 92b0ed6..0e656e7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -75,12 +75,12 @@
     <test.output.tofile>true</test.output.tofile>
 
     <!-- Hadoop Versions -->
-    <hbase.version>0.98.9-hadoop2</hbase.version>
-    <hadoop-two.version>2.2.0</hadoop-two.version>
+    <hbase.version>1.0.1-SNAPSHOT</hbase.version>
+    <hadoop-two.version>2.5.1</hadoop-two.version>
 
     <!-- Dependency versions -->
     <commons-cli.version>1.2</commons-cli.version>
-    <hadoop.version>1.0.4</hadoop.version>
+    <hadoop.version>2.5.1</hadoop.version>
     <pig.version>0.12.0</pig.version>
     <jackson.version>1.8.8</jackson.version>
     <antlr.version>3.5</antlr.version>
@@ -99,9 +99,9 @@
     <findbugs.version>1.3.2</findbugs.version>
     <jline.version>2.11</jline.version>
     <snappy.version>0.3</snappy.version>
-    <netty.version>3.6.6.Final</netty.version>
+    <netty.version>4.0.23.Final</netty.version>
     <commons-codec.version>1.7</commons-codec.version>
-    <htrace.version>2.04</htrace.version>
+    <htrace.version>3.1.0-incubating</htrace.version>
     <collections.version>3.2.1</collections.version>
     <jodatime.version>2.3</jodatime.version>
 
@@ -626,13 +626,13 @@
         <version>${slf4j.version}</version>
       </dependency>
       <dependency>
-        <groupId>org.cloudera.htrace</groupId>
+        <groupId>org.apache.htrace</groupId>
         <artifactId>htrace-core</artifactId>
         <version>${htrace.version}</version>
       </dependency>
       <dependency>
         <groupId>io.netty</groupId>
-        <artifactId>netty</artifactId>
+        <artifactId>netty-all</artifactId>
         <version>${netty.version}</version>
       </dependency>
       <dependency>