You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by da...@apache.org on 2015/05/14 00:43:19 UTC

svn commit: r1679288 - in /pig/branches/branch-0.15: CHANGES.txt ivy.xml ivy/libraries.properties src/org/apache/pig/backend/hadoop/hbase/HBaseStorage.java test/org/apache/pig/test/TestHBaseStorage.java

Author: daijy
Date: Wed May 13 22:43:18 2015
New Revision: 1679288

URL: http://svn.apache.org/r1679288
Log:
PIG-4544: Upgrade HBase to 0.98.12

Modified:
    pig/branches/branch-0.15/CHANGES.txt
    pig/branches/branch-0.15/ivy.xml
    pig/branches/branch-0.15/ivy/libraries.properties
    pig/branches/branch-0.15/src/org/apache/pig/backend/hadoop/hbase/HBaseStorage.java
    pig/branches/branch-0.15/test/org/apache/pig/test/TestHBaseStorage.java

Modified: pig/branches/branch-0.15/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.15/CHANGES.txt?rev=1679288&r1=1679287&r2=1679288&view=diff
==============================================================================
--- pig/branches/branch-0.15/CHANGES.txt (original)
+++ pig/branches/branch-0.15/CHANGES.txt Wed May 13 22:43:18 2015
@@ -66,6 +66,8 @@ PIG-4333: Split BigData tests into multi
  
 BUG FIXES
 
+PIG-4544: Upgrade Hbase to 0.98.12 (daijy)
+
 PIG-4481: e2e tests ComputeSpec_1, ComputeSpec_2 and StreamingPerformance_3 produce different result on Windows (daijy)
 
 PIG-4496: Fix CBZip2InputStream to close underlying stream (petersla via daijy)

Modified: pig/branches/branch-0.15/ivy.xml
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.15/ivy.xml?rev=1679288&r1=1679287&r2=1679288&view=diff
==============================================================================
--- pig/branches/branch-0.15/ivy.xml (original)
+++ pig/branches/branch-0.15/ivy.xml Wed May 13 22:43:18 2015
@@ -187,12 +187,14 @@
       conf="hadoop20->default;checkstyle->master">
       <exclude org="org.codehaus.jackson" module="jackson-core-asl"/>
       <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/>
+      <exclude org="io.netty" module="netty"/>
     </dependency>
     <dependency org="org.apache.avro" name="avro-mapred" rev="${avro.version}"
       conf="hadoop23->default;checkstyle->master">
       <artifact name="avro-mapred" type="jar" m:classifier="hadoop2"/>
       <exclude org="org.codehaus.jackson" module="jackson-core-asl"/>
       <exclude org="org.codehaus.jackson" module="jackson-mapper-asl"/>
+      <exclude org="io.netty" module="netty"/>
     </dependency>
     <dependency org="org.apache.avro" name="trevni-core" rev="${avro.version}"
       conf="compile->default;checkstyle->master">
@@ -247,7 +249,7 @@
     <dependency org="org.antlr" name="antlr-runtime" rev="${antlr.version}" conf="compile->default"/>
     <dependency org="org.antlr" name="ST4" rev="${stringtemplate.version}" conf="compile->default"/>
     <dependency org="org.apache.zookeeper" name="zookeeper" rev="${zookeeper.version}" conf="compile->master"/>
-    <dependency org="org.jboss.netty" name="netty" rev="3.2.2.Final" conf="test->master"/>
+    <dependency org="io.netty" name="netty" rev="${netty.version}" conf="test->master"/>
     <dependency org="dk.brics.automaton" name="automaton" rev="1.11-8" conf="compile->default"/>
 
     <dependency org="org.jruby" name="jruby-complete" rev="${jruby.version}" conf="compile->master"/>
@@ -352,12 +354,14 @@
       <exclude org="asm" module="asm"/>
     </dependency>
 
-    <dependency org="org.htrace" name="htrace-core" rev="${htrace.version}" conf="hadoop23->master"/>
+    <dependency org="org.htrace" name="htrace-core" rev="3.0.4" conf="hadoop23->master"/>
+    <dependency org="org.apache.htrace" name="htrace-core" rev="${htrace.version}" conf="hadoop23->master"/>
     <dependency org="org.fusesource.leveldbjni" name="leveldbjni-all" rev="${leveldbjni.version}"
       conf="hadoop23->master"/>
     <dependency org="org.cloudera.htrace" name="htrace-core" rev="2.00" conf="hbase95->master">
       <artifact name="htrace-core" type="jar"/>
     </dependency>
+    <dependency org="com.lmax" name="disruptor" rev="3.3.0" conf="hbase95->master"/>
 
     <!-- for TestHBaseStorage -->
     <dependency org="com.github.stephenc.high-scale-lib" name="high-scale-lib" rev="${high-scale-lib.version}"

Modified: pig/branches/branch-0.15/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.15/ivy/libraries.properties?rev=1679288&r1=1679287&r2=1679288&view=diff
==============================================================================
--- pig/branches/branch-0.15/ivy/libraries.properties (original)
+++ pig/branches/branch-0.15/ivy/libraries.properties Wed May 13 22:43:18 2015
@@ -45,7 +45,7 @@ hadoop-common.version=2.6.0
 hadoop-hdfs.version=2.6.0
 hadoop-mapreduce.version=2.6.0
 hbase94.version=0.94.1
-hbase95.version=0.96.0-${hbase.hadoop.version}
+hbase95.version=0.98.12-${hbase.hadoop.version}
 hsqldb.version=1.8.0.10
 hive.version=0.14.0
 httpcomponents.version=4.1
@@ -71,7 +71,7 @@ rhino.version=1.7R2
 antlr.version=3.4
 stringtemplate.version=4.0.4
 log4j.version=1.2.16
-netty.version=3.2.2
+netty.version=3.6.6.Final
 rats-lib.version=0.5.1
 slf4j-api.version=1.6.1
 slf4j-log4j12.version=1.6.1
@@ -97,4 +97,4 @@ parquet-pig-bundle.version=1.2.3
 snappy.version=0.2
 leveldbjni.version=1.8
 curator.version=2.6.0
-htrace.version=3.0.4
+htrace.version=3.1.0-incubating

Modified: pig/branches/branch-0.15/src/org/apache/pig/backend/hadoop/hbase/HBaseStorage.java
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.15/src/org/apache/pig/backend/hadoop/hbase/HBaseStorage.java?rev=1679288&r1=1679287&r2=1679288&view=diff
==============================================================================
--- pig/branches/branch-0.15/src/org/apache/pig/backend/hadoop/hbase/HBaseStorage.java (original)
+++ pig/branches/branch-0.15/src/org/apache/pig/backend/hadoop/hbase/HBaseStorage.java Wed May 13 22:43:18 2015
@@ -1046,7 +1046,8 @@ public class HBaseStorage extends LoadFu
      * @throws IOException
      */
     public Delete createDelete(Object key, byte type, long timestamp) throws IOException {
-        Delete delete = new Delete(objToBytes(key, type), timestamp);
+        Delete delete = new Delete(objToBytes(key, type));
+        delete.setTimestamp(timestamp);
 
         if(noWAL_) {
             delete.setWriteToWAL(false);

Modified: pig/branches/branch-0.15/test/org/apache/pig/test/TestHBaseStorage.java
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.15/test/org/apache/pig/test/TestHBaseStorage.java?rev=1679288&r1=1679287&r2=1679288&view=diff
==============================================================================
--- pig/branches/branch-0.15/test/org/apache/pig/test/TestHBaseStorage.java (original)
+++ pig/branches/branch-0.15/test/org/apache/pig/test/TestHBaseStorage.java Wed May 13 22:43:18 2015
@@ -1272,13 +1272,27 @@ public class TestHBaseStorage {
      * @throws ParseException
      */
     @Test
-    public void testNoWAL() throws IOException, ParseException {
+    public void testNoWAL() throws Exception {
         HBaseStorage hbaseStorage = new HBaseStorage(TESTCOLUMN_A, "-noWAL");
 
         Object key = "somekey";
         byte type = DataType.CHARARRAY;
-        Assert.assertFalse(hbaseStorage.createPut(key, type).getWriteToWAL());
-        Assert.assertFalse(hbaseStorage.createDelete(key, type, System.currentTimeMillis()).getWriteToWAL());
+        Put put = hbaseStorage.createPut(key, type);
+        Delete delete = hbaseStorage.createDelete(key, type, System.currentTimeMillis());
+        boolean hasDurabilityMethod = false;
+        try {
+            put.getClass().getMethod("getDurability");
+            hasDurabilityMethod = true;
+        } catch (NoSuchMethodException e) {
+        }
+        if (hasDurabilityMethod) { // Hbase version 0.95+
+            Object skipWal = Class.forName("org.apache.hadoop.hbase.client.Durability").getField("SKIP_WAL").get(put);
+            Assert.assertEquals(put.getClass().getMethod("getDurability").invoke(put), skipWal);
+            Assert.assertEquals(delete.getClass().getMethod("getDurability").invoke(delete), skipWal);
+        } else {
+            Assert.assertFalse(put.getWriteToWAL());
+            Assert.assertFalse(delete.getWriteToWAL());
+        }
     }
 
     /**
@@ -1287,13 +1301,27 @@ public class TestHBaseStorage {
      * @throws ParseException
      */
     @Test
-    public void testWIthWAL() throws IOException, ParseException {
+    public void testWIthWAL() throws Exception {
         HBaseStorage hbaseStorage = new HBaseStorage(TESTCOLUMN_A);
 
         Object key = "somekey";
         byte type = DataType.CHARARRAY;
-        Assert.assertTrue(hbaseStorage.createPut(key, type).getWriteToWAL());
-        Assert.assertTrue(hbaseStorage.createDelete(key, type, System.currentTimeMillis()).getWriteToWAL());
+        Put put = hbaseStorage.createPut(key, type);
+        Delete delete = hbaseStorage.createDelete(key, type, System.currentTimeMillis());
+        boolean hasDurabilityMethod = false;
+        try {
+            put.getClass().getMethod("getDurability");
+            hasDurabilityMethod = true;
+        } catch (NoSuchMethodException e) {
+        }
+        if (hasDurabilityMethod) { // Hbase version 0.95+
+            Object skipWal = Class.forName("org.apache.hadoop.hbase.client.Durability").getField("SKIP_WAL").get(put);
+            Assert.assertNotEquals(put.getClass().getMethod("getDurability").invoke(put), skipWal);
+            Assert.assertNotEquals(delete.getClass().getMethod("getDurability").invoke(delete), skipWal);
+        } else {
+            Assert.assertTrue(put.getWriteToWAL());
+            Assert.assertTrue(delete.getWriteToWAL());
+        }
     }
 
     /**
@@ -1552,7 +1580,7 @@ public class TestHBaseStorage {
      */
     private static long getColTimestamp(Result result, String colName) {
         byte[][] colArray = Bytes.toByteArrays(colName.split(":"));
-        return result.getColumnLatestCell(colArray[0], colArray[1]).getTimestamp();
+        return result.getColumnLatest(colArray[0], colArray[1]).getTimestamp();
     }
 
 }