You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by bu...@apache.org on 2017/07/13 14:36:53 UTC

[1/4] hbase git commit: HBASE-18365 Eliminate the findbugs warnings for hbase-common [Forced Update!]

Repository: hbase
Updated Branches:
  refs/heads/HBASE-18147 96ba853b8 -> 6d6f8f76b (forced update)
  refs/heads/branch-1.2-HBASE-18147 93f17f691 -> 9f8ee898c (forced update)


HBASE-18365 Eliminate the findbugs warnings for hbase-common


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cf636e50
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cf636e50
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cf636e50

Branch: refs/heads/HBASE-18147
Commit: cf636e50b9d2afbf0d017f2463b510ec10653a1a
Parents: 22df926
Author: Chia-Ping Tsai <ch...@gmail.com>
Authored: Thu Jul 13 19:31:59 2017 +0800
Committer: Chia-Ping Tsai <ch...@gmail.com>
Committed: Thu Jul 13 19:31:59 2017 +0800

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hbase/util/DynamicClassLoader.java   | 5 +++--
 .../main/java/org/apache/hadoop/hbase/util/OrderedBytes.java    | 2 +-
 2 files changed, 4 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/cf636e50/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
index 1a73069..a805fbf 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
@@ -179,8 +179,9 @@ public class DynamicClassLoader extends ClassLoaderBase {
 
   private synchronized void loadNewJars() {
     // Refresh local jar file lists
-    if (localDir != null) {
-      for (File file : localDir.listFiles()) {
+    File[] files = localDir == null ? null : localDir.listFiles();
+    if (files != null) {
+      for (File file : files) {
         String fileName = file.getName();
         if (jarModifiedTime.containsKey(fileName)) {
           continue;

http://git-wip-us.apache.org/repos/asf/hbase/blob/cf636e50/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
index 3e4bc6c..7ba17f9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
@@ -461,7 +461,7 @@ public class OrderedBytes {
   static int lengthVaruint64(PositionedByteRange src, boolean comp) {
     int a0 = (comp ? DESCENDING : ASCENDING).apply(src.peek()) & 0xff;
     if (a0 <= 240) return 1;
-    if (a0 >= 241 && a0 <= 248) return 2;
+    if (a0 <= 248) return 2;
     if (a0 == 249) return 3;
     if (a0 == 250) return 4;
     if (a0 == 251) return 5;


[2/4] hbase git commit: HBASE-18344 Introduce Append.addColumn as a replacement for Append.add

Posted by bu...@apache.org.
HBASE-18344 Introduce Append.addColumn as a replacement for Append.add

Signed-off-by: Chia-Ping Tsai <ch...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c0725ddf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c0725ddf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c0725ddf

Branch: refs/heads/HBASE-18147
Commit: c0725ddff11992931fa6e2e5c454177df60da585
Parents: cf636e5
Author: Jan Hentschel <ja...@ultratendency.com>
Authored: Sun Jul 9 12:11:31 2017 +0200
Committer: Chia-Ping Tsai <ch...@gmail.com>
Committed: Thu Jul 13 20:04:57 2017 +0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/client/Append.java  | 16 ++++++++++-
 .../hbase/client/TestRpcControllerFactory.java  |  2 +-
 .../hadoop/hbase/PerformanceEvaluation.java     |  2 +-
 .../hadoop/hbase/client/TestAsyncTable.java     |  2 +-
 .../hbase/client/TestAsyncTableBatch.java       |  2 +-
 .../hbase/client/TestAsyncTableNoncedRetry.java |  4 +--
 .../hadoop/hbase/client/TestFromClientSide.java | 28 ++++++++++----------
 .../hbase/client/TestFromClientSide3.java       |  2 +-
 .../hadoop/hbase/client/TestMetaCache.java      |  2 +-
 .../hadoop/hbase/client/TestMultiParallel.java  |  4 +--
 .../client/TestPutDeleteEtcCellIteration.java   |  2 +-
 .../hbase/client/TestResultFromCoprocessor.java |  2 +-
 .../hbase/coprocessor/TestHTableWrapper.java    |  2 +-
 .../TestRegionObserverInterface.java            |  2 +-
 .../hadoop/hbase/quotas/TestSpaceQuotas.java    |  4 +--
 .../hbase/regionserver/TestAtomicOperation.java | 18 ++++++-------
 .../hadoop/hbase/regionserver/TestHRegion.java  |  6 ++---
 .../regionserver/TestRegionServerMetrics.java   |  2 +-
 .../TestRegionServerReadRequestMetrics.java     |  2 +-
 .../hadoop/hbase/regionserver/TestTags.java     |  2 +-
 .../security/access/TestAccessController.java   |  2 +-
 .../visibility/TestVisibilityLabels.java        |  4 +--
 .../TestVisibilityWithCheckAuths.java           |  4 +--
 .../hadoop/hbase/util/MultiThreadedUpdater.java |  6 ++---
 .../hadoop/hbase/thrift/ThriftUtilities.java    |  2 +-
 .../hadoop/hbase/thrift2/ThriftUtilities.java   |  2 +-
 26 files changed, 70 insertions(+), 56 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index a655c7d..346eb0e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * <p>
  * To append to a set of columns of a row, instantiate an Append object with the
  * row to append to. At least one column to append must be specified using the
- * {@link #add(byte[], byte[], byte[])} method.
+ * {@link #addColumn(byte[], byte[], byte[])} method.
  */
 @InterfaceAudience.Public
 public class Append extends Mutation {
@@ -104,8 +104,22 @@ public class Append extends Mutation {
    * @param qualifier column qualifier
    * @param value value to append to specified column
    * @return this
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link #addColumn(byte[], byte[], byte[])} instead
    */
+  @Deprecated
   public Append add(byte [] family, byte [] qualifier, byte [] value) {
+    return this.addColumn(family, qualifier, value);
+  }
+
+  /**
+   * Add the specified column and value to this Append operation.
+   * @param family family name
+   * @param qualifier column qualifier
+   * @param value value to append to specified column
+   * @return this
+   */
+  public Append addColumn(byte[] family, byte[] qualifier, byte[] value) {
     KeyValue kv = new KeyValue(this.row, family, qualifier, this.ts, KeyValue.Type.Put, value);
     return add(kv);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
index 6aaef80..567f30c 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
@@ -163,7 +163,7 @@ public class TestRpcControllerFactory {
     counter = verifyCount(counter);
 
     Append append = new Append(row);
-    append.add(fam1, fam1, Bytes.toBytes("val2"));
+    append.addColumn(fam1, fam1, Bytes.toBytes("val2"));
     table.append(append);
     counter = verifyCount(counter);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 2c5cb65..8abef2b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -1558,7 +1558,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
     void testRow(final int i) throws IOException {
       byte [] bytes = format(i);
       Append append = new Append(bytes);
-      append.add(FAMILY_NAME, getQualifier(), bytes);
+      append.addColumn(FAMILY_NAME, getQualifier(), bytes);
       updateValueSize(this.table.append(append));
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTable.java
index 593c88e..2fea0eb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTable.java
@@ -199,7 +199,7 @@ public class TestAsyncTable {
     char suffix = ':';
     AtomicLong suffixCount = new AtomicLong(0L);
     IntStream.range(0, count).forEachOrdered(
-      i -> table.append(new Append(row).add(FAMILY, QUALIFIER, Bytes.toBytes("" + i + suffix)))
+      i -> table.append(new Append(row).addColumn(FAMILY, QUALIFIER, Bytes.toBytes("" + i + suffix)))
           .thenAccept(r -> {
             suffixCount.addAndGet(Bytes.toString(r.getValue(FAMILY, QUALIFIER)).chars()
                 .filter(x -> x == suffix).count());

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java
index 61ff2be..6c9dd86 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableBatch.java
@@ -186,7 +186,7 @@ public class TestAsyncTableBatch {
     actions.add(new Put(Bytes.toBytes(1)).addColumn(FAMILY, CQ, Bytes.toBytes((long) 2)));
     actions.add(new Delete(Bytes.toBytes(2)));
     actions.add(new Increment(Bytes.toBytes(3)).addColumn(FAMILY, CQ, 1));
-    actions.add(new Append(Bytes.toBytes(4)).add(FAMILY, CQ, Bytes.toBytes(4)));
+    actions.add(new Append(Bytes.toBytes(4)).addColumn(FAMILY, CQ, Bytes.toBytes(4)));
     List<Object> results = table.batchAll(actions).get();
     assertEquals(5, results.size());
     Result getResult = (Result) results.get(0);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableNoncedRetry.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableNoncedRetry.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableNoncedRetry.java
index 3f7d143..0df8ce7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableNoncedRetry.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableNoncedRetry.java
@@ -103,9 +103,9 @@ public class TestAsyncTableNoncedRetry {
   @Test
   public void testAppend() throws InterruptedException, ExecutionException {
     RawAsyncTable table = ASYNC_CONN.getRawTable(TABLE_NAME);
-    Result result = table.append(new Append(row).add(FAMILY, QUALIFIER, VALUE)).get();
+    Result result = table.append(new Append(row).addColumn(FAMILY, QUALIFIER, VALUE)).get();
     assertArrayEquals(VALUE, result.getValue(FAMILY, QUALIFIER));
-    result = table.append(new Append(row).add(FAMILY, QUALIFIER, VALUE)).get();
+    result = table.append(new Append(row).addColumn(FAMILY, QUALIFIER, VALUE)).get();
     // the second call should have no effect as we always generate the same nonce.
     assertArrayEquals(VALUE, result.getValue(FAMILY, QUALIFIER));
     result = table.get(new Get(row)).get();

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index dd9024a..a93fbb2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -209,7 +209,7 @@ public class TestFromClientSide {
 
       try {
         Append append = new Append(ROW);
-        append.add(TEST_UTIL.fam1, QUALIFIER, VALUE);
+        append.addColumn(TEST_UTIL.fam1, QUALIFIER, VALUE);
         Result result = table.append(append);
 
         // Verify expected result
@@ -1463,7 +1463,7 @@ public class TestFromClientSide {
     table.delete(delete);
 
     Append append = new Append(ROW);
-    append.add(FAMILY, null, VALUE);
+    append.addColumn(FAMILY, null, VALUE);
     table.append(append);
     getTestNull(table, ROW, FAMILY, VALUE);
 
@@ -4625,10 +4625,10 @@ public class TestFromClientSide {
     Table table = TEST_UTIL.createTable(tableName, FAMILY);
     Append append1 = new Append(Bytes.toBytes("row1"));
     append1.setReturnResults(false);
-    append1.add(FAMILY, Bytes.toBytes("f1"), Bytes.toBytes("value1"));
+    append1.addColumn(FAMILY, Bytes.toBytes("f1"), Bytes.toBytes("value1"));
     Append append2 = new Append(Bytes.toBytes("row1"));
     append2.setReturnResults(false);
-    append2.add(FAMILY, Bytes.toBytes("f1"), Bytes.toBytes("value2"));
+    append2.addColumn(FAMILY, Bytes.toBytes("f1"), Bytes.toBytes("value2"));
     List<Append> appends = new ArrayList<>();
     appends.add(append1);
     appends.add(append2);
@@ -4653,15 +4653,15 @@ public class TestFromClientSide {
         Bytes.toBytes("b"), Bytes.toBytes("a"), Bytes.toBytes("c")
     };
     Append a = new Append(ROW);
-    a.add(FAMILY, QUALIFIERS[0], v1);
-    a.add(FAMILY, QUALIFIERS[1], v2);
+    a.addColumn(FAMILY, QUALIFIERS[0], v1);
+    a.addColumn(FAMILY, QUALIFIERS[1], v2);
     a.setReturnResults(false);
     assertEmptyResult(t.append(a));
 
     a = new Append(ROW);
-    a.add(FAMILY, QUALIFIERS[0], v2);
-    a.add(FAMILY, QUALIFIERS[1], v1);
-    a.add(FAMILY, QUALIFIERS[2], v2);
+    a.addColumn(FAMILY, QUALIFIERS[0], v2);
+    a.addColumn(FAMILY, QUALIFIERS[1], v1);
+    a.addColumn(FAMILY, QUALIFIERS[2], v2);
     Result r = t.append(a);
     assertEquals(0, Bytes.compareTo(Bytes.add(v1, v2), r.getValue(FAMILY, QUALIFIERS[0])));
     assertEquals(0, Bytes.compareTo(Bytes.add(v2, v1), r.getValue(FAMILY, QUALIFIERS[1])));
@@ -4683,16 +4683,16 @@ public class TestFromClientSide {
     Put put_1 = new Put(row3);
     put_1.addColumn(FAMILY, qual, Bytes.toBytes("put"));
     Append append_0 = new Append(row1);
-    append_0.add(FAMILY, qual, Bytes.toBytes("i"));
+    append_0.addColumn(FAMILY, qual, Bytes.toBytes("i"));
     Append append_1 = new Append(row1);
-    append_1.add(FAMILY, qual, Bytes.toBytes("k"));
+    append_1.addColumn(FAMILY, qual, Bytes.toBytes("k"));
     Append append_2 = new Append(row1);
-    append_2.add(FAMILY, qual, Bytes.toBytes("e"));
+    append_2.addColumn(FAMILY, qual, Bytes.toBytes("e"));
     if (!walUsed) {
       append_2.setDurability(Durability.SKIP_WAL);
     }
     Append append_3 = new Append(row1);
-    append_3.add(FAMILY, qual, Bytes.toBytes("a"));
+    append_3.addColumn(FAMILY, qual, Bytes.toBytes("a"));
     Scan s = new Scan();
     s.setCaching(1);
     t.append(append_0);
@@ -6416,7 +6416,7 @@ public class TestFromClientSide {
         // expected
       }
       try {
-        t.append(new Append(ROW).add(FAMILY, QUALIFIER, new byte[10 * 1024]));
+        t.append(new Append(ROW).addColumn(FAMILY, QUALIFIER, new byte[10 * 1024]));
         fail("Oversize cell failed to trigger exception");
       } catch (IOException e) {
         // expected

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index 9eaa716..668bfbb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -597,7 +597,7 @@ public class TestFromClientSide3 {
       ExecutorService appendService = Executors.newSingleThreadExecutor();
       appendService.execute(() -> {
         Append append = new Append(ROW);
-        append.add(FAMILY, QUALIFIER, VALUE);
+        append.addColumn(FAMILY, QUALIFIER, VALUE);
         try (Table table = con.getTable(tableName)) {
           table.append(append);
           fail("The APPEND should fail because the target lock is blocked by previous put");

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
index 7a32e6a..883ae3e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java
@@ -103,7 +103,7 @@ public class TestMetaCache {
       put.addColumn(FAMILY, QUALIFIER, Bytes.toBytes(10));
       Get get = new Get(row);
       Append append = new Append(row);
-      append.add(FAMILY, QUALIFIER, Bytes.toBytes(11));
+      append.addColumn(FAMILY, QUALIFIER, Bytes.toBytes(11));
       Increment increment = new Increment(row);
       increment.addColumn(FAMILY, QUALIFIER, 10);
       Delete delete = new Delete(row);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index a3bcc76..0454ed8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -476,8 +476,8 @@ public class TestMultiParallel {
     inc.addColumn(BYTES_FAMILY, QUAL3, 1);
 
     Append a = new Append(ONE_ROW);
-    a.add(BYTES_FAMILY, QUAL1, Bytes.toBytes("def"));
-    a.add(BYTES_FAMILY, QUAL4, Bytes.toBytes("xyz"));
+    a.addColumn(BYTES_FAMILY, QUAL1, Bytes.toBytes("def"));
+    a.addColumn(BYTES_FAMILY, QUAL4, Bytes.toBytes("xyz"));
     List<Row> actions = new ArrayList<>();
     actions.add(inc);
     actions.add(a);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index 60eb92f..53cbd0f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -100,7 +100,7 @@ public class TestPutDeleteEtcCellIteration {
     Append a = new Append(ROW);
     for (int i = 0; i < COUNT; i++) {
       byte [] bytes = Bytes.toBytes(i);
-      a.add(bytes, bytes, bytes);
+      a.addColumn(bytes, bytes, bytes);
     }
     int index = 0;
     for (CellScanner cellScanner = a.cellScanner(); cellScanner.advance();) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
index 4a81cc2..ebbf47a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
@@ -74,7 +74,7 @@ public class TestResultFromCoprocessor {
       t.put(put);
       assertRowAndValue(t.get(new Get(ROW)), ROW, VALUE);
       Append append = new Append(ROW);
-      append.add(FAMILY, QUAL, FIXED_VALUE);
+      append.addColumn(FAMILY, QUAL, FIXED_VALUE);
       assertRowAndValue(t.append(append), ROW, FIXED_VALUE);
       assertRowAndValue(t.get(new Get(ROW)), ROW, Bytes.add(VALUE, FIXED_VALUE));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
index 9f20ba2..4f80876 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
@@ -196,7 +196,7 @@ public class TestHTableWrapper {
 
   private void checkAppend() throws IOException {
     final byte[] appendValue = Bytes.toBytes("append");
-    Append append = new Append(qualifierCol1).add(TEST_FAMILY, qualifierCol1, appendValue);
+    Append append = new Append(qualifierCol1).addColumn(TEST_FAMILY, qualifierCol1, appendValue);
     Result appendResult = hTableInterface.append(append);
     byte[] appendedRow = appendResult.getRow();
     checkRowValue(appendedRow, appendValue);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index 7b4cc40..c4924bb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -286,7 +286,7 @@ public class TestRegionObserverInterface {
     Table table = util.createTable(tableName, new byte[][] { A, B, C });
     try {
       Append app = new Append(Bytes.toBytes(0));
-      app.add(A, A, A);
+      app.addColumn(A, A, A);
 
       verifyMethodResult(SimpleRegionObserver.class,
         new String[] { "hadPreAppend", "hadPostAppend", "hadPreAppendAfterRowLock" }, tableName,

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java
index 888ad9e..83108c6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotas.java
@@ -126,7 +126,7 @@ public class TestSpaceQuotas {
   @Test
   public void testNoInsertsWithAppend() throws Exception {
     Append a = new Append(Bytes.toBytes("to_reject"));
-    a.add(
+    a.addColumn(
         Bytes.toBytes(SpaceQuotaHelperForTests.F1), Bytes.toBytes("to"), Bytes.toBytes("reject"));
     writeUntilViolationAndVerifyViolation(SpaceViolationPolicy.NO_INSERTS, a);
   }
@@ -162,7 +162,7 @@ public class TestSpaceQuotas {
   @Test
   public void testNoWritesWithAppend() throws Exception {
     Append a = new Append(Bytes.toBytes("to_reject"));
-    a.add(
+    a.addColumn(
         Bytes.toBytes(SpaceQuotaHelperForTests.F1), Bytes.toBytes("to"), Bytes.toBytes("reject"));
     writeUntilViolationAndVerifyViolation(SpaceViolationPolicy.NO_WRITES, a);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index 1cef625..1936c98 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -131,12 +131,12 @@ public class TestAtomicOperation {
     String v2 = " is... 42.";
     Append a = new Append(row);
     a.setReturnResults(false);
-    a.add(fam1, qual1, Bytes.toBytes(v1));
-    a.add(fam1, qual2, Bytes.toBytes(v2));
+    a.addColumn(fam1, qual1, Bytes.toBytes(v1));
+    a.addColumn(fam1, qual2, Bytes.toBytes(v2));
     assertTrue(region.append(a, HConstants.NO_NONCE, HConstants.NO_NONCE).isEmpty());
     a = new Append(row);
-    a.add(fam1, qual1, Bytes.toBytes(v2));
-    a.add(fam1, qual2, Bytes.toBytes(v1));
+    a.addColumn(fam1, qual1, Bytes.toBytes(v2));
+    a.addColumn(fam1, qual2, Bytes.toBytes(v1));
     Result result = region.append(a, HConstants.NO_NONCE, HConstants.NO_NONCE);
     assertEquals(0, Bytes.compareTo(Bytes.toBytes(v1+v2), result.getValue(fam1, qual1)));
     assertEquals(0, Bytes.compareTo(Bytes.toBytes(v2+v1), result.getValue(fam1, qual2)));
@@ -147,8 +147,8 @@ public class TestAtomicOperation {
     initHRegion(tableName, name.getMethodName(), fam1);
     final String v1 = "Value";
     final Append a = new Append(row);
-    a.add(fam1, qual1, Bytes.toBytes(v1));
-    a.add(fam2, qual2, Bytes.toBytes(v1));
+    a.addColumn(fam1, qual1, Bytes.toBytes(v1));
+    a.addColumn(fam2, qual2, Bytes.toBytes(v1));
     Result result = null;
     try {
       result = region.append(a, HConstants.NO_NONCE, HConstants.NO_NONCE);
@@ -327,9 +327,9 @@ public class TestAtomicOperation {
           for (int i=0; i<numOps; i++) {
             try {
               Append a = new Append(row);
-              a.add(fam1, qual1, val);
-              a.add(fam1, qual2, val);
-              a.add(fam2, qual3, val);
+              a.addColumn(fam1, qual1, val);
+              a.addColumn(fam1, qual2, val);
+              a.addColumn(fam2, qual3, val);
               a.setDurability(Durability.ASYNC_WAL);
               region.append(a, HConstants.NO_NONCE, HConstants.NO_NONCE);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 7c4e329..569f4f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -1387,7 +1387,7 @@ public class TestHRegion {
     boolean exceptionCaught = false;
     Append append = new Append(Bytes.toBytes("somerow"));
     append.setDurability(Durability.SKIP_WAL);
-    append.add(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"),
+    append.addColumn(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"),
         Bytes.toBytes("somevalue"));
     try {
       region.append(append);
@@ -4364,7 +4364,7 @@ public class TestHRegion {
       int count = 0;
       while (count < appendCounter) {
         Append app = new Append(appendRow);
-        app.add(family, qualifier, CHAR);
+        app.addColumn(family, qualifier, CHAR);
         count++;
         try {
           region.append(app);
@@ -6167,7 +6167,7 @@ public class TestHRegion {
     edge.setValue(10);
     Append a = new Append(row);
     a.setDurability(Durability.SKIP_WAL);
-    a.add(fam1, qual1, qual1);
+    a.addColumn(fam1, qual1, qual1);
     region.append(a);
 
     Result result = region.get(new Get(row));

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index f08fba0..128b010 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -365,7 +365,7 @@ public class TestRegionServerMetrics {
 
     for(int count = 0; count< 73; count++) {
       Append append = new Append(row);
-      append.add(cf, qualifier, Bytes.toBytes(",Test"));
+      append.addColumn(cf, qualifier, Bytes.toBytes(",Test"));
       table.append(append);
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java
index 6867b99..9242c0c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java
@@ -284,7 +284,7 @@ public class TestRegionServerReadRequestMetrics {
 
     // test for append
     append = new Append(ROW1);
-    append.add(CF1, COL2, VAL2);
+    append.addColumn(CF1, COL2, VAL2);
     result = table.append(append);
     resultCount = result.isEmpty() ? 0 : 1;
     testReadRequests(resultCount, 1, 0);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
index 40eebb6..e83f1b2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
@@ -456,7 +456,7 @@ public class TestTags {
       put.setAttribute("visibility", Bytes.toBytes("tag1"));
       table.put(put);
       Append append = new Append(row3);
-      append.add(f, q, Bytes.toBytes("b"));
+      append.addColumn(f, q, Bytes.toBytes("b"));
       table.append(append);
       TestCoprocessorForTags.checkTagPresence = true;
       scanner = table.getScanner(new Scan().setStartRow(row3));

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index bcf8670..5ecc471 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -1163,7 +1163,7 @@ public class TestAccessController extends SecureTestUtil {
         Put put = new Put(row);
         put.addColumn(TEST_FAMILY, qualifier, Bytes.toBytes(1));
         Append append = new Append(row);
-        append.add(TEST_FAMILY, qualifier, Bytes.toBytes(2));
+        append.addColumn(TEST_FAMILY, qualifier, Bytes.toBytes(2));
         try(Connection conn = ConnectionFactory.createConnection(conf);
             Table t = conn.getTable(TEST_TABLE)) {
           t.put(put);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
index 7ac5f34..2b5c78c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
@@ -636,12 +636,12 @@ public abstract class TestVisibilityLabels {
       Result result = table.get(get);
       assertTrue(result.isEmpty());
       Append append = new Append(row1);
-      append.add(fam, qual, Bytes.toBytes("b"));
+      append.addColumn(fam, qual, Bytes.toBytes("b"));
       table.append(append);
       result = table.get(get);
       assertTrue(result.isEmpty());
       append = new Append(row1);
-      append.add(fam, qual, Bytes.toBytes("c"));
+      append.addColumn(fam, qual, Bytes.toBytes("c"));
       append.setCellVisibility(new CellVisibility(SECRET));
       table.append(append);
       result = table.get(get);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
index b24deb3..e3d485b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
@@ -191,7 +191,7 @@ public class TestVisibilityWithCheckAuths {
           try (Connection connection = ConnectionFactory.createConnection(conf);
                Table table = connection.getTable(tableName)) {
             Append append = new Append(row1);
-            append.add(fam, qual, Bytes.toBytes("b"));
+            append.addColumn(fam, qual, Bytes.toBytes("b"));
             table.append(append);
           }
           return null;
@@ -204,7 +204,7 @@ public class TestVisibilityWithCheckAuths {
           try (Connection connection = ConnectionFactory.createConnection(conf);
                Table table = connection.getTable(tableName)) {
             Append append = new Append(row1);
-            append.add(fam, qual, Bytes.toBytes("c"));
+            append.addColumn(fam, qual, Bytes.toBytes("c"));
             append.setCellVisibility(new CellVisibility(PUBLIC));
             table.append(append);
             Assert.fail("Testcase should fail with AccesDeniedException");

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
index 564c000..4a170a6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
@@ -151,7 +151,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase {
               buf.setLength(0); // Clear the buffer
               buf.append("#").append(Bytes.toString(INCREMENT));
               buf.append(":").append(MutationType.INCREMENT.getNumber());
-              app.add(cf, MUTATE_INFO, Bytes.toBytes(buf.toString()));
+              app.addColumn(cf, MUTATE_INFO, Bytes.toBytes(buf.toString()));
               ++columnCount;
               if (!isBatchUpdate) {
                 mutate(table, inc, rowKeyBase);
@@ -220,9 +220,9 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase {
                     break;
                   default:
                     buf.append(MutationType.APPEND.getNumber());
-                    app.add(cf, column, hashCodeBytes);
+                    app.addColumn(cf, column, hashCodeBytes);
                   }
-                  app.add(cf, MUTATE_INFO, Bytes.toBytes(buf.toString()));
+                  app.addColumn(cf, MUTATE_INFO, Bytes.toBytes(buf.toString()));
                   if (!isBatchUpdate) {
                     mutate(table, app, rowKeyBase);
                     numCols.addAndGet(1);

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index 7ec49fb..0301bbc 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -228,7 +228,7 @@ public class ThriftUtilities {
 
     for (int i = 0; i < length; i++) {
       byte[][] famAndQf = KeyValue.parseColumn(getBytes(columns.get(i)));
-      append.add(famAndQf[0], famAndQf[1], getBytes(values.get(i)));
+      append.addColumn(famAndQf[0], famAndQf[1], getBytes(values.get(i)));
     }
     return append;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c0725ddf/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
index 69015ab..3807bec 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
@@ -482,7 +482,7 @@ public class ThriftUtilities {
   public static Append appendFromThrift(TAppend append) throws IOException {
     Append out = new Append(append.getRow());
     for (TColumnValue column : append.getColumns()) {
-      out.add(column.getFamily(), column.getQualifier(), column.getValue());
+      out.addColumn(column.getFamily(), column.getQualifier(), column.getValue());
     }
 
     if (append.isSetAttributes()) {


[4/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

Posted by bu...@apache.org.
HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile
* TODO branch with multiple jdk versions


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9f8ee898
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9f8ee898
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9f8ee898

Branch: refs/heads/branch-1.2-HBASE-18147
Commit: 9f8ee898c208a652fa5fcc71a71a14a7b394f1f7
Parents: 2857b75
Author: Sean Busbey <se...@gmail.com>
Authored: Tue Jul 4 15:12:38 2017 -0400
Committer: Sean Busbey <bu...@apache.org>
Committed: Thu Jul 13 09:36:27 2017 -0500

----------------------------------------------------------------------
 dev-support/Jenkinsfile       | 193 +++++++++++++++++++++++++++++++++++++
 dev-support/docker/Dockerfile |  29 ++++++
 2 files changed, 222 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/9f8ee898/dev-support/Jenkinsfile
----------------------------------------------------------------------
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 0000000..4fad9c8
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,193 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+    node {
+      label 'Hadoop'
+    }
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+    maven 'Maven (latest)'
+  }
+  triggers {
+    cron('@daily')
+  }
+  options {
+    buildDiscarder(logRotator(numToKeepStr: '30'))
+    timeout (time: 6, unit: 'HOURS')
+    timestamps()
+  }
+  environment {
+    TOOLS = "${env.WORKSPACE}/tools"
+    // where we check out to across stages
+    BASEDIR = "${env.WORKSPACE}/component"
+    YETUS_RELEASE = '0.4.0'
+    // where we'll write everything from different steps.
+    OUTPUT_RELATIVE = 'output'
+    OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
+
+    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
+    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
+  }
+  stages {
+    stage ('yetus check') {
+      environment {
+        PROJECT = 'hbase'
+        PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+        // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
+        AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+        WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+        // output from surefire; sadly the archive function in yetus only works on file names.
+        ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
+        TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+        BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+        EXCLUDE_TESTS_URL = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+      }
+      steps {
+    // TODO we can move the yetus install into a different stage and then use stash to deploy it.
+        sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+    echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+    rm -rf "${WORKSPACE}/.gpg"
+    mkdir -p "${WORKSPACE}/.gpg"
+    chmod -R 700 "${WORKSPACE}/.gpg"
+
+    echo "install yetus project KEYS"
+    curl -L --fail -o "${WORKSPACE}/KEYS_YETUS" https://dist.apache.org/repos/dist/release/yetus/KEYS
+    gpg --homedir "${WORKSPACE}/.gpg" --import "${WORKSPACE}/KEYS_YETUS"
+
+    echo "download yetus release ${YETUS_RELEASE}"
+    curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz"
+    curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
+    echo "verifying yetus release"
+    gpg --homedir "${WORKSPACE}/.gpg" --verify "yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
+    mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
+  else
+    echo "Reusing cached download of Apache Yetus version ${YETUS_RELEASE}."
+  fi
+else
+  YETUS_DIR="${WORKSPACE}/yetus-git"
+  rm -rf "${YETUS_DIR}"
+  echo "downloading from github"
+  curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
+fi
+if [ ! -d "${YETUS_DIR}" ]; then
+  echo "unpacking yetus into '${YETUS_DIR}'"
+  mkdir -p "${YETUS_DIR}"
+  gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
+fi
+	'''
+    // TODO we can move the personality install into a different stage and then use stash to deploy it.
+	dir ("${env.TOOLS}") {
+	  sh """#!/usr/bin/env bash
+echo "Downloading Project personality."
+curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
+	"""
+	}
+	sh '''#!/usr/bin/env bash
+YETUS_ARGS=()
+YETUS_ARGS=("--multijdktests=compile,findbugs,unit" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--multijdkdir=/usr/lib/jvm/java-8-oracle" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--personality=${TOOLS}/personality.sh" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--basedir=${BASEDIR}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--archive-list=${ARCHIVE_PATTERN_LIST}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--console-urls" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--build-url-patchdir=artifact/${OUTPUT_RELATIVE}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--docker" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--dockerfile=${BRANCH_SPECIFIC_DOCKERFILE}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--empty-patch" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--html-report-file=${OUTPUTDIR}/console-report.html" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--jenkins" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--mvn-custom-repos" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--patch-dir=${OUTPUTDIR}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--project=${PROJECT}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--resetrepo" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--author-ignore-list=${AUTHOR_IGNORE_LIST}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--whitespace-eol-ignore-list=${WHITESPACE_IGNORE_LIST}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--whitespace-tabs-ignore-list=${WHITESPACE_IGNORE_LIST}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--sentinel" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--branch=${BRANCH_NAME}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--tests-filter=${TESTS_FILTER}" "${YETUS_ARGS[@]}")
+
+if [[ true == "${DEBUG}" ]]; then
+  YETUS_ARGS=("--debug" "${YETUS_ARGS[@]}")
+fi
+
+rm -rf "${OUTPUTDIR}"
+mkdir -p "${OUTPUTDIR}"
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_ARGS=("--shelldocs=${WORKSPACE}/yetus-${YETUS_RELEASE}/bin/shelldocs" "${YETUS_ARGS[@]}")
+  TESTPATCHBIN="${WORKSPACE}/yetus-${YETUS_RELEASE}/bin/test-patch"
+else
+  YETUS_ARGS=("--shelldocs=${WORKSPACE}/yetus-git/shelldocs/shelldocs.py" "${YETUS_ARGS[@]}")
+  TESTPATCHBIN="${WORKSPACE}/yetus-git/precommit/test-patch.sh"
+fi
+echo "Launching yetus with command line:"
+echo "${TESTPATCHBIN} ${YETUS_ARGS[*]}"
+
+/usr/bin/env bash "${TESTPATCHBIN}" "${YETUS_ARGS[@]}"
+	'''
+      }
+    }
+  }
+  post {
+    always {
+      // TODO confirm junit step accounts for multijdk results
+      junit 'output/**/target/**/TEST-*.xml'
+      // gzip surefire reports.
+      sh '''#!/bin/bash -e
+        if [ -d "${OUTPUTDIR}/archiver" ]; then
+          count=$(find "${OUTPUTDIR}/archiver" -type f | wc -l)
+          if [[ 0 -ne ${count} ]]; then
+            echo "zipping ${count} archived files"
+            zip -m -r "${OUTPUTDIR}/test_logs.zip" "${OUTPUTDIR}/archiver"
+          else
+            echo "No archived files, skipping compressing."
+          fi
+        else
+          echo "No archiver directory, skipping compressing."
+        fi
+'''
+      // env variables don't work in archive? or this has to be relative to WORKSPACE. :(
+      archive 'output/*'
+      archive 'output/**/*'
+      publishHTML target: [
+        allowMissing: true,
+        keepAll: true,
+        alwaysLinkToLastBuild: true,
+        // has to be relative to WORKSPACE :(
+        reportDir: 'output',
+        reportFiles: 'console-report.html',
+        reportName: 'Nightly Build Report'
+      ]
+    }
+    failure {
+      deleteDir()
+    }
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/hbase/blob/9f8ee898/dev-support/docker/Dockerfile
----------------------------------------------------------------------
diff --git a/dev-support/docker/Dockerfile b/dev-support/docker/Dockerfile
index e83600f..9515418 100644
--- a/dev-support/docker/Dockerfile
+++ b/dev-support/docker/Dockerfile
@@ -47,6 +47,7 @@ RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
     libbz2-dev \
     libcurl4-openssl-dev \
     libfuse-dev \
+    libperl-critic-perl \
     libprotobuf-dev \
     libprotoc-dev \
     libsnappy-dev \
@@ -127,6 +128,34 @@ RUN pip install pylint
 ####
 RUN pip install python-dateutil
 
+####
+# Install Ruby 2, based on Yetus 0.4.0 dockerfile
+###
+RUN echo 'gem: --no-rdoc --no-ri' >> /root/.gemrc
+RUN apt-get -q install -y ruby2.0
+#
+# on trusty, the above installs ruby2.0 and ruby (1.9.3) exes
+# but update-alternatives is broken, so we need to do some work
+# to make 2.0 actually the default without the system flipping out
+#
+# See https://bugs.launchpad.net/ubuntu/+source/ruby2.0/+bug/1310292
+#
+RUN dpkg-divert --add --rename --divert /usr/bin/ruby.divert /usr/bin/ruby
+RUN dpkg-divert --add --rename --divert /usr/bin/gem.divert /usr/bin/gemrc
+RUN update-alternatives --install /usr/bin/ruby ruby /usr/bin/ruby2.0 1
+RUN update-alternatives --install /usr/bin/gem gem /usr/bin/gem2.0 1
+
+
+####
+# Install rubocop
+###
+RUN gem install rubocop
+
+####
+# Install ruby-lint
+###
+RUN gem install ruby-lint
+
 ###
 # Avoid out of memory errors in builds
 ###


[3/4] hbase git commit: HBASE-18147 POC jenkinsfile for nightly checks.

Posted by bu...@apache.org.
HBASE-18147 POC jenkinsfile for nightly checks.

* adds ruby tools to dockerfile
* adds rubocop to dockerfile
* adds ruby-lint to dockerfile
* adds perlcritic to dockerfile
* TODO branch with multiple jdk versions


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6d6f8f76
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6d6f8f76
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6d6f8f76

Branch: refs/heads/HBASE-18147
Commit: 6d6f8f76bf60bbf5ddfa64dabe1811f481b04e4e
Parents: c0725dd
Author: Sean Busbey <se...@gmail.com>
Authored: Tue Jul 4 15:12:38 2017 -0400
Committer: Sean Busbey <bu...@apache.org>
Committed: Thu Jul 13 09:35:22 2017 -0500

----------------------------------------------------------------------
 dev-support/Jenkinsfile       | 196 +++++++++++++++++++++++++++++++++++++
 dev-support/docker/Dockerfile |  29 ++++++
 2 files changed, 225 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/6d6f8f76/dev-support/Jenkinsfile
----------------------------------------------------------------------
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
new file mode 100644
index 0000000..9de7dea
--- /dev/null
+++ b/dev-support/Jenkinsfile
@@ -0,0 +1,196 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+pipeline {
+  agent {
+    node {
+      label 'Hadoop'
+    }
+  }
+  // work around for YETUS-508, requires maven outside of the dockerfile
+  tools {
+    maven 'Maven (latest)'
+  }
+  triggers {
+    cron('@daily')
+  }
+  options {
+    buildDiscarder(logRotator(numToKeepStr: '30'))
+    timeout (time: 6, unit: 'HOURS')
+    timestamps()
+  }
+  environment {
+    TOOLS = "${env.WORKSPACE}/tools"
+    // where we check out to across stages
+    BASEDIR = "${env.WORKSPACE}/component"
+    YETUS_RELEASE = '0.4.0'
+    // where we'll write everything from different steps.
+    OUTPUT_RELATIVE = 'output'
+    OUTPUTDIR = "${env.WORKSPACE}/output"
+  }
+  parameters {
+    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
+
+    Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
+    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
+  }
+  stages {
+    stage ('yetus check') {
+      environment {
+        PROJECT = 'hbase'
+        PROJET_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
+        // This section of the docs tells folks not to use the javadoc tag. older branches have our old version of the check for said tag.
+        AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
+        WHITESPACE_IGNORE_LIST = '.*/generated/.*'
+        // output from surefire; sadly the archive function in yetus only works on file names.
+        ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*-output.txt,org.apache.h*.txt'
+// These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
+        TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop'
+        BRANCH_SPECIFIC_DOCKERFILE = "${env.BASEDIR}/dev-support/docker/Dockerfile"
+        EXCLUDE_TESTS_URL = 'https://builds.apache.org/job/HBase-Find-Flaky-Tests/lastSuccessfulBuild/artifact/excludes/'
+      }
+      steps {
+    // TODO we can move the yetus install into a different stage and then use stash to deploy it.
+        sh  '''#!/usr/bin/env bash
+printenv
+echo "Ensure we have a copy of Apache Yetus."
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
+  echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
+  if [ ! -d "${YETUS_DIR}" ]; then
+    echo "New download of Apache Yetus version ${YETUS_RELEASE}."
+    rm -rf "${WORKSPACE}/.gpg"
+    mkdir -p "${WORKSPACE}/.gpg"
+    chmod -R 700 "${WORKSPACE}/.gpg"
+
+    echo "install yetus project KEYS"
+    curl -L --fail -o "${WORKSPACE}/KEYS_YETUS" https://dist.apache.org/repos/dist/release/yetus/KEYS
+    gpg --homedir "${WORKSPACE}/.gpg" --import "${WORKSPACE}/KEYS_YETUS"
+
+    echo "download yetus release ${YETUS_RELEASE}"
+    curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz"
+    curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
+    echo "verifying yetus release"
+    gpg --homedir "${WORKSPACE}/.gpg" --verify "yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
+    mv "yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
+  else
+    echo "Reusing cached download of Apache Yetus version ${YETUS_RELEASE}."
+  fi
+else
+  YETUS_DIR="${WORKSPACE}/yetus-git"
+  rm -rf "${YETUS_DIR}"
+  echo "downloading from github"
+  curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
+fi
+if [ ! -d "${YETUS_DIR}" ]; then
+  echo "unpacking yetus into '${YETUS_DIR}'"
+  mkdir -p "${YETUS_DIR}"
+  gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
+fi
+	'''
+    // TODO we can move the personality install into a different stage and then use stash to deploy it.
+	dir ("${env.TOOLS}") {
+	  sh """#!/usr/bin/env bash
+echo "Downloading Project personality."
+curl -L  -o personality.sh "${env.PROJET_PERSONALITY}"
+	"""
+	}
+	sh '''#!/usr/bin/env bash
+YETUS_ARGS=()
+YETUS_ARGS=("--multijdktests=compile,findbugs,unit" "${YETUS_ARGS[@]}")
+# On branch-1* this should point to jdk8, since the default JAVA_HOME will be jdk7.
+# On branch-2* this should be skipped, since we dropped jdk7 and JAVA_HOME will be jdk8
+# On master this should be skipped, since JAVA_HOME will be jdk8
+#YETUS_ARGS=("--multijdkdir=/usr/lib/jvm/java-8-oracle" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--personality=${TOOLS}/personality.sh" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--basedir=${BASEDIR}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--archive-list=${ARCHIVE_PATTERN_LIST}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--console-urls" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--build-url-patchdir=artifact/${OUTPUT_RELATIVE}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--docker" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--dockerfile=${BRANCH_SPECIFIC_DOCKERFILE}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--empty-patch" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--html-report-file=${OUTPUTDIR}/console-report.html" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--jenkins" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--mvn-custom-repos" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--patch-dir=${OUTPUTDIR}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--project=${PROJECT}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--resetrepo" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--author-ignore-list=${AUTHOR_IGNORE_LIST}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--whitespace-eol-ignore-list=${WHITESPACE_IGNORE_LIST}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--whitespace-tabs-ignore-list=${WHITESPACE_IGNORE_LIST}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--sentinel" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--branch=${BRANCH_NAME}" "${YETUS_ARGS[@]}")
+YETUS_ARGS=("--tests-filter=${TESTS_FILTER}" "${YETUS_ARGS[@]}")
+
+if [[ true == "${DEBUG}" ]]; then
+  YETUS_ARGS=("--debug" "${YETUS_ARGS[@]}")
+fi
+
+rm -rf "${OUTPUTDIR}"
+mkdir -p "${OUTPUTDIR}"
+if [[ true !=  "${USE_YETUS_PRERELEASE}" ]]; then
+  YETUS_ARGS=("--shelldocs=${WORKSPACE}/yetus-${YETUS_RELEASE}/bin/shelldocs" "${YETUS_ARGS[@]}")
+  TESTPATCHBIN="${WORKSPACE}/yetus-${YETUS_RELEASE}/bin/test-patch"
+else
+  YETUS_ARGS=("--shelldocs=${WORKSPACE}/yetus-git/shelldocs/shelldocs.py" "${YETUS_ARGS[@]}")
+  TESTPATCHBIN="${WORKSPACE}/yetus-git/precommit/test-patch.sh"
+fi
+echo "Launching yetus with command line:"
+echo "${TESTPATCHBIN} ${YETUS_ARGS[*]}"
+
+/usr/bin/env bash "${TESTPATCHBIN}" "${YETUS_ARGS[@]}"
+	'''
+      }
+    }
+  }
+  post {
+    always {
+      // TODO confirm junit step accounts for multijdk results
+      junit 'output/**/target/**/TEST-*.xml'
+      // gzip surefire reports.
+      sh '''#!/bin/bash -e
+        if [ -d "${OUTPUTDIR}/archiver" ]; then
+          count=$(find "${OUTPUTDIR}/archiver" -type f | wc -l)
+          if [[ 0 -ne ${count} ]]; then
+            echo "zipping ${count} archived files"
+            zip -m -r "${OUTPUTDIR}/test_logs.zip" "${OUTPUTDIR}/archiver"
+          else
+            echo "No archived files, skipping compressing."
+          fi
+        else
+          echo "No archiver directory, skipping compressing."
+        fi
+'''
+      // env variables don't work in archive? or this has to be relative to WORKSPACE. :(
+      archive 'output/*'
+      archive 'output/**/*'
+      publishHTML target: [
+        allowMissing: true,
+        keepAll: true,
+        alwaysLinkToLastBuild: true,
+        // has to be relative to WORKSPACE :(
+        reportDir: 'output',
+        reportFiles: 'console-report.html',
+        reportName: 'Nightly Build Report'
+      ]
+    }
+    failure {
+      deleteDir()
+    }
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/hbase/blob/6d6f8f76/dev-support/docker/Dockerfile
----------------------------------------------------------------------
diff --git a/dev-support/docker/Dockerfile b/dev-support/docker/Dockerfile
index 2ecc42e..c654ded 100644
--- a/dev-support/docker/Dockerfile
+++ b/dev-support/docker/Dockerfile
@@ -47,6 +47,7 @@ RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
     libbz2-dev \
     libcurl4-openssl-dev \
     libfuse-dev \
+    libperl-critic-perl \
     libprotobuf-dev \
     libprotoc-dev \
     libsnappy-dev \
@@ -126,6 +127,34 @@ RUN pip install pylint
 ####
 RUN pip install python-dateutil
 
+####
+# Install Ruby 2, based on Yetus 0.4.0 dockerfile
+###
+RUN echo 'gem: --no-rdoc --no-ri' >> /root/.gemrc
+RUN apt-get -q install -y ruby2.0
+#
+# on trusty, the above installs ruby2.0 and ruby (1.9.3) exes
+# but update-alternatives is broken, so we need to do some work
+# to make 2.0 actually the default without the system flipping out
+#
+# See https://bugs.launchpad.net/ubuntu/+source/ruby2.0/+bug/1310292
+#
+RUN dpkg-divert --add --rename --divert /usr/bin/ruby.divert /usr/bin/ruby
+RUN dpkg-divert --add --rename --divert /usr/bin/gem.divert /usr/bin/gemrc
+RUN update-alternatives --install /usr/bin/ruby ruby /usr/bin/ruby2.0 1
+RUN update-alternatives --install /usr/bin/gem gem /usr/bin/gem2.0 1
+
+
+####
+# Install rubocop
+###
+RUN gem install rubocop
+
+####
+# Install ruby-lint
+###
+RUN gem install ruby-lint
+
 ###
 # Avoid out of memory errors in builds
 ###