Posted to commits@hbase.apache.org by jm...@apache.org on 2013/02/13 21:58:32 UTC

svn commit: r1445918 [2/29] - in /hbase/branches/hbase-7290: ./ bin/ conf/ dev-support/ hbase-client/ hbase-common/ hbase-common/src/main/java/org/apache/hadoop/hbase/ hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ hbase-common/src/mai...

Modified: hbase/branches/hbase-7290/dev-support/hbase_eclipse_formatter.xml
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/dev-support/hbase_eclipse_formatter.xml?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/dev-support/hbase_eclipse_formatter.xml (original)
+++ hbase/branches/hbase-7290/dev-support/hbase_eclipse_formatter.xml Wed Feb 13 20:58:23 2013
@@ -106,7 +106,7 @@
 <setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
 <setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="16"/>
 <setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
 <setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
@@ -122,7 +122,7 @@
 <setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
 <setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="16"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
@@ -270,11 +270,11 @@
 <setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.7"/>
 <setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
 <setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="16"/>
 <setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="true"/>
 <setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="16"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
 <setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>

Modified: hbase/branches/hbase-7290/dev-support/test-patch.properties
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/dev-support/test-patch.properties?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/dev-support/test-patch.properties (original)
+++ hbase/branches/hbase-7290/dev-support/test-patch.properties Wed Feb 13 20:58:23 2013
@@ -19,5 +19,8 @@ MAVEN_OPTS="-Xmx3g"
 # Please update the per-module test-patch.properties if you update this file.
 
 OK_RELEASEAUDIT_WARNINGS=84
-OK_FINDBUGS_WARNINGS=517
-OK_JAVADOC_WARNINGS=169
+OK_FINDBUGS_WARNINGS=127
+# Allow two warnings.  Javadoc complains about sun.misc.Unsafe use.  See HBASE-7457
+OK_JAVADOC_WARNINGS=2
+
+MAX_LINE_LENGTH=100

Modified: hbase/branches/hbase-7290/dev-support/test-patch.sh
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/dev-support/test-patch.sh?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/dev-support/test-patch.sh (original)
+++ hbase/branches/hbase-7290/dev-support/test-patch.sh Wed Feb 13 20:58:23 2013
@@ -583,6 +583,34 @@ $JIRA_COMMENT_FOOTER"
 }
 
 ###############################################################################
+### Check line lengths
+checkLineLengths () {
+  echo ""
+  echo ""
+  echo "======================================================================"
+  echo "======================================================================"
+  echo "    Checking that no line have length > $MAX_LINE_LENGTH"
+  echo "======================================================================"
+  echo "======================================================================"
+  echo ""
+  echo ""
+  #see http://en.wikipedia.org/wiki/Diff#Unified_format
+
+  ll=`cat $PATCH_DIR/patch | grep "^+" | grep -v "^@@" | grep -v "^+++" | grep -v "import" | wc -L`
+  MAX_LINE_LENGTH_PATCH=`expr $MAX_LINE_LENGTH + 1`
+  if [[ "$ll" -gt "$MAX_LINE_LENGTH_PATCH" ]]; then
+    JIRA_COMMENT="$JIRA_COMMENT
+
+    {color:red}-1 lineLengths{color}.  The patch introduces lines longer than $MAX_LINE_LENGTH"
+    return 1
+  fi
+  JIRA_COMMENT="$JIRA_COMMENT
+
+    {color:green}+1 lineLengths{color}.  The patch does not introduce lines longer than $MAX_LINE_LENGTH"
+  return 0
+}
+
+###############################################################################
 ### Run the tests
 runTests () {
   echo ""
@@ -594,6 +622,10 @@ runTests () {
   echo "======================================================================"
   echo ""
   echo ""
+
+
+  ### kill any process remaining from another test, maybe even another project
+  jps | grep surefirebooter | cut -d ' ' -f 1 | xargs kill -9 2>/dev/null
   
   failed_tests=""
   ### Kill any rogue build processes from the last attempt
@@ -621,16 +653,26 @@ runTests () {
   fi
   ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
   if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
-    echo "There are $ZOMBIE_TESTS_COUNT zombie tests, they should have been killed by surefire but survived"
-    echo "************ BEGIN zombies jstack extract"
-    jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack | grep ".test" | grep "\.java"
-    echo "************ END  zombies jstack extract"
-     JIRA_COMMENT="$JIRA_COMMENT
-
-     {color:red}-1 core zombie tests{color}.  There are zombie tests. See build logs for details."
-    BAD=1
+    #It seems that sometimes the tests do not die immediately. Let's give them 30s
+    echo "Suspicious java process found - waiting 30s to see if they are just slow to stop"
+    sleep 30
+    ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
+    if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
+      echo "There are $ZOMBIE_TESTS_COUNT zombie tests, they should have been killed by surefire but survived"
+      echo "************ BEGIN zombies jstack extract"
+      ZB_STACK=`jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack | grep ".test" | grep "\.java"`
+      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack
+      echo "************ END  zombies jstack extract"
+      JIRA_COMMENT="$JIRA_COMMENT
+
+     {color:red}-1 core zombie tests{color}.  There are ${ZOMBIE_TESTS_COUNT} zombie test(s): ${ZB_STACK}"
+      BAD=1
+      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs kill -9
+    else
+      echo "We're ok: there is no zombie test, but some tests took some time to stop"
+    fi
   else
-    echo "We're ok: there is no zombie tests"
+    echo "We're ok: there is no zombie test"
   fi
   return $BAD
 }
@@ -773,6 +815,8 @@ checkFindbugsWarnings
 (( RESULT = RESULT + $? ))
 checkReleaseAuditWarnings
 (( RESULT = RESULT + $? ))
+checkLineLengths
+(( RESULT = RESULT + $? ))
 ### Do not call these when run by a developer 
 if [[ $JENKINS == "true" ]] ; then
   runTests

Propchange: hbase/branches/hbase-7290/hbase-client/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Wed Feb 13 20:58:23 2013
@@ -3,3 +3,6 @@
 build
 logs
 target
+*.iws
+*.iml
+*.ipr

Propchange: hbase/branches/hbase-7290/hbase-common/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Wed Feb 13 20:58:23 2013
@@ -3,5 +3,6 @@
 build
 logs
 target
-
-
+*.iws
+*.iml
+*.ipr

Modified: hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java Wed Feb 13 20:58:23 2013
@@ -80,37 +80,37 @@ public class KeyValue implements Cell, H
   /**
    * Comparator for plain key/values; i.e. non-catalog table key/values.
    */
-  public static KVComparator COMPARATOR = new KVComparator();
+  public static final KVComparator COMPARATOR = new KVComparator();
 
   /**
    * Comparator for plain key; i.e. non-catalog table key.  Works on Key portion
    * of KeyValue only.
    */
-  public static KeyComparator KEY_COMPARATOR = new KeyComparator();
+  public static final KeyComparator KEY_COMPARATOR = new KeyComparator();
 
   /**
    * A {@link KVComparator} for <code>.META.</code> catalog table
    * {@link KeyValue}s.
    */
-  public static KVComparator META_COMPARATOR = new MetaComparator();
+  public static final KVComparator META_COMPARATOR = new MetaComparator();
 
   /**
    * A {@link KVComparator} for <code>.META.</code> catalog table
    * {@link KeyValue} keys.
    */
-  public static KeyComparator META_KEY_COMPARATOR = new MetaKeyComparator();
+  public static final KeyComparator META_KEY_COMPARATOR = new MetaKeyComparator();
 
   /**
    * A {@link KVComparator} for <code>-ROOT-</code> catalog table
    * {@link KeyValue}s.
    */
-  public static KVComparator ROOT_COMPARATOR = new RootComparator();
+  public static final KVComparator ROOT_COMPARATOR = new RootComparator();
 
   /**
    * A {@link KVComparator} for <code>-ROOT-</code> catalog table
    * {@link KeyValue} keys.
    */
-  public static KeyComparator ROOT_KEY_COMPARATOR = new RootKeyComparator();
+  public static final KeyComparator ROOT_KEY_COMPARATOR = new RootKeyComparator();
 
   /**
    * Get the appropriate row comparator for the specified table.
@@ -842,16 +842,13 @@ public class KeyValue implements Cell, H
 
   /**
    * Needed doing 'contains' on List.  Only compares the key portion, not the value.
-   *
-   * For temporary backwards compatibility with the original KeyValue.equals method, we ignore the
-   * mvccVersion.
    */
   @Override
   public boolean equals(Object other) {
     if (!(other instanceof Cell)) {
       return false;
     }
-    return CellComparator.equalsIgnoreMvccVersion(this, (Cell)other);
+    return CellComparator.equals(this, (Cell)other);
   }
 
   @Override
@@ -2049,6 +2046,19 @@ public class KeyValue implements Cell, H
   }
 
   /**
+   * Create a KeyValue that is smaller than all other possible KeyValues
+   * for the given row. That is, any (valid) KeyValue on 'row' would sort
+   * _after_ the result.
+   *
+   * @param row - row key (arbitrary byte array)
+   * @return First possible KeyValue on passed <code>row</code>
+   */
+  public static KeyValue createFirstOnRow(final byte [] row, int roffset, short rlength) {
+    return new KeyValue(row, roffset, rlength,
+        null, 0, 0, null, 0, 0, HConstants.LATEST_TIMESTAMP, Type.Maximum, null, 0, 0);
+  }
+
+  /**
    * Creates a KeyValue that is smaller than all other KeyValues that
    * are older than the passed timestamp.
    * @param row - row key (arbitrary byte array)
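
For context, the new createFirstOnRow(byte[], int, short) overload produces a
"lower bound" key for a row: it carries HConstants.LATEST_TIMESTAMP and
Type.Maximum, both of which sort first, so every real KeyValue on the same row
compares greater. A minimal sketch, assuming the standard KeyValue constructors
available on this branch:

    import org.apache.hadoop.hbase.KeyValue;

    public class FirstOnRowExample {
      public static void main(String[] args) {
        byte[] row = "row-42".getBytes();
        // Lower bound for the row: sorts before any stored KeyValue on it.
        KeyValue first = KeyValue.createFirstOnRow(row, 0, (short) row.length);
        KeyValue stored = new KeyValue(row, "cf".getBytes(), "q".getBytes(),
            1L, "v".getBytes());
        // Expect a negative result: 'first' precedes 'stored' in sort order.
        System.out.println(KeyValue.COMPARATOR.compare(first, stored));
      }
    }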

Modified: hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java Wed Feb 13 20:58:23 2013
@@ -23,10 +23,10 @@ import java.util.Collection;
 import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.IterableUtils;
 import org.apache.hadoop.hbase.util.Strings;
+import org.apache.hbase.cell.CellComparator;
 
 import com.google.common.collect.Lists;
 
@@ -61,7 +61,6 @@ public class KeyValueTestUtil {
       );
   }
 
-
   public static ByteBuffer toByteBufferAndRewind(final Iterable<? extends KeyValue> kvs,
       boolean includeMemstoreTS) {
     int totalBytes = KeyValueTool.totalLengthWithMvccVersion(kvs, includeMemstoreTS);
@@ -73,6 +72,27 @@ public class KeyValueTestUtil {
     return bb;
   }
 
+  /**
+   * Checks whether each KeyValue from kvCollection1 is contained in kvCollection2.
+   *
+   * The comparison is made without considering the MVCC version of the KeyValues.
+   *
+   * @param kvCollection1 collection whose elements must all be matched
+   * @param kvCollection2 collection searched for matches
+   * @return true if every KeyValue from kvCollection1 is contained in kvCollection2
+   */
+  public static boolean containsIgnoreMvccVersion(Collection<KeyValue> kvCollection1,
+      Collection<KeyValue> kvCollection2) {
+    for (KeyValue kv1 : kvCollection1) {
+      boolean found = false;
+      for (KeyValue kv2 : kvCollection2) {
+        if (CellComparator.equalsIgnoreMvccVersion(kv1, kv2)) found = true;
+      }
+      if (!found) return false;
+    }
+    return true;
+  }
+  
   public static List<KeyValue> rewindThenToList(final ByteBuffer bb,
       final boolean includesMemstoreTS) {
     bb.rewind();
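
A hedged usage sketch for the new containsIgnoreMvccVersion helper; create()
and setMemstoreTS() are the existing KeyValue test helpers on this branch, used
here to show that a differing mvcc/memstore version does not break the
containment check:

    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.KeyValueTestUtil;

    public class ContainsIgnoreMvccExample {
      public static void main(String[] args) {
        KeyValue expected = KeyValueTestUtil.create("row", "cf", "q", 1L, "v");
        KeyValue actual = KeyValueTestUtil.create("row", "cf", "q", 1L, "v");
        actual.setMemstoreTS(42);  // same cell, different mvcc version
        List<KeyValue> act = Arrays.asList(actual);
        List<KeyValue> exp = Arrays.asList(expected);
        // true: every KeyValue in the first collection has a match in the
        // second, with the mvcc version deliberately ignored
        System.out.println(KeyValueTestUtil.containsIgnoreMvccVersion(act, exp));
      }
    }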

Modified: hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java Wed Feb 13 20:58:23 2013
@@ -36,6 +36,7 @@ import org.apache.hadoop.io.compress.Com
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.io.compress.DoNotPool;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -99,35 +100,55 @@ public final class Compression {
   public static enum Algorithm {
     LZO("lzo") {
       // Use base type to avoid compile-time dependencies.
-      private transient CompressionCodec lzoCodec;
+      private volatile transient CompressionCodec lzoCodec;
+      private transient Object lock = new Object();
 
       @Override
       CompressionCodec getCodec(Configuration conf) {
         if (lzoCodec == null) {
-          try {
-            Class<?> externalCodec =
-                getClassLoaderForCodec().loadClass("com.hadoop.compression.lzo.LzoCodec");
-            lzoCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec,
-                new Configuration(conf));
-          } catch (ClassNotFoundException e) {
-            throw new RuntimeException(e);
+          synchronized (lock) {
+            if (lzoCodec == null) {
+              lzoCodec = buildCodec(conf);
+            }
           }
         }
         return lzoCodec;
       }
+
+      private CompressionCodec buildCodec(Configuration conf) {
+        try {
+          Class<?> externalCodec =
+              ClassLoader.getSystemClassLoader()
+                  .loadClass("com.hadoop.compression.lzo.LzoCodec");
+          return (CompressionCodec) ReflectionUtils.newInstance(externalCodec,
+              new Configuration(conf));
+        } catch (ClassNotFoundException e) {
+          throw new RuntimeException(e);
+        }
+      }
     },
     GZ("gz") {
-      private transient GzipCodec codec;
+      private volatile transient GzipCodec codec;
+      private transient Object lock = new Object();
 
       @Override
       DefaultCodec getCodec(Configuration conf) {
         if (codec == null) {
-          codec = new ReusableStreamGzipCodec();
-          codec.setConf(new Configuration(conf));
+          synchronized (lock) {
+            if (codec == null) {
+              codec = buildCodec(conf);
+            }
+          }
         }
 
         return codec;
       }
+
+      private GzipCodec buildCodec(Configuration conf) {
+        GzipCodec gzcodec = new ReusableStreamGzipCodec();
+        gzcodec.setConf(new Configuration(conf));
+        return gzcodec;
+      }
     },
 
     NONE("none") {
@@ -163,40 +184,62 @@ public final class Compression {
       }
     },
     SNAPPY("snappy") {
-        // Use base type to avoid compile-time dependencies.
-        private transient CompressionCodec snappyCodec;
+      // Use base type to avoid compile-time dependencies.
+      private volatile transient CompressionCodec snappyCodec;
+      private transient Object lock = new Object();
 
-        @Override
-        CompressionCodec getCodec(Configuration conf) {
-          if (snappyCodec == null) {
-            try {
-              Class<?> externalCodec =
-                  getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.SnappyCodec");
-              snappyCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf);
-            } catch (ClassNotFoundException e) {
-              throw new RuntimeException(e);
+      @Override
+      CompressionCodec getCodec(Configuration conf) {
+        if (snappyCodec == null) {
+          synchronized (lock) {
+            if (snappyCodec == null) {
+              snappyCodec = buildCodec(conf);
             }
           }
-          return snappyCodec;
         }
+        return snappyCodec;
+      }
+
+      private CompressionCodec buildCodec(Configuration conf) {
+        try {
+          Class<?> externalCodec =
+              ClassLoader.getSystemClassLoader()
+                  .loadClass("org.apache.hadoop.io.compress.SnappyCodec");
+          return (CompressionCodec) ReflectionUtils.newInstance(externalCodec,
+              conf);
+        } catch (ClassNotFoundException e) {
+          throw new RuntimeException(e);
+        }
+      }
     },
     LZ4("lz4") {
       // Use base type to avoid compile-time dependencies.
-      private transient CompressionCodec lz4Codec;
+      private volatile transient CompressionCodec lz4Codec;
+      private transient Object lock = new Object();
 
       @Override
       CompressionCodec getCodec(Configuration conf) {
         if (lz4Codec == null) {
-          try {
-            Class<?> externalCodec =
-                getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.Lz4Codec");
-            lz4Codec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf);
-          } catch (ClassNotFoundException e) {
-            throw new RuntimeException(e);
+          synchronized (lock) {
+            if (lz4Codec == null) {
+              lz4Codec = buildCodec(conf);
+            }
           }
         }
         return lz4Codec;
       }
+
+      private CompressionCodec buildCodec(Configuration conf) {
+        try {
+          Class<?> externalCodec =
+              getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.Lz4Codec");
+          return (CompressionCodec) ReflectionUtils.newInstance(externalCodec,
+              conf);
+        } catch (ClassNotFoundException e) {
+          throw new RuntimeException(e);
+        }
+      }
   };
 
     private final Configuration conf;
@@ -309,6 +352,9 @@ public final class Compression {
     public void returnDecompressor(Decompressor decompressor) {
       if (decompressor != null) {
         CodecPool.returnDecompressor(decompressor);
+        if (decompressor.getClass().isAnnotationPresent(DoNotPool.class)) {
+          decompressor.end();
+        }
       }
     }
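
The pattern applied to each algorithm above is double-checked locking: the
codec field is volatile so a fully built codec is published safely, and the
synchronized block guarantees a single construction. A minimal self-contained
sketch of the idiom (names are illustrative):

    public class LazyCodecHolder {
      private volatile Object codec;            // volatile: safe publication
      private final Object lock = new Object(); // guards construction only

      Object getCodec() {
        if (codec == null) {                    // fast path, no locking
          synchronized (lock) {
            if (codec == null) {                // re-check under the lock
              codec = buildCodec();
            }
          }
        }
        return codec;                           // built at most once
      }

      private Object buildCodec() {
        return new Object(); // stand-in for the reflective codec lookup
      }
    }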
 

Modified: hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/ReusableStreamGzipCodec.java Wed Feb 13 20:58:23 2013
@@ -25,6 +25,7 @@ import java.util.zip.GZIPOutputStream;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.JVM;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.CompressorStream;
 import org.apache.hadoop.io.compress.GzipCodec;
@@ -77,6 +78,10 @@ public class ReusableStreamGzipCodec ext
     }
 
     private static class ResetableGZIPOutputStream extends GZIPOutputStream {
+
+      private static final int TRAILER_SIZE = 8;
+      private static final boolean HAS_BROKEN_FINISH = JVM.isGZIPOutputStreamFinishBroken();
+
       public ResetableGZIPOutputStream(OutputStream out) throws IOException {
         super(out);
       }
@@ -86,6 +91,59 @@ public class ReusableStreamGzipCodec ext
         crc.reset();
         out.write(GZIP_HEADER);
       }
+
+      /**
+       * Overridden because certain implementations call def.end(), which
+       * causes problems when resetting the stream for reuse.
+       */
+      @Override
+      public void finish() throws IOException {
+        if (HAS_BROKEN_FINISH) { 
+          if (!def.finished()) {
+            def.finish();
+            while (!def.finished()) {
+              int i = def.deflate(this.buf, 0, this.buf.length);
+              if ((def.finished()) && (i <= this.buf.length - TRAILER_SIZE)) {
+                writeTrailer(this.buf, i);
+                i += TRAILER_SIZE;
+                out.write(this.buf, 0, i);
+
+                return;
+              }
+              if (i > 0) {
+                out.write(this.buf, 0, i);
+              }
+            }
+
+            byte[] arrayOfByte = new byte[TRAILER_SIZE];
+            writeTrailer(arrayOfByte, 0);
+            out.write(arrayOfByte);
+          }
+        } else {
+          super.finish();
+        }
+      }
+
+      /** re-implemented because the corresponding method in the JDK is private */
+      private void writeTrailer(byte[] paramArrayOfByte, int paramInt)
+          throws IOException {
+        writeInt((int)this.crc.getValue(), paramArrayOfByte, paramInt);
+        writeInt(this.def.getTotalIn(), paramArrayOfByte, paramInt + 4);
+      }
+
+      /** re-implemented because the corresponding method in the JDK is private */
+      private void writeInt(int paramInt1, byte[] paramArrayOfByte, int paramInt2)
+          throws IOException {
+        writeShort(paramInt1 & 0xFFFF, paramArrayOfByte, paramInt2);
+        writeShort(paramInt1 >> 16 & 0xFFFF, paramArrayOfByte, paramInt2 + 2);
+      }
+
+      /** re-implemented because the corresponding method in the JDK is private */
+      private void writeShort(int paramInt1, byte[] paramArrayOfByte, int paramInt2)
+          throws IOException {
+        paramArrayOfByte[paramInt2] = (byte)(paramInt1 & 0xFF);
+        paramArrayOfByte[(paramInt2 + 1)] = (byte)(paramInt1 >> 8 & 0xFF);
+      }
     }
 
     public ReusableGzipOutputStream(OutputStream out) throws IOException {
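
The re-implemented trailer above follows the gzip format (RFC 1952): eight
bytes, the CRC32 of the uncompressed data followed by the uncompressed length
modulo 2^32, both little-endian. A hedged standalone illustration of that
layout:

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    public class GzipTrailerExample {
      // Builds the 8-byte gzip trailer: CRC32 then ISIZE, little-endian,
      // the same layout writeTrailer/writeInt/writeShort produce above.
      static byte[] trailer(long crc, long totalIn) {
        ByteBuffer bb = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
        bb.putInt((int) crc);      // CRC32 of the uncompressed bytes
        bb.putInt((int) totalIn);  // input size mod 2^32
        return bb.array();
      }

      public static void main(String[] args) {
        // 0x12345678 is emitted as 78 56 34 12 (least significant byte first)
        for (byte b : trailer(0x12345678L, 10)) {
          System.out.printf("%02x ", b);
        }
      }
    }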

Modified: hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java Wed Feb 13 20:58:23 2013
@@ -26,7 +26,22 @@ import org.apache.hadoop.classification.
 @InterfaceAudience.Private
 public class IncrementingEnvironmentEdge implements EnvironmentEdge {
 
-  private long timeIncrement = 1;
+  private long timeIncrement;
+
+  /**
+   * Construct an incremental edge starting from currentTimeMillis
+   */
+  public IncrementingEnvironmentEdge() {
+    this(System.currentTimeMillis());
+  }
+
+  /**
+   * Construct an incremental edge with an initial amount
+   * @param initialAmount the initial value to start with
+   */
+  public IncrementingEnvironmentEdge(long initialAmount) {
+    this.timeIncrement = initialAmount;
+  }
 
   /**
    * {@inheritDoc}
@@ -38,4 +53,12 @@ public class IncrementingEnvironmentEdge
   public synchronized long currentTimeMillis() {
     return timeIncrement++;
   }
+
+  /**
+   * Increment the time by the given amount
+   */
+  public synchronized long incrementTime(long amount) {
+    timeIncrement += amount;
+    return timeIncrement;
+  }
 }
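
A hedged sketch of how this edge is typically used: injected through the
existing EnvironmentEdgeManager, it makes time fully deterministic in tests,
and the new incrementTime() lets a test jump the clock forward:

    import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
    import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;

    public class DeterministicClockExample {
      public static void main(String[] args) {
        IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge(1000);
        EnvironmentEdgeManager.injectEdge(edge);
        // Each read advances the clock by exactly one millisecond.
        System.out.println(EnvironmentEdgeManager.currentTimeMillis()); // 1000
        System.out.println(EnvironmentEdgeManager.currentTimeMillis()); // 1001
        // Jump forward, e.g. to expire a TTL without sleeping.
        edge.incrementTime(60000);
        System.out.println(EnvironmentEdgeManager.currentTimeMillis()); // 61002
        EnvironmentEdgeManager.reset();
      }
    }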

Modified: hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java Wed Feb 13 20:58:23 2013
@@ -34,7 +34,6 @@ import java.lang.reflect.Method;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
-import org.apache.hadoop.util.Shell;
 
 /**
  * This class is a wrapper for the implementation of
@@ -57,6 +56,7 @@ public class JVM 
     System.getProperty("os.name").startsWith("Windows");
   private static final boolean linux =
     System.getProperty("os.name").startsWith("Linux");
+  private static final String JVMVersion = System.getProperty("java.version");
 
   /**
    * Constructor. Get the running Operating System instance
@@ -70,12 +70,21 @@ public class JVM 
    * 
    * @return whether this is unix or not.
    */
-  public boolean isUnix() {
+  public static boolean isUnix() {
     if (windows) {
       return false;
     }
     return (ibmvendor ? linux : true);
   }
+  
+  /**
+   * Check if the finish() method of GZIPOutputStream is broken
+   * 
+   * @return whether GZIPOutputStream.finish() is broken.
+   */
+  public static boolean isGZIPOutputStreamFinishBroken() {
+    return ibmvendor && JVMVersion.contains("1.6.0");
+  }
 
   /**
    * Load the implementation of UnixOperatingSystemMXBean for Oracle jvm
@@ -83,8 +92,7 @@ public class JVM 
    * @param mBeanMethodName : method to run from the interface UnixOperatingSystemMXBean
    * @return the method result
    */
-  private Long runUnixMXBeanMethod (String mBeanMethodName) {
-  
+  private Long runUnixMXBeanMethod (String mBeanMethodName) {  
     Object unixos;
     Class<?> classRef;
     Method mBeanMethod;
@@ -118,6 +126,7 @@ public class JVM 
       ofdc = runUnixMXBeanMethod("getOpenFileDescriptorCount");
       return (ofdc != null ? ofdc.longValue () : -1);
     }
+    InputStream in = null;
     try {
       //need to get the PID number of the process first
       RuntimeMXBean rtmbean = ManagementFactory.getRuntimeMXBean();
@@ -128,7 +137,7 @@ public class JVM 
       Process p = Runtime.getRuntime().exec(
       new String[] { "bash", "-c",
           "ls /proc/" + pidhost[0] + "/fdinfo | wc -l" });
-      InputStream in = p.getInputStream();
+      in = p.getInputStream();
       BufferedReader output = new BufferedReader(
         		new InputStreamReader(in));
 
@@ -137,6 +146,14 @@ public class JVM 
              return Long.parseLong(openFileDesCount);
      } catch (IOException ie) {
      	     LOG.warn("Not able to get the number of open file descriptors", ie);
+    } finally {
+      if (in != null){
+        try {
+          in.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the InputStream", e);
+        }
+      }
     }
     return -1;
   }
@@ -155,13 +172,14 @@ public class JVM 
       mfdc = runUnixMXBeanMethod("getMaxFileDescriptorCount");
       return (mfdc != null ? mfdc.longValue () : -1);
     }
+    InputStream in = null;
     try {
       
       //using linux bash commands to retrieve info
       Process p = Runtime.getRuntime().exec(
         	  new String[] { "bash", "-c",
         	  "ulimit -n" });
-      InputStream in = p.getInputStream();
+      in = p.getInputStream();
       BufferedReader output = new BufferedReader(
         new InputStreamReader(in));
 
@@ -170,6 +188,14 @@ public class JVM 
         	return Long.parseLong(maxFileDesCount);
     }   catch (IOException ie) {
       		LOG.warn("Not able to get the max number of file descriptors", ie);
+    } finally {
+      if (in != null){
+        try {
+          in.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the InputStream", e);
+        }
+      }
     }
     return -1;
  }

Modified: hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java Wed Feb 13 20:58:23 2013
@@ -208,16 +208,30 @@ public class Threads {
   }
 
   /**
-   * Get a named {@link ThreadFactory} that just builds daemon threads
-   * @param prefix name prefix for all threads created from the factory
-   * @return a thread factory that creates named, daemon threads
+   * Same as {@link #newDaemonThreadFactory(String, UncaughtExceptionHandler)},
+   * without setting the exception handler.
    */
   public static ThreadFactory newDaemonThreadFactory(final String prefix) {
+    return newDaemonThreadFactory(prefix, null);
+  }
+
+  /**
+   * Get a named {@link ThreadFactory} that just builds daemon threads.
+   * @param prefix name prefix for all threads created from the factory
+   * @param handler uncaught exception handler to set for all threads created
+   * @return a thread factory that creates named, daemon threads with
+   *         the supplied exception handler and normal priority
+   */
+  public static ThreadFactory newDaemonThreadFactory(final String prefix,
+      final UncaughtExceptionHandler handler) {
     final ThreadFactory namedFactory = getNamedThreadFactory(prefix);
     return new ThreadFactory() {
       @Override
       public Thread newThread(Runnable r) {
         Thread t = namedFactory.newThread(r);
+        if (handler != null) {
+          t.setUncaughtExceptionHandler(handler);
+        }
         if (!t.isDaemon()) {
           t.setDaemon(true);
         }
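
A hedged usage sketch pairing the new overload with an executor, so an
exception escaping a worker thread is logged instead of silently dropped (the
executor wiring is illustrative):

    import java.lang.Thread.UncaughtExceptionHandler;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import org.apache.hadoop.hbase.util.Threads;

    public class DaemonFactoryExample {
      public static void main(String[] args) {
        UncaughtExceptionHandler handler = new UncaughtExceptionHandler() {
          @Override
          public void uncaughtException(Thread t, Throwable e) {
            System.err.println(t.getName() + " died: " + e);
          }
        };
        ExecutorService pool = Executors.newFixedThreadPool(2,
            Threads.newDaemonThreadFactory("worker", handler));
        pool.execute(new Runnable() {
          @Override
          public void run() {
            throw new IllegalStateException("boom"); // routed to handler
          }
        });
        pool.shutdown();
      }
    }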

Modified: hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java Wed Feb 13 20:58:23 2013
@@ -16,6 +16,7 @@
  */
 package org.apache.hadoop.hbase.util.test;
 
+import java.util.Map;
 import java.util.Random;
 
 import org.apache.hadoop.hbase.util.Bytes;
@@ -27,8 +28,6 @@ import org.apache.hadoop.hbase.util.MD5H
  * hash. Values are generated by selecting value size in the configured range
  * and generating a pseudo-random sequence of bytes seeded by key, column
  * qualifier, and value size.
- * <p>
- * Not thread-safe, so a separate instance is needed for every writer thread/
  */
 public class LoadTestKVGenerator {
 
@@ -49,13 +48,13 @@ public class LoadTestKVGenerator {
 
   /**
    * Verifies that the given byte array is the same as what would be generated
-   * for the given row key and qualifier. We are assuming that the value size
-   * is correct, and only verify the actual bytes. However, if the min/max
-   * value sizes are set sufficiently high, an accidental match should be
+   * for the given seed strings (row/cf/column/...). We are assuming that the
+   * value size is correct, and only verify the actual bytes. However, if the
+   * min/max value sizes are set sufficiently high, an accidental match should be
    * extremely improbable.
    */
-  public static boolean verify(String rowKey, String qual, byte[] value) {
-    byte[] expectedData = getValueForRowColumn(rowKey, qual, value.length);
+  public static boolean verify(byte[] value, byte[]... seedStrings) {
+    byte[] expectedData = getValueForRowColumn(value.length, seedStrings);
     return Bytes.equals(expectedData, value);
   }
 
@@ -74,27 +73,31 @@ public class LoadTestKVGenerator {
   /**
    * Generates a value for the given key index and column qualifier. Size is
    * selected randomly in the configured range. The generated value depends
-   * only on the combination of the key, qualifier, and the selected value
-   * size. This allows to verify the actual value bytes when reading, as done
-   * in {@link #verify(String, String, byte[])}.
+   * only on the combination of the byte arrays passed (key/cf/column/...) and the
+   * selected value size. This allows verifying the actual value bytes when reading,
+   * as done in {@link #verify(byte[], byte[]...)}.
+   * This method is as thread-safe as the Random class it relies on. The worst known
+   * issue with the latter is that multiple threads may draw some duplicate values,
+   * which is harmless here.
    */
-  public byte[] generateRandomSizeValue(long key, String qual) {
-    String rowKey = md5PrefixedKey(key);
+  public byte[] generateRandomSizeValue(byte[]... seedStrings) {
     int dataSize = minValueSize;
-    if(minValueSize != maxValueSize){
+    if(minValueSize != maxValueSize) {
       dataSize = minValueSize + randomForValueSize.nextInt(Math.abs(maxValueSize - minValueSize));
     }
-    return getValueForRowColumn(rowKey, qual, dataSize);
+    return getValueForRowColumn(dataSize, seedStrings);
   }
 
   /**
    * Generates random bytes of the given size for the given row and column
    * qualifier. The random seed is fully determined by these parameters.
    */
-  private static byte[] getValueForRowColumn(String rowKey, String qual,
-      int dataSize) {
-    Random seededRandom = new Random(rowKey.hashCode() + qual.hashCode() +
-        dataSize);
+  private static byte[] getValueForRowColumn(int dataSize, byte[]... seedStrings) {
+    long seed = dataSize;
+    for (byte[] str : seedStrings) {
+      seed += Bytes.toString(str).hashCode();
+    }
+    Random seededRandom = new Random(seed);
     byte[] randomBytes = new byte[dataSize];
     seededRandom.nextBytes(randomBytes);
     return randomBytes;
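
The generate/verify contract above is deterministic: the seed is the value size
plus the hash codes of all seed byte arrays, so the same inputs always
reproduce the same bytes. A hedged round-trip sketch using the new byte[]...
signatures:

    import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;

    public class KVGeneratorRoundTrip {
      public static void main(String[] args) {
        LoadTestKVGenerator gen = new LoadTestKVGenerator(10, 100);
        byte[] row = "row-1".getBytes();
        byte[] qual = "q".getBytes();
        byte[] value = gen.generateRandomSizeValue(row, qual);
        // true: the same seeds and size regenerate identical bytes
        System.out.println(LoadTestKVGenerator.verify(value, row, qual));
        value[0]++;  // corrupt a single byte
        // false: any corruption is detected
        System.out.println(LoadTestKVGenerator.verify(value, row, qual));
      }
    }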

Modified: hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java Wed Feb 13 20:58:23 2013
@@ -101,7 +101,7 @@ public class HBaseCommonTestingUtility {
    * @return True if we removed the test dirs
    * @throws IOException
    */
-  boolean cleanupTestDir() throws IOException {
+  public boolean cleanupTestDir() throws IOException {
     if (deleteDir(this.dataTestDir)) {
       this.dataTestDir = null;
       return true;
@@ -153,4 +153,4 @@ public class HBaseCommonTestingUtility {
       return false;
     }
   }
-};
\ No newline at end of file
+};

Modified: hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitListener.java Wed Feb 13 20:58:23 2013
@@ -89,11 +89,9 @@ public class ResourceCheckerJUnitListene
   static class OpenFileDescriptorResourceAnalyzer extends ResourceChecker.ResourceAnalyzer {
     @Override
     public int getVal(Phase phase) {
+      if (JVM.isUnix() == false) return 0;
       JVM jvm = new JVM();
-      if (jvm != null && jvm.isUnix() == true)
-          return (int)jvm.getOpenFileDescriptorCount();
-      else
-           return 0;
+      return (int)jvm.getOpenFileDescriptorCount();
     }
 
     @Override
@@ -105,11 +103,9 @@ public class ResourceCheckerJUnitListene
   static class MaxFileDescriptorResourceAnalyzer extends ResourceChecker.ResourceAnalyzer {
     @Override
     public int getVal(Phase phase) {
+      if (JVM.isUnix() == false) return 0;
       JVM jvm = new JVM();
-      if (jvm != null && jvm.isUnix() == true)
-           return (int)jvm.getMaxFileDescriptorCount();
-      else
-           return 0;
+      return (int)jvm.getMaxFileDescriptorCount();
      } 
    }
 

Modified: hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java Wed Feb 13 20:58:23 2013
@@ -18,7 +18,10 @@
  */
 package org.apache.hadoop.hbase;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import java.io.File;
 import java.io.FileInputStream;
@@ -28,21 +31,23 @@ import java.io.PrintStream;
 import java.lang.reflect.Method;
 import java.net.URL;
 import java.net.URLClassLoader;
-import java.util.*;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.jar.*;
-import javax.tools.*;
+import java.util.jar.Attributes;
+import java.util.jar.JarEntry;
+import java.util.jar.JarOutputStream;
+import java.util.jar.Manifest;
 
-import org.apache.hadoop.hbase.SmallTests;
+import javax.tools.JavaCompiler;
+import javax.tools.ToolProvider;
 
-import org.junit.experimental.categories.Category;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.junit.experimental.categories.Category;
 
 @Category(SmallTests.class)
 public class TestClassFinder {
@@ -184,12 +189,18 @@ public class TestClassFinder {
     FileAndPath c2 = compileTestClass(counter, "", "c2");
     packageAndLoadJar(c1);
     final String excludedJar = packageAndLoadJar(c2);
+    /* ResourcePathFilter will pass us the resourcePath as a path of a
+     * URL from the classloader. For Windows, the absolute path and the
+     * one from the URL have different file separators.
+     */
+    final String excludedJarResource =
+      new File(excludedJar).toURI().getRawSchemeSpecificPart();
 
     final ClassFinder.ResourcePathFilter notExcJarFilter =
         new ClassFinder.ResourcePathFilter() {
       @Override
       public boolean isCandidatePath(String resourcePath, boolean isJar) {
-        return !isJar || !resourcePath.equals(excludedJar);
+        return !isJar || !resourcePath.equals(excludedJarResource);
       }
     };
     ClassFinder incClassesFinder = new ClassFinder(notExcJarFilter, null, null);
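
The fix above matches against the raw scheme-specific part of the jar's URI
rather than its File path, because the two use different separators on Windows.
A hedged illustration of the discrepancy (output shown is what a Windows JVM
would print):

    import java.io.File;

    public class PathVsUriExample {
      public static void main(String[] args) {
        File jar = new File("C:\\tmp\\excluded.jar");
        // Absolute path keeps backslashes:  C:\tmp\excluded.jar
        System.out.println(jar.getAbsolutePath());
        // URI form uses forward slashes:    /C:/tmp/excluded.jar
        System.out.println(jar.toURI().getRawSchemeSpecificPart());
      }
    }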

Modified: hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java (original)
+++ hbase/branches/hbase-7290/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java Wed Feb 13 20:58:23 2013
@@ -41,8 +41,8 @@ public class TestLoadTestKVGenerator {
   @Test
   public void testValueLength() {
     for (int i = 0; i < 1000; ++i) {
-      byte[] v = gen.generateRandomSizeValue(i,
-          String.valueOf(rand.nextInt()));
+      byte[] v = gen.generateRandomSizeValue(Integer.toString(i).getBytes(),
+          String.valueOf(rand.nextInt()).getBytes());
       assertTrue(MIN_LEN <= v.length);
       assertTrue(v.length <= MAX_LEN);
     }
@@ -52,12 +52,12 @@ public class TestLoadTestKVGenerator {
   public void testVerification() {
     for (int i = 0; i < 1000; ++i) {
       for (int qualIndex = 0; qualIndex < 20; ++qualIndex) {
-        String qual = String.valueOf(qualIndex);
-        byte[] v = gen.generateRandomSizeValue(i, qual);
-        String rowKey = LoadTestKVGenerator.md5PrefixedKey(i);
-        assertTrue(LoadTestKVGenerator.verify(rowKey, qual, v));
+        byte[] qual = String.valueOf(qualIndex).getBytes();
+        byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
+        byte[] v = gen.generateRandomSizeValue(rowKey, qual);
+        assertTrue(LoadTestKVGenerator.verify(v, rowKey, qual));
         v[0]++;
-        assertFalse(LoadTestKVGenerator.verify(rowKey, qual, v));
+        assertFalse(LoadTestKVGenerator.verify(v, rowKey, qual));
       }
     }
   }

Propchange: hbase/branches/hbase-7290/hbase-examples/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Wed Feb 13 20:58:23 2013
@@ -3,5 +3,6 @@
 build
 logs
 target
-
-
+*.iws
+*.iml
+*.ipr

Modified: hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java (original)
+++ hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java Wed Feb 13 20:58:23 2013
@@ -200,7 +200,7 @@ public class BulkDeleteEndpoint extends 
     }
     // We just need the rowkey. Get it from 1st KV.
     byte[] row = deleteRow.get(0).getRow();
-    Delete delete = new Delete(row, ts, null);
+    Delete delete = new Delete(row, ts);
     if (deleteType == DeleteType.FAMILY) {
       Set<byte[]> families = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
       for (KeyValue kv : deleteRow) {

Modified: hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java (original)
+++ hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java Wed Feb 13 20:58:23 2013
@@ -63,8 +63,8 @@ import org.apache.zookeeper.ZooKeeper;
  * listeners registered with ZooKeeperWatcher cannot be removed.
  */
 public class ZooKeeperScanPolicyObserver extends BaseRegionObserver {
-  public static String node = "/backup/example/lastbackup";
-  public static String zkkey = "ZK";
+  public static final String node = "/backup/example/lastbackup";
+  public static final String zkkey = "ZK";
   private static final Log LOG = LogFactory.getLog(ZooKeeperScanPolicyObserver.class);
 
   /**
@@ -93,6 +93,7 @@ public class ZooKeeperScanPolicyObserver
      *
      * @return the last know version of the data
      */
+    @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="REC_CATCH_EXCEPTION")
     public byte[] getData() {
       // try at most twice/minute
       if (needSetup && EnvironmentEdgeManager.currentTimeMillis() > lastSetupTry + 30000) {

Modified: hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java (original)
+++ hbase/branches/hbase-7290/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java Wed Feb 13 20:58:23 2013
@@ -127,7 +127,7 @@ public class DemoClient {
         // Create the demo table with two column families, entry: and unused:
         //
         ArrayList<ColumnDescriptor> columns = new ArrayList<ColumnDescriptor>();
-        ColumnDescriptor col = null;
+        ColumnDescriptor col;
         col = new ColumnDescriptor();
         col.name = ByteBuffer.wrap(bytes("entry:"));
         col.maxVersions = 10;
@@ -227,7 +227,7 @@ public class DemoClient {
             client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), mutations, dummyAttributes);
             printRow(client.getRow(ByteBuffer.wrap(t), ByteBuffer.wrap(row), dummyAttributes));
 
-            Mutation m = null;
+            Mutation m;
             mutations = new ArrayList<Mutation>();
             m = new Mutation();
             m.column = ByteBuffer.wrap(bytes("entry:foo"));
@@ -272,7 +272,7 @@ public class DemoClient {
             }
 
             List<TCell> result = client.get(ByteBuffer.wrap(t), ByteBuffer.wrap(row), ByteBuffer.wrap(bytes("entry:foo")), dummyAttributes);
-            if (result.isEmpty() == false) {
+            if (!result.isEmpty()) {
                 System.out.println("FATAL: shouldn't get here");
                 System.exit(-1);
             }
@@ -305,7 +305,7 @@ public class DemoClient {
         transport.close();
     }
 
-    private final void printVersions(ByteBuffer row, List<TCell> versions) {
+    private void printVersions(ByteBuffer row, List<TCell> versions) {
         StringBuilder rowStr = new StringBuilder();
         for (TCell cell : versions) {
             rowStr.append(utf8(cell.value.array()));
@@ -314,7 +314,7 @@ public class DemoClient {
         System.out.println("row: " + utf8(row.array()) + ", values: " + rowStr);
     }
 
-    private final void printRow(TRowResult rowResult) {
+    private void printRow(TRowResult rowResult) {
         // copy values into a TreeMap to get them in sorted order
 
         TreeMap<String, TCell> sorted = new TreeMap<String, TCell>();

Propchange: hbase/branches/hbase-7290/hbase-hadoop-compat/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Wed Feb 13 20:58:23 2013
@@ -3,5 +3,6 @@
 build
 logs
 target
-
-
+*.iws
+*.iml
+*.ipr

Propchange: hbase/branches/hbase-7290/hbase-hadoop1-compat/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Wed Feb 13 20:58:23 2013
@@ -3,5 +3,6 @@
 build
 logs
 target
-
-
+*.iws
+*.iml
+*.ipr

Propchange: hbase/branches/hbase-7290/hbase-hadoop2-compat/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Wed Feb 13 20:58:23 2013
@@ -3,5 +3,6 @@
 build
 logs
 target
-
-
+*.iws
+*.iml
+*.ipr

Propchange: hbase/branches/hbase-7290/hbase-it/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Wed Feb 13 20:58:23 2013
@@ -3,5 +3,6 @@
 build
 logs
 target
-
-
+*.iws
+*.iml
+*.ipr

Modified: hbase/branches/hbase-7290/hbase-it/pom.xml
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-it/pom.xml?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-it/pom.xml (original)
+++ hbase/branches/hbase-7290/hbase-it/pom.xml Wed Feb 13 20:58:23 2013
@@ -102,7 +102,7 @@
           <execution>
 	    <!-- generates the file that will be used by the bin/hbase script in the dev env -->
             <id>create-hbase-generated-classpath</id>
-            <phase>compile</phase>    
+            <phase>test</phase>
             <goals>
               <goal>build-classpath</goal>
             </goals>

Modified: hbase/branches/hbase-7290/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java (original)
+++ hbase/branches/hbase-7290/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java Wed Feb 13 20:58:23 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * Facility for <strong>integration/system</strong> tests. This extends {@link HBaseTestingUtility}
@@ -58,6 +59,11 @@ public class IntegrationTestingUtility e
    */
   public static final String IS_DISTRIBUTED_CLUSTER = "hbase.test.cluster.distributed";
 
+  /** Config for pluggable hbase cluster manager */
+  private static final String HBASE_CLUSTER_MANAGER_CLASS = "hbase.it.clustermanager.class";
+  private static final Class<? extends ClusterManager> DEFAULT_HBASE_CLUSTER_MANAGER_CLASS = 
+    HBaseClusterManager.class;
+  
   /**
    * Initializes the state of the cluster. It starts a new in-process mini cluster, OR
    * if we are given an already deployed distributed cluster it initializes the state.
@@ -122,7 +128,10 @@ public class IntegrationTestingUtility e
 
   private void createDistributedHBaseCluster() throws IOException {
     Configuration conf = getConfiguration();
-    ClusterManager clusterManager = new HBaseClusterManager();
+    Class<? extends ClusterManager> clusterManagerClass = conf.getClass(HBASE_CLUSTER_MANAGER_CLASS,
+      DEFAULT_HBASE_CLUSTER_MANAGER_CLASS, ClusterManager.class);
+    ClusterManager clusterManager = ReflectionUtils.newInstance(
+      clusterManagerClass, conf);
     setHBaseCluster(new DistributedHBaseCluster(conf, clusterManager));
     getHBaseAdmin();
   }
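
With the manager now pluggable, a custom implementation can be selected purely
through configuration; a hedged sketch (com.example.MyClusterManager is
hypothetical):

    import org.apache.hadoop.conf.Configuration;

    public class ClusterManagerConfigExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Instantiated reflectively by createDistributedHBaseCluster();
        // defaults to HBaseClusterManager when unset.
        conf.set("hbase.it.clustermanager.class",
            "com.example.MyClusterManager"); // hypothetical implementation
      }
    }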

Propchange: hbase/branches/hbase-7290/hbase-protocol/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Wed Feb 13 20:58:23 2013
@@ -3,3 +3,6 @@
 build
 logs
 target
+*.iws
+*.iml
+*.ipr

Modified: hbase/branches/hbase-7290/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java (original)
+++ hbase/branches/hbase-7290/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java Wed Feb 13 20:58:23 2013
@@ -4073,7 +4073,7 @@ public final class AccessControlProtos {
   public interface UserPermissionsRequestOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // required bytes table = 1;
+    // optional bytes table = 1;
     boolean hasTable();
     com.google.protobuf.ByteString getTable();
   }
@@ -4106,7 +4106,7 @@ public final class AccessControlProtos {
     }
     
     private int bitField0_;
-    // required bytes table = 1;
+    // optional bytes table = 1;
     public static final int TABLE_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString table_;
     public boolean hasTable() {
@@ -4124,10 +4124,6 @@ public final class AccessControlProtos {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
       
-      if (!hasTable()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
       memoizedIsInitialized = 1;
       return true;
     }
@@ -4376,10 +4372,6 @@ public final class AccessControlProtos {
       }
       
       public final boolean isInitialized() {
-        if (!hasTable()) {
-          
-          return false;
-        }
         return true;
       }
       
@@ -4417,7 +4409,7 @@ public final class AccessControlProtos {
       
       private int bitField0_;
       
-      // required bytes table = 1;
+      // optional bytes table = 1;
       private com.google.protobuf.ByteString table_ = com.google.protobuf.ByteString.EMPTY;
       public boolean hasTable() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
@@ -6425,7 +6417,7 @@ public final class AccessControlProtos {
       "\017.UserPermission\"\017\n\rGrantResponse\"4\n\rRev" +
       "okeRequest\022#\n\npermission\030\001 \002(\0132\017.UserPer" +
       "mission\"\020\n\016RevokeResponse\"\'\n\026UserPermiss" +
-      "ionsRequest\022\r\n\005table\030\001 \002(\014\">\n\027UserPermis" +
+      "ionsRequest\022\r\n\005table\030\001 \001(\014\">\n\027UserPermis" +
       "sionsResponse\022#\n\npermission\030\001 \003(\0132\017.User" +
       "Permission\":\n\027CheckPermissionsRequest\022\037\n" +
       "\npermission\030\001 \003(\0132\013.Permission\"\032\n\030CheckP" +