Posted to commits@hbase.apache.org by ec...@apache.org on 2013/02/25 23:50:29 UTC

svn commit: r1449950 [30/35] - in /hbase/trunk: ./ hbase-client/ hbase-client/src/ hbase-client/src/main/ hbase-client/src/main/java/ hbase-client/src/main/java/org/ hbase-client/src/main/java/org/apache/ hbase-client/src/main/java/org/apache/hadoop/ h...

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Classes.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Classes.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Classes.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Classes.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,83 @@
+/*
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Utilities for class manipulation.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class Classes {
+
+  /**
+   * Equivalent of {@link Class#forName(String)} which also returns classes for
+   * primitives like <code>boolean</code>, etc.
+   * 
+   * @param className
+   *          The name of the class to retrieve. Can be either a normal class or
+   *          a primitive class.
+   * @return The class specified by <code>className</code>
+   * @throws ClassNotFoundException
+   *           If the requested class cannot be found.
+   */
+  public static Class<?> extendedForName(String className)
+      throws ClassNotFoundException {
+    Class<?> valueType;
+    if (className.equals("boolean")) {
+      valueType = boolean.class;
+    } else if (className.equals("byte")) {
+      valueType = byte.class;
+    } else if (className.equals("short")) {
+      valueType = short.class;
+    } else if (className.equals("int")) {
+      valueType = int.class;
+    } else if (className.equals("long")) {
+      valueType = long.class;
+    } else if (className.equals("float")) {
+      valueType = float.class;
+    } else if (className.equals("double")) {
+      valueType = double.class;
+    } else if (className.equals("char")) {
+      valueType = char.class;
+    } else {
+      valueType = Class.forName(className);
+    }
+    return valueType;
+  }
+
+  public static String stringify(Class[] classes) {
+    StringBuilder buf = new StringBuilder();
+    if (classes != null) {
+      for (Class c : classes) {
+        if (buf.length() > 0) {
+          buf.append(",");
+        }
+        buf.append(c.getName());
+      }
+    } else {
+      buf.append("NULL");
+    }
+    return buf.toString();
+  }
+}
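
For readers skimming the diff, a minimal usage sketch of the Classes helpers above (not part of the commit); the classes looked up are purely illustrative:

    import org.apache.hadoop.hbase.util.Classes;

    public class ClassesExample {
      public static void main(String[] args) throws ClassNotFoundException {
        // Primitive names resolve to their Class objects, unlike plain Class.forName().
        Class<?> intType = Classes.extendedForName("int");              // int.class
        Class<?> listType = Classes.extendedForName("java.util.ArrayList");
        // stringify joins class names with commas, or returns "NULL" for a null array.
        System.out.println(Classes.stringify(new Class[] { intType, listType }));
      }
    }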

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Hash.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,137 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * This class represents a common API for hashing functions.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public abstract class Hash {
+  /** Constant to denote invalid hash type. */
+  public static final int INVALID_HASH = -1;
+  /** Constant to denote {@link JenkinsHash}. */
+  public static final int JENKINS_HASH = 0;
+  /** Constant to denote {@link MurmurHash}. */
+  public static final int MURMUR_HASH  = 1;
+
+  /**
+   * This utility method converts the String representation of a hash function
+   * name to a symbolic constant. Currently two function types are supported,
+   * "jenkins" and "murmur".
+   * @param name hash function name
+   * @return one of the predefined constants
+   */
+  public static int parseHashType(String name) {
+    if ("jenkins".equalsIgnoreCase(name)) {
+      return JENKINS_HASH;
+    } else if ("murmur".equalsIgnoreCase(name)) {
+      return MURMUR_HASH;
+    } else {
+      return INVALID_HASH;
+    }
+  }
+
+  /**
+   * This utility method converts the name of the configured
+   * hash type to a symbolic constant.
+   * @param conf configuration
+   * @return one of the predefined constants
+   */
+  public static int getHashType(Configuration conf) {
+    String name = conf.get("hbase.hash.type", "murmur");
+    return parseHashType(name);
+  }
+
+  /**
+   * Get a singleton instance of hash function of a given type.
+   * @param type predefined hash type
+   * @return hash function instance, or null if type is invalid
+   */
+  public static Hash getInstance(int type) {
+    switch(type) {
+    case JENKINS_HASH:
+      return JenkinsHash.getInstance();
+    case MURMUR_HASH:
+      return MurmurHash.getInstance();
+    default:
+      return null;
+    }
+  }
+
+  /**
+   * Get a singleton instance of hash function of a type
+   * defined in the configuration.
+   * @param conf current configuration
+   * @return defined hash type, or null if type is invalid
+   */
+  public static Hash getInstance(Configuration conf) {
+    int type = getHashType(conf);
+    return getInstance(type);
+  }
+
+  /**
+   * Calculate a hash using all bytes from the input argument, and
+   * a seed of -1.
+   * @param bytes input bytes
+   * @return hash value
+   */
+  public int hash(byte[] bytes) {
+    return hash(bytes, bytes.length, -1);
+  }
+
+  /**
+   * Calculate a hash using all bytes from the input argument,
+   * and a provided seed value.
+   * @param bytes input bytes
+   * @param initval seed value
+   * @return hash value
+   */
+  public int hash(byte[] bytes, int initval) {
+    return hash(bytes, 0, bytes.length, initval);
+  }
+
+  /**
+   * Calculate a hash using bytes from 0 to <code>length</code>, and
+   * the provided seed value.
+   * @param bytes input bytes
+   * @param length number of valid bytes to consider, starting at offset 0
+   * @param initval seed value
+   * @return hash value
+   */
+  public int hash(byte[] bytes, int length, int initval) {
+    return hash(bytes, 0, length, initval);
+  }
+
+  /**
+   * Calculate a hash using bytes from <code>offset</code> to <code>offset + 
+   * length</code>, and the provided seed value.
+   * @param bytes input bytes
+   * @param offset the offset into the array to start consideration
+   * @param length length of the valid bytes after offset to consider
+   * @param initval seed value
+   * @return hash value
+   */
+  public abstract int hash(byte[] bytes, int offset, int length, int initval);
+}
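
A short sketch of the intended call path through this Hash API (not part of the commit); the row key and configuration value are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.util.Hash;

    public class HashExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("hbase.hash.type", "murmur");     // "jenkins" is the other supported value
        Hash hash = Hash.getInstance(conf);        // singleton MurmurHash in this case
        byte[] key = "row-0001".getBytes();
        int h1 = hash.hash(key);                   // all bytes, default seed of -1
        int h2 = hash.hash(key, 42);               // all bytes, explicit seed
        System.out.println(h1 + " " + h2);
      }
    }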

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,261 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import static java.lang.Integer.rotateLeft;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Produces 32-bit hash for hash table lookup.
+ *
+ * <pre>lookup3.c, by Bob Jenkins, May 2006, Public Domain.
+ *
+ * You can use this free for any purpose.  It's in the public domain.
+ * It has no warranty.
+ * </pre>
+ *
+ * @see <a href="http://burtleburtle.net/bob/c/lookup3.c">lookup3.c</a>
+ * @see <a href="http://www.ddj.com/184410284">Hash Functions (and how this
+ * function compares to others such as CRC, MD?, etc.)</a>
+ * @see <a href="http://burtleburtle.net/bob/hash/doobs.html">Has an update on the
+ * Dr. Dobbs Article</a>
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class JenkinsHash extends Hash {
+  private static final int BYTE_MASK = 0xff;
+
+  private static JenkinsHash _instance = new JenkinsHash();
+
+  public static Hash getInstance() {
+    return _instance;
+  }
+
+  /**
+   * taken from  hashlittle() -- hash a variable-length key into a 32-bit value
+   *
+   * @param key the key (the unaligned variable-length array of bytes)
+   * @param nbytes number of bytes to include in hash
+   * @param initval can be any integer value
+   * @return a 32-bit value.  Every bit of the key affects every bit of the
+   * return value.  Two keys differing by one or two bits will have totally
+   * different hash values.
+   *
+   * <p>The best hash table sizes are powers of 2.  There is no need to do mod
+   * a prime (mod is sooo slow!).  If you need less than 32 bits, use a bitmask.
+   * For example, if you need only 10 bits, do
+   * <code>h = (h & hashmask(10));</code>
+   * In which case, the hash table should have hashsize(10) elements.
+   *
+   * <p>If you are hashing n strings byte[][] k, do it like this:
+   * for (int i = 0, h = 0; i < n; ++i) h = hash( k[i], h);
+   *
+   * <p>By Bob Jenkins, 2006.  bob_jenkins@burtleburtle.net.  You may use this
+   * code any way you wish, private, educational, or commercial.  It's free.
+   *
+   * <p>Use for hash table lookup, or anything where one collision in 2^^32 is
+   * acceptable.  Do NOT use for cryptographic purposes.
+  */
+  @Override
+  @SuppressWarnings("fallthrough")
+  public int hash(byte[] key, int off, int nbytes, int initval) {
+    int length = nbytes;
+    int a, b, c;
+    a = b = c = 0xdeadbeef + length + initval;
+    int offset = off;
+    for (; length > 12; offset += 12, length -= 12) {
+      a += (key[offset] & BYTE_MASK);
+      a += ((key[offset + 1] & BYTE_MASK) <<  8);
+      a += ((key[offset + 2] & BYTE_MASK) << 16);
+      a += ((key[offset + 3] & BYTE_MASK) << 24);
+      b += (key[offset + 4] & BYTE_MASK);
+      b += ((key[offset + 5] & BYTE_MASK) <<  8);
+      b += ((key[offset + 6] & BYTE_MASK) << 16);
+      b += ((key[offset + 7] & BYTE_MASK) << 24);
+      c += (key[offset + 8] & BYTE_MASK);
+      c += ((key[offset + 9] & BYTE_MASK) <<  8);
+      c += ((key[offset + 10] & BYTE_MASK) << 16);
+      c += ((key[offset + 11] & BYTE_MASK) << 24);
+
+      /*
+       * mix -- mix 3 32-bit values reversibly.
+       * This is reversible, so any information in (a,b,c) before mix() is
+       * still in (a,b,c) after mix().
+       *
+       * If four pairs of (a,b,c) inputs are run through mix(), or through
+       * mix() in reverse, there are at least 32 bits of the output that
+       * are sometimes the same for one pair and different for another pair.
+       *
+       * This was tested for:
+       * - pairs that differed by one bit, by two bits, in any combination
+       *   of top bits of (a,b,c), or in any combination of bottom bits of
+       *   (a,b,c).
+       * - "differ" is defined as +, -, ^, or ~^.  For + and -, I transformed
+       *   the output delta to a Gray code (a^(a>>1)) so a string of 1's (as
+       *    is commonly produced by subtraction) look like a single 1-bit
+       *    difference.
+       * - the base values were pseudorandom, all zero but one bit set, or
+       *   all zero plus a counter that starts at zero.
+       *
+       * Some k values for my "a-=c; a^=rot(c,k); c+=b;" arrangement that
+       * satisfy this are
+       *     4  6  8 16 19  4
+       *     9 15  3 18 27 15
+       *    14  9  3  7 17  3
+       * Well, "9 15 3 18 27 15" didn't quite get 32 bits diffing for
+       * "differ" defined as + with a one-bit base and a two-bit delta.  I
+       * used http://burtleburtle.net/bob/hash/avalanche.html to choose
+       * the operations, constants, and arrangements of the variables.
+       *
+       * This does not achieve avalanche.  There are input bits of (a,b,c)
+       * that fail to affect some output bits of (a,b,c), especially of a.
+       * The most thoroughly mixed value is c, but it doesn't really even
+       * achieve avalanche in c.
+       *
+       * This allows some parallelism.  Read-after-writes are good at doubling
+       * the number of bits affected, so the goal of mixing pulls in the
+       * opposite direction as the goal of parallelism.  I did what I could.
+       * Rotates seem to cost as much as shifts on every machine I could lay
+       * my hands on, and rotates are much kinder to the top and bottom bits,
+       * so I used rotates.
+       *
+       * #define mix(a,b,c) \
+       * { \
+       *   a -= c;  a ^= rot(c, 4);  c += b; \
+       *   b -= a;  b ^= rot(a, 6);  a += c; \
+       *   c -= b;  c ^= rot(b, 8);  b += a; \
+       *   a -= c;  a ^= rot(c,16);  c += b; \
+       *   b -= a;  b ^= rot(a,19);  a += c; \
+       *   c -= b;  c ^= rot(b, 4);  b += a; \
+       * }
+       *
+       * mix(a,b,c);
+       */
+      a -= c; a ^= rotateLeft(c, 4); c += b;
+      b -= a; b ^= rotateLeft(a, 6); a += c;
+      c -= b; c ^= rotateLeft(b, 8); b += a;
+      a -= c; a ^= rotateLeft(c, 16); c += b;
+      b -= a; b ^= rotateLeft(a, 19); a += c;
+      c -= b; c ^= rotateLeft(b, 4); b += a;
+    }
+
+    //-------------------------------- last block: affect all 32 bits of (c)
+    switch (length) {                   // all the case statements fall through
+    case 12:
+      c += ((key[offset + 11] & BYTE_MASK) << 24);
+    case 11:
+      c += ((key[offset + 10] & BYTE_MASK) << 16);
+    case 10:
+      c += ((key[offset + 9] & BYTE_MASK) <<  8);
+    case  9:
+      c += (key[offset + 8] & BYTE_MASK);
+    case  8:
+      b += ((key[offset + 7] & BYTE_MASK) << 24);
+    case  7:
+      b += ((key[offset + 6] & BYTE_MASK) << 16);
+    case  6:
+      b += ((key[offset + 5] & BYTE_MASK) <<  8);
+    case  5:
+      b += (key[offset + 4] & BYTE_MASK);
+    case  4:
+      a += ((key[offset + 3] & BYTE_MASK) << 24);
+    case  3:
+      a += ((key[offset + 2] & BYTE_MASK) << 16);
+    case  2:
+      a += ((key[offset + 1] & BYTE_MASK) <<  8);
+    case  1:
+      //noinspection PointlessArithmeticExpression
+      a += (key[offset + 0] & BYTE_MASK);
+      break;
+    case  0:
+      return c;
+    }
+    /*
+     * final -- final mixing of 3 32-bit values (a,b,c) into c
+     *
+     * Pairs of (a,b,c) values differing in only a few bits will usually
+     * produce values of c that look totally different.  This was tested for
+     * - pairs that differed by one bit, by two bits, in any combination
+     *   of top bits of (a,b,c), or in any combination of bottom bits of
+     *   (a,b,c).
+     *
+     * - "differ" is defined as +, -, ^, or ~^.  For + and -, I transformed
+     *   the output delta to a Gray code (a^(a>>1)) so a string of 1's (as
+     *   is commonly produced by subtraction) look like a single 1-bit
+     *   difference.
+     *
+     * - the base values were pseudorandom, all zero but one bit set, or
+     *   all zero plus a counter that starts at zero.
+     *
+     * These constants passed:
+     *   14 11 25 16 4 14 24
+     *   12 14 25 16 4 14 24
+     * and these came close:
+     *    4  8 15 26 3 22 24
+     *   10  8 15 26 3 22 24
+     *   11  8 15 26 3 22 24
+     *
+     * #define final(a,b,c) \
+     * {
+     *   c ^= b; c -= rot(b,14); \
+     *   a ^= c; a -= rot(c,11); \
+     *   b ^= a; b -= rot(a,25); \
+     *   c ^= b; c -= rot(b,16); \
+     *   a ^= c; a -= rot(c,4);  \
+     *   b ^= a; b -= rot(a,14); \
+     *   c ^= b; c -= rot(b,24); \
+     * }
+     *
+     */
+    c ^= b; c -= rotateLeft(b, 14);
+    a ^= c; a -= rotateLeft(c, 11);
+    b ^= a; b -= rotateLeft(a, 25);
+    c ^= b; c -= rotateLeft(b, 16);
+    a ^= c; a -= rotateLeft(c, 4);
+    b ^= a; b -= rotateLeft(a, 14);
+    c ^= b; c -= rotateLeft(b, 24);
+    return c;
+  }
+
+  /**
+   * Compute the hash of the specified file
+   * @param args name of file to compute hash of.
+   * @throws IOException e
+   */
+  public static void main(String[] args) throws IOException {
+    if (args.length != 1) {
+      System.err.println("Usage: JenkinsHash filename");
+      System.exit(-1);
+    }
+    FileInputStream in = new FileInputStream(args[0]);
+    byte[] bytes = new byte[512];
+    int value = 0;
+    JenkinsHash hash = new JenkinsHash();
+    for (int length = in.read(bytes); length > 0; length = in.read(bytes)) {
+      value = hash.hash(bytes, length, value);
+    }
+    System.out.println(Math.abs(value));
+  }
+}
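
A small sketch (not part of the commit) of chaining JenkinsHash over several keys, following the "hashing n strings" note in the javadoc above; the keys and the 10-bit mask are illustrative:

    import org.apache.hadoop.hbase.util.Hash;
    import org.apache.hadoop.hbase.util.JenkinsHash;

    public class JenkinsHashExample {
      public static void main(String[] args) {
        Hash jenkins = JenkinsHash.getInstance();
        byte[][] keys = { "a".getBytes(), "b".getBytes(), "c".getBytes() };
        int h = 0;
        for (byte[] k : keys) {
          h = jenkins.hash(k, h);          // feed the previous hash back in as the seed
        }
        // Restrict to a power-of-two table size with a bitmask, as the javadoc suggests.
        int bucket = h & ((1 << 10) - 1);
        System.out.println(h + " -> bucket " + bucket);
      }
    }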

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Methods.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,69 @@
+/*
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.UndeclaredThrowableException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class Methods {
+  private static Log LOG = LogFactory.getLog(Methods.class);
+
+  public static <T> Object call(Class<T> clazz, T instance, String methodName,
+      Class[] types, Object[] args) throws Exception {
+    try {
+      Method m = clazz.getMethod(methodName, types);
+      return m.invoke(instance, args);
+    } catch (IllegalArgumentException arge) {
+      LOG.fatal("Constructed invalid call. class="+clazz.getName()+
+          " method=" + methodName + " types=" + Classes.stringify(types), arge);
+      throw arge;
+    } catch (NoSuchMethodException nsme) {
+      throw new IllegalArgumentException(
+          "Can't find method "+methodName+" in "+clazz.getName()+"!", nsme);
+    } catch (InvocationTargetException ite) {
+      // unwrap the underlying exception and rethrow
+      if (ite.getTargetException() != null) {
+        if (ite.getTargetException() instanceof Exception) {
+          throw (Exception)ite.getTargetException();
+        } else if (ite.getTargetException() instanceof Error) {
+          throw (Error)ite.getTargetException();
+        }
+      }
+      throw new UndeclaredThrowableException(ite,
+          "Unknown exception invoking "+clazz.getName()+"."+methodName+"()");
+    } catch (IllegalAccessException iae) {
+      throw new IllegalArgumentException(
+          "Denied access calling "+clazz.getName()+"."+methodName+"()", iae);
+    } catch (SecurityException se) {
+      LOG.fatal("SecurityException calling method. class="+clazz.getName()+
+          " method=" + methodName + " types=" + Classes.stringify(types), se);
+      throw se;
+    }
+  }
+}
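
A hypothetical example (not part of the commit) of Methods.call invoking String.substring reflectively; the target class and arguments are illustrative:

    import org.apache.hadoop.hbase.util.Methods;

    public class MethodsExample {
      public static void main(String[] args) throws Exception {
        // Looks up substring(int, int) on String and invokes it on the given instance.
        Object result = Methods.call(String.class, "hello world", "substring",
            new Class[] { int.class, int.class }, new Object[] { 0, 5 });
        System.out.println(result);        // "hello"
      }
    }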

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,92 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * This is a very fast, non-cryptographic hash suitable for general hash-based
+ * lookup.  See http://murmurhash.googlepages.com/ for more details.
+ *
+ * <p>The C version of MurmurHash 2.0 found at that site was ported
+ * to Java by Andrzej Bialecki (ab at getopt org).</p>
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class MurmurHash extends Hash {
+  private static MurmurHash _instance = new MurmurHash();
+
+  public static Hash getInstance() {
+    return _instance;
+  }
+
+  @Override
+  public int hash(byte[] data, int offset, int length, int seed) {
+    int m = 0x5bd1e995;
+    int r = 24;
+
+    int h = seed ^ length;
+
+    int len_4 = length >> 2;
+
+    for (int i = 0; i < len_4; i++) {
+      int i_4 = (i << 2) + offset;
+      int k = data[i_4 + 3];
+      k = k << 8;
+      k = k | (data[i_4 + 2] & 0xff);
+      k = k << 8;
+      k = k | (data[i_4 + 1] & 0xff);
+      k = k << 8;
+      //noinspection PointlessArithmeticExpression
+      k = k | (data[i_4 + 0] & 0xff);
+      k *= m;
+      k ^= k >>> r;
+      k *= m;
+      h *= m;
+      h ^= k;
+    }
+
+    // avoid calculating modulo
+    int len_m = len_4 << 2;
+    int left = length - len_m;
+    int i_m = len_m + offset;
+
+    if (left != 0) {
+      if (left >= 3) {
+        h ^= data[i_m + 2] << 16;
+      }
+      if (left >= 2) {
+        h ^= data[i_m + 1] << 8;
+      }
+      if (left >= 1) {
+        h ^= data[i_m];
+      }
+
+      h *= m;
+    }
+
+    h ^= h >>> 13;
+    h *= m;
+    h ^= h >>> 15;
+
+    return h;
+  }
+}
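
A sketch (not part of the commit) of the offset/length variant, hashing only the payload portion of a buffer; the buffer layout is illustrative:

    import org.apache.hadoop.hbase.util.MurmurHash;

    public class MurmurHashExample {
      public static void main(String[] args) {
        byte[] buf = "prefix:payload".getBytes();
        int offset = "prefix:".length();          // skip the prefix bytes
        int length = buf.length - offset;         // hash only the payload
        int seed = -1;
        int h = MurmurHash.getInstance().hash(buf, offset, length, seed);
        System.out.println("murmur(payload) = " + h);
      }
    }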

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,135 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A generic class for pairs.
+ * @param <T1>
+ * @param <T2>
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class Pair<T1, T2> implements Serializable
+{
+  private static final long serialVersionUID = -3986244606585552569L;
+  protected T1 first = null;
+  protected T2 second = null;
+
+  /**
+   * Default constructor.
+   */
+  public Pair()
+  {
+  }
+
+  /**
+   * Constructor
+   * @param a operand
+   * @param b operand
+   */
+  public Pair(T1 a, T2 b)
+  {
+    this.first = a;
+    this.second = b;
+  }
+  
+  /**
+   * Constructs a new pair, inferring the type via the passed arguments
+   * @param <T1> type for first
+   * @param <T2> type for second
+   * @param a first element
+   * @param b second element
+   * @return a new pair containing the passed arguments
+   */
+  public static <T1,T2> Pair<T1,T2> newPair(T1 a, T2 b) {
+    return new Pair<T1,T2>(a, b);
+  }
+  
+  /**
+   * Replace the first element of the pair.
+   * @param a operand
+   */
+  public void setFirst(T1 a)
+  {
+    this.first = a;
+  }
+
+  /**
+   * Replace the second element of the pair.
+   * @param b operand
+   */
+  public void setSecond(T2 b)
+  {
+    this.second = b;
+  }
+
+  /**
+   * Return the first element stored in the pair.
+   * @return T1
+   */
+  public T1 getFirst()
+  {
+    return first;
+  }
+
+  /**
+   * Return the second element stored in the pair.
+   * @return T2
+   */
+  public T2 getSecond()
+  {
+    return second;
+  }
+
+  private static boolean equals(Object x, Object y)
+  {
+    return (x == null && y == null) || (x != null && x.equals(y));
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public boolean equals(Object other)
+  {
+    return other instanceof Pair && equals(first, ((Pair)other).first) &&
+      equals(second, ((Pair)other).second);
+  }
+
+  @Override
+  public int hashCode()
+  {
+    if (first == null)
+      return (second == null) ? 0 : second.hashCode() + 1;
+    else if (second == null)
+      return first.hashCode() + 2;
+    else
+      return first.hashCode() * 17 + second.hashCode();
+  }
+
+  @Override
+  public String toString()
+  {
+    return "{" + getFirst() + "," + getSecond() + "}";
+  }
+}
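
A minimal sketch (not part of the commit) of constructing and mutating a Pair:

    import org.apache.hadoop.hbase.util.Pair;

    public class PairExample {
      public static void main(String[] args) {
        Pair<String, Integer> p = Pair.newPair("rows", 42);   // types inferred from arguments
        p.setSecond(43);                                       // Pair is mutable
        System.out.println(p.getFirst() + " = " + p.getSecond());
        System.out.println(p);                                 // prints {rows,43}
      }
    }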

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,115 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+import java.util.Iterator;
+
+import org.apache.commons.lang.NotImplementedException;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A generic, immutable class for pairs of objects both of type <code>T</code>.
+ * @param <T>
+ * @see Pair if the types differ.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class PairOfSameType<T> implements Iterable<T> {
+  private final T first;
+  private final T second;
+
+  /**
+   * Constructor
+   * @param a operand
+   * @param b operand
+   */
+  public PairOfSameType(T a, T b) {
+    this.first = a;
+    this.second = b;
+  }
+
+  /**
+   * Return the first element stored in the pair.
+   * @return T
+   */
+  public T getFirst() {
+    return first;
+  }
+
+  /**
+   * Return the second element stored in the pair.
+   * @return T
+   */
+  public T getSecond() {
+    return second;
+  }
+
+  private static boolean equals(Object x, Object y) {
+     return (x == null && y == null) || (x != null && x.equals(y));
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public boolean equals(Object other) {
+    return other instanceof PairOfSameType &&
+      equals(first, ((PairOfSameType)other).first) &&
+      equals(second, ((PairOfSameType)other).second);
+  }
+
+  @Override
+  public int hashCode() {
+    if (first == null)
+      return (second == null) ? 0 : second.hashCode() + 1;
+    else if (second == null)
+      return first.hashCode() + 2;
+    else
+      return first.hashCode() * 17 + second.hashCode();
+  }
+
+  @Override
+  public String toString() {
+    return "{" + getFirst() + "," + getSecond() + "}";
+  }
+
+  @Override
+  public Iterator<T> iterator() {
+    return new Iterator<T>() {
+      private int returned = 0;
+
+      @Override
+      public boolean hasNext() {
+        return this.returned < 2;
+      }
+
+      @Override
+      public T next() {
+        if (++this.returned == 1) return getFirst();
+        else if (this.returned == 2) return getSecond();
+        else throw new IllegalAccessError("this.returned=" + this.returned);
+      }
+
+      @Override
+      public void remove() {
+        throw new NotImplementedException();
+      }
+    };
+  }
+}
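
A minimal sketch (not part of the commit) showing that PairOfSameType is immutable and iterable over its two elements; the region names are illustrative:

    import org.apache.hadoop.hbase.util.PairOfSameType;

    public class PairOfSameTypeExample {
      public static void main(String[] args) {
        PairOfSameType<String> daughters =
            new PairOfSameType<String>("regionA", "regionB");
        for (String name : daughters) {      // iterates first, then second
          System.out.println(name);
        }
      }
    }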

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,69 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class RetryCounter {
+  private static final Log LOG = LogFactory.getLog(RetryCounter.class);
+  private final int maxRetries;
+  private int retriesRemaining;
+  private final int retryIntervalMillis;
+  private final TimeUnit timeUnit;
+
+  public RetryCounter(int maxRetries, int retryIntervalMillis, TimeUnit timeUnit) {
+    this.maxRetries = maxRetries;
+    this.retriesRemaining = maxRetries;
+    this.retryIntervalMillis = retryIntervalMillis;
+    this.timeUnit = timeUnit;
+  }
+
+  public int getMaxRetries() {
+    return maxRetries;
+  }
+
+  /**
+   * Sleep for an exponentially increasing backoff interval.
+   * @throws InterruptedException
+   */
+  public void sleepUntilNextRetry() throws InterruptedException {
+    int attempts = getAttemptTimes();
+    long sleepTime = (long) (retryIntervalMillis * Math.pow(2, attempts));
+    LOG.info("Sleeping " + sleepTime + "ms before retry #" + attempts + "...");
+    timeUnit.sleep(sleepTime);
+  }
+
+  public boolean shouldRetry() {
+    return retriesRemaining > 0;
+  }
+
+  public void useRetry() {
+    retriesRemaining--;
+  }
+  
+  public int getAttemptTimes() {
+    return maxRetries-retriesRemaining+1;
+  }
+}
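
A sketch (not part of the commit) of the retry loop this class is meant to drive; attemptOperation() is a hypothetical stand-in for real work:

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.hbase.util.RetryCounter;

    public class RetryCounterExample {
      public static void main(String[] args) throws InterruptedException {
        RetryCounter counter = new RetryCounter(3, 100, TimeUnit.MILLISECONDS);
        while (counter.shouldRetry()) {
          if (attemptOperation()) {
            return;
          }
          counter.sleepUntilNextRetry();   // exponential backoff: 200ms, 400ms, 800ms
          counter.useRetry();
        }
        System.err.println("gave up after " + counter.getMaxRetries() + " attempts");
      }

      private static boolean attemptOperation() {
        return false;                      // stand-in for real work that may fail
      }
    }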

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,40 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class RetryCounterFactory {
+  private final int maxRetries;
+  private final int retryIntervalMillis;
+
+  public RetryCounterFactory(int maxRetries, int retryIntervalMillis) {
+    this.maxRetries = maxRetries;
+    this.retryIntervalMillis = retryIntervalMillis;
+  }
+
+  public RetryCounter create() {
+    return new RetryCounter(
+      maxRetries, retryIntervalMillis, TimeUnit.MILLISECONDS
+    );
+  }
+}
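
A tiny sketch (not part of the commit) of handing out one counter per operation from a shared factory:

    import org.apache.hadoop.hbase.util.RetryCounter;
    import org.apache.hadoop.hbase.util.RetryCounterFactory;

    public class RetryCounterFactoryExample {
      public static void main(String[] args) {
        RetryCounterFactory factory = new RetryCounterFactory(3, 100);
        RetryCounter perCallCounter = factory.create();   // fresh, independent state per call
        System.out.println(perCallCounter.getMaxRetries());
      }
    }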

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,289 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.lang.ref.Reference;
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.SoftReference;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * A SortedMap implementation that uses Soft Reference values
+ * internally to make it play well with the GC when in a low-memory
+ * situation. Use as a cache where you also need SortedMap functionality.
+ *
+ * @param <K> key class
+ * @param <V> value class
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class SoftValueSortedMap<K,V> implements SortedMap<K,V> {
+  private final SortedMap<K, SoftValue<K,V>> internalMap;
+  private final ReferenceQueue<V> rq = new ReferenceQueue<V>();
+  private final Object sync;
+
+  /** Constructor */
+  public SoftValueSortedMap() {
+    this(new TreeMap<K, SoftValue<K,V>>());
+  }
+
+  /**
+   * Constructor
+   * @param c comparator
+   */
+  public SoftValueSortedMap(final Comparator<K> c) {
+    this(new TreeMap<K, SoftValue<K,V>>(c));
+  }
+
+  /** Internal constructor
+   * @param original object to wrap and synchronize on
+   */
+  private SoftValueSortedMap(SortedMap<K,SoftValue<K,V>> original) {
+    this(original, original);
+  }
+
+  /** Internal constructor
+   * For headMap, tailMap, and subMap support
+   * @param original object to wrap
+   * @param sync object to synchronize on
+   */
+  private SoftValueSortedMap(SortedMap<K,SoftValue<K,V>> original, Object sync) {
+    this.internalMap = original;
+    this.sync = sync;
+  }
+
+  /**
+   * Checks soft references and cleans any that have been placed on
+   * ReferenceQueue.  Call if get/put etc. are not called regularly.
+   * Internally these call checkReferences on each access.
+   * @return How many references cleared.
+   */
+  @SuppressWarnings("unchecked")
+  private int checkReferences() {
+    int i = 0;
+    for (Reference<? extends V> ref; (ref = this.rq.poll()) != null;) {
+      i++;
+      this.internalMap.remove(((SoftValue<K,V>)ref).key);
+    }
+    return i;
+  }
+
+  public V put(K key, V value) {
+    synchronized(sync) {
+      checkReferences();
+      SoftValue<K,V> oldValue = this.internalMap.put(key,
+        new SoftValue<K,V>(key, value, this.rq));
+      return oldValue == null ? null : oldValue.get();
+    }
+  }
+
+  @Override
+  public void putAll(Map<? extends K, ? extends V> m) {
+    throw new RuntimeException("Not implemented");
+  }
+
+  public V get(Object key) {
+    synchronized(sync) {
+      checkReferences();
+      SoftValue<K,V> value = this.internalMap.get(key);
+      if (value == null) {
+        return null;
+      }
+      if (value.get() == null) {
+        this.internalMap.remove(key);
+        return null;
+      }
+      return value.get();
+    }
+  }
+
+  public V remove(Object key) {
+    synchronized(sync) {
+      checkReferences();
+      SoftValue<K,V> value = this.internalMap.remove(key);
+      return value == null ? null : value.get();
+    }
+  }
+
+  public boolean containsKey(Object key) {
+    synchronized(sync) {
+      checkReferences();
+      return this.internalMap.containsKey(key);
+    }
+  }
+
+  public boolean containsValue(Object value) {
+    throw new UnsupportedOperationException("Don't support containsValue!");
+  }
+
+  public K firstKey() {
+    synchronized(sync) {
+      checkReferences();
+      return internalMap.firstKey();
+    }
+  }
+
+  public K lastKey() {
+    synchronized(sync) {
+      checkReferences();
+      return internalMap.lastKey();
+    }
+  }
+
+  public SoftValueSortedMap<K,V> headMap(K key) {
+    synchronized(sync) {
+      checkReferences();
+      return new SoftValueSortedMap<K,V>(this.internalMap.headMap(key), sync);
+    }
+  }
+
+  public SoftValueSortedMap<K,V> tailMap(K key) {
+    synchronized(sync) {
+      checkReferences();
+      return new SoftValueSortedMap<K,V>(this.internalMap.tailMap(key), sync);
+    }
+  }
+
+  public SoftValueSortedMap<K,V> subMap(K fromKey, K toKey) {
+    synchronized(sync) {
+      checkReferences();
+      return new SoftValueSortedMap<K,V>(this.internalMap.subMap(fromKey,
+          toKey), sync);
+    }
+  }
+
+  /*
+   * retrieves the value associated with the greatest key strictly less than
+   *  the given key, or null if there is no such key
+   * @param key the key we're interested in
+   */
+  public synchronized V lowerValueByKey(K key) {
+    synchronized(sync) {
+      checkReferences();
+
+      Map.Entry<K,SoftValue<K,V>> entry =
+        ((NavigableMap<K, SoftValue<K,V>>) this.internalMap).lowerEntry(key);
+      if (entry==null) {
+        return null;
+      }
+      SoftValue<K,V> value=entry.getValue();
+      if (value==null) {
+        return null;
+      }
+      if (value.get() == null) {
+        this.internalMap.remove(key);
+        return null;
+      }
+      return value.get();
+    }
+  }
+  
+  public boolean isEmpty() {
+    synchronized(sync) {
+      checkReferences();
+      return this.internalMap.isEmpty();
+    }
+  }
+
+  public int size() {
+    synchronized(sync) {
+      checkReferences();
+      return this.internalMap.size();
+    }
+  }
+
+  public void clear() {
+    synchronized(sync) {
+      checkReferences();
+      this.internalMap.clear();
+    }
+  }
+
+  public Set<K> keySet() {
+    synchronized(sync) {
+      checkReferences();
+      // this is not correct as per SortedMap contract (keySet should be
+      // modifiable)
+      // needed here so that another thread cannot modify the keyset
+      // without locking
+      return Collections.unmodifiableSet(this.internalMap.keySet());
+    }
+  }
+
+  public Comparator<? super K> comparator() {
+    return this.internalMap.comparator();
+  }
+
+  public Set<Map.Entry<K,V>> entrySet() {
+    synchronized(sync) {
+      checkReferences();
+      // this is not correct as per SortedMap contract (entrySet should be
+      // backed by map)
+      Set<Map.Entry<K, V>> realEntries = new LinkedHashSet<Map.Entry<K, V>>();
+      for (Map.Entry<K, SoftValue<K, V>> entry : this.internalMap.entrySet()) {
+        realEntries.add(entry.getValue());
+      }
+      return realEntries;
+    }
+  }
+
+  public Collection<V> values() {
+    synchronized(sync) {
+      checkReferences();
+      ArrayList<V> hardValues = new ArrayList<V>();
+      for (SoftValue<K,V> softValue : this.internalMap.values()) {
+        hardValues.add(softValue.get());
+      }
+      return hardValues;
+    }
+  }
+
+  private static class SoftValue<K,V> extends SoftReference<V> implements Map.Entry<K, V> {
+    final K key;
+
+    SoftValue(K key, V value, ReferenceQueue<V> q) {
+      super(value, q);
+      this.key = key;
+    }
+
+    public K getKey() {
+      return this.key;
+    }
+
+    public V getValue() {
+      return get();
+    }
+
+    public V setValue(V value) {
+      throw new RuntimeException("Not implemented");
+    }
+  }
+}
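
A sketch (not part of the commit) of using SoftValueSortedMap as a cache; because values are held through SoftReferences, get() must be treated as best-effort:

    import org.apache.hadoop.hbase.util.SoftValueSortedMap;

    public class SoftValueSortedMapExample {
      public static void main(String[] args) {
        SoftValueSortedMap<String, byte[]> cache = new SoftValueSortedMap<String, byte[]>();
        cache.put("row-0001", new byte[1024]);
        byte[] cached = cache.get("row-0001");
        if (cached == null) {
          // The value was collected under memory pressure: reload from the backing store.
          cached = new byte[1024];
          cache.put("row-0001", cached);
        }
        System.out.println(cache.size() + " entries cached");
      }
    }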

Added: hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java?rev=1449950&view=auto
==============================================================================
--- hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java (added)
+++ hbase/trunk/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java Mon Feb 25 22:50:17 2013
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.util;
+
+/**
+ * Utility class to manage a triple.
+ */
+public class Triple<A, B, C> {
+  private A first;
+  private B second;
+  private C third;
+
+  public Triple(A first, B second, C third) {
+    this.first = first;
+    this.second = second;
+    this.third = third;
+  }
+
+  public int hashCode() {
+    int hashFirst = (first != null ? first.hashCode() : 0);
+    int hashSecond = (second != null ? second.hashCode() : 0);
+    int hashThird = (third != null ? third.hashCode() : 0);
+
+    return (hashFirst >> 1) ^ hashSecond ^ (hashThird << 1);
+  }
+
+  public boolean equals(Object obj) {
+    if (!(obj instanceof Triple)) {
+      return false;
+    }
+
+    Triple<?, ?, ?> otherTriple = (Triple<?, ?, ?>) obj;
+
+    if (first != otherTriple.first && (first != null && !(first.equals(otherTriple.first))))
+      return false;
+    if (second != otherTriple.second && (second != null && !(second.equals(otherTriple.second))))
+      return false;
+    if (third != otherTriple.third && (third != null && !(third.equals(otherTriple.third))))
+      return false;
+
+    return true;
+  }
+
+  public String toString() {
+    return "(" + first + ", " + second + ", " + third + ")";
+  }
+
+  public A getFirst() {
+    return first;
+  }
+
+  public void setFirst(A first) {
+    this.first = first;
+  }
+
+  public B getSecond() {
+    return second;
+  }
+
+  public void setSecond(B second) {
+    this.second = second;
+  }
+
+  public C getThird() {
+    return third;
+  }
+
+  public void setThird(C third) {
+    this.third = third;
+  }
+}
+
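
A minimal sketch (not part of the commit) of constructing and mutating a Triple; the values are illustrative:

    import org.apache.hadoop.hbase.util.Triple;

    public class TripleExample {
      public static void main(String[] args) {
        Triple<String, Long, Boolean> t =
            new Triple<String, Long, Boolean>("region", 1024L, true);
        t.setThird(false);                  // each slot is independently mutable
        System.out.println(t.getFirst() + " " + t.getSecond() + " " + t.getThird());
      }
    }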
+
+