Posted to commits@hbase.apache.org by st...@apache.org on 2009/05/03 19:11:58 UTC

svn commit: r771087 - in /hadoop/hbase/trunk: ./ conf/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/regionserver/ src/java/org/apache/hadoop/hbase/thrift/ src/java/org/apache/hadoop/hbase/thrift/generated/ src/java/org/apache/hado...

Author: stack
Date: Sun May  3 17:11:58 2009
New Revision: 771087

URL: http://svn.apache.org/viewvc?rev=771087&view=rev
Log:
HBASE-1318 Thrift server doesn't know about atomicIncrement -- reverting the earlier HBASE-1318 commit because it mistakenly included the HBASE-1336 patch
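
For context: the ThriftServer method removed below is a thin wrapper around HTable.incrementColumnValue, so the increment itself remains available through the native Java client while HBASE-1318 awaits a clean re-commit. A minimal sketch of that native call, assuming a cluster reachable via the default HBaseConfiguration; the table, row, and column names are illustrative only:

    import java.io.IOException;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.util.Bytes;

    public class IncrementSketch {
      public static void main(String[] args) throws IOException {
        // Assumes an hbase-site.xml on the classpath pointing at a running cluster.
        HTable table = new HTable(new HBaseConfiguration(), "mytable");
        long next = table.incrementColumnValue(
            Bytes.toBytes("row1"),            // row to increment
            Bytes.toBytes("counters:hits"),   // column in the family:qualifier form of this era
            1L);                              // amount to add
        System.out.println("value after increment: " + next);
      }
    }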

Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/conf/hbase-env.sh
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/Hbase.thrift
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Sun May  3 17:11:58 2009
@@ -102,7 +102,6 @@
    HBASE-1347  HTable.incrementColumnValue does not take negative 'amount'
                (Evgeny Ryabitskiy via Stack)
    HBASE-1365  Typo in TableInputFormatBase.setInputColums (Jon Gray via Stack)
-   HBASE-1318  Thrift server doesnt know about atomicIncrement
 
   IMPROVEMENTS
    HBASE-1089  Add count of regions on filesystem to master UI; add percentage
@@ -190,10 +189,6 @@
    HBASE-1112  we will lose data if the table name happens to be the logs' dir
                name (Samuel Guo via Stack)
 
-  NEW FEATURES
-
-  OPTIMIZATIONS
-
 Release 0.19.0 - 01/21/2009
   INCOMPATIBLE CHANGES
    HBASE-885   TableMap and TableReduce should be interfaces

Modified: hadoop/hbase/trunk/conf/hbase-env.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/hbase-env.sh?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/conf/hbase-env.sh (original)
+++ hadoop/hbase/trunk/conf/hbase-env.sh Sun May  3 17:11:58 2009
@@ -23,6 +23,7 @@
 
 # The java implementation to use.  Java 1.6 required.
 # export JAVA_HOME=/usr/java/jdk1.6.0/
+export JAVA_HOME=/usr/lib/jvm/java-6-sun-1.6.0.07/
 
 # Extra Java CLASSPATH elements.  Optional.
 # export HBASE_CLASSPATH=

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java Sun May  3 17:11:58 2009
@@ -772,17 +772,13 @@
 
   /**
    * @param column Column minus its delimiter
-   * @param familylength Length of family in passed <code>column</code>
    * @return True if column matches.
    * @see #matchingColumn(byte[])
    */
-  public boolean matchingColumnNoDelimiter(final byte [] column,
-      final int familylength) {
+  public boolean matchingColumnNoDelimiter(final byte [] column) {
     int o = getColumnOffset();
     int l = getColumnLength(o);
-    int f = getFamilyLength(o);
-    return compareColumns(getBuffer(), o, l, f,
-      column, 0, column.length, familylength) == 0;
+    return compareColumns(getBuffer(), o, l, column, 0, column.length) == 0;
   }
 
   /**
@@ -805,26 +801,15 @@
    * @param left
    * @param loffset
    * @param llength
-   * @param lfamilylength Offset of family delimiter in left column.
    * @param right
    * @param roffset
    * @param rlength
-   * @param rfamilylength Offset of family delimiter in right column.
    * @return
    */
   static int compareColumns(final byte [] left, final int loffset,
-      final int lfamilylength, final int llength,
-      final byte [] right, final int roffset, final int rlength,
-      final int rfamilylength) {
-    // Compare family portion first.
-    int diff = Bytes.compareTo(left, loffset, lfamilylength,
-      right, roffset, rfamilylength);
-    if (diff != 0) {
-      return diff;
-    }
-    // Compare qualifier portion
-    return Bytes.compareTo(left, loffset + lfamilylength,
-      llength, right, roffset + rfamilylength, rlength);
+      final int llength, final byte [] right, final int roffset,
+      final int rlength) {
+    return Bytes.compareTo(left, loffset, llength, right, roffset, rlength);
   }
 
   /**
@@ -1052,12 +1037,11 @@
     }
 
     public int compareColumns(final KeyValue left, final byte [] right,
-        final int roffset, final int rlength, final int rfamilyoffset) {
+        final int roffset, final int rlength) {
       int offset = left.getColumnOffset();
       int length = left.getColumnLength(offset);
       return getRawComparator().compareColumns(left.getBuffer(), offset, length,
-        left.getFamilyLength(offset),
-        right, roffset, rlength, rfamilyoffset);
+        right, roffset, rlength);
     }
 
     int compareColumns(final KeyValue left, final short lrowlength,
@@ -1067,11 +1051,9 @@
       int roffset = right.getColumnOffset(rrowlength);
       int llength = left.getColumnLength(loffset, lkeylength);
       int rlength = right.getColumnLength(roffset, rkeylength);
-      int lfamilyoffset = left.getFamilyLength(loffset);
-      int rfamilyoffset = right.getFamilyLength(roffset);
       return getRawComparator().compareColumns(left.getBuffer(), loffset,
-          llength, lfamilyoffset,
-        right.getBuffer(), roffset, rlength, rfamilyoffset);
+          llength,
+        right.getBuffer(), roffset, rlength);
     }
 
     /**
@@ -1399,11 +1381,9 @@
       return Bytes.compareTo(left, loffset, llength, right, roffset, rlength);
     }
 
-    protected int compareColumns(
-        byte [] left, int loffset, int llength, final int lfamilylength,
-        byte [] right, int roffset, int rlength, final int rfamilylength) {
-      return KeyValue.compareColumns(left, loffset, llength, lfamilylength,
-        right, roffset, rlength, rfamilylength);
+    protected int compareColumns(byte [] left, int loffset, int llength,
+        byte [] right, int roffset, int rlength) {
+      return KeyValue.compareColumns(left, loffset, llength, right, roffset, rlength);
     }
 
     int compareTimestamps(final long ltimestamp, final long rtimestamp) {
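
With this revert, KeyValue.compareColumns is again a single Bytes.compareTo over the full family:qualifier byte array, rather than the two-step family-then-qualifier comparison the mistaken commit had introduced. A small illustrative snippet (the column names are made up):

    import org.apache.hadoop.hbase.util.Bytes;

    public class FlatColumnCompare {
      public static void main(String[] args) {
        // Whole-column, byte-wise comparison, as restored above.
        byte[] left = Bytes.toBytes("info:age");
        byte[] right = Bytes.toBytes("info:name");
        int cmp = Bytes.compareTo(left, 0, left.length, right, 0, right.length);
        System.out.println(cmp < 0); // true: "info:age" sorts before "info:name"
      }
    }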

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java Sun May  3 17:11:58 2009
@@ -126,7 +126,6 @@
     private Pattern columnMatcher;
     // Column without delimiter so easy compare to KeyValue column
     private byte [] col;
-    private int familylength = 0;
   
     ColumnMatcher(final byte [] col) throws IOException {
       byte [][] parse = parseColumn(col);
@@ -151,7 +150,6 @@
         } else {
           this.matchType = MATCH_TYPE.SIMPLE;
           this.col = columnWithoutDelimiter;
-          this.familylength = parse[0].length;
           this.wildCardmatch = false;
         }
       } catch(Exception e) {
@@ -167,7 +165,7 @@
      */
     boolean matches(final KeyValue kv) throws IOException {
       if (this.matchType == MATCH_TYPE.SIMPLE) {
-        return kv.matchingColumnNoDelimiter(this.col, this.familylength);
+        return kv.matchingColumnNoDelimiter(this.col);
       } else if(this.matchType == MATCH_TYPE.FAMILY_ONLY) {
         return kv.matchingFamily(this.family);
       } else if (this.matchType == MATCH_TYPE.REGEX) {

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java Sun May  3 17:11:58 2009
@@ -309,9 +309,7 @@
         // Check this edit is for me. Also, guard against writing the speical
         // METACOLUMN info such as HBASE::CACHEFLUSH entries
         KeyValue kv = val.getKeyValue();
-        if (val.isTransactionEntry() ||
-            kv.matchingColumnNoDelimiter(HLog.METACOLUMN,
-              HLog.METACOLUMN.length - 1) ||
+        if (val.isTransactionEntry() || kv.matchingColumnNoDelimiter(HLog.METACOLUMN) ||
           !Bytes.equals(key.getRegionName(), regioninfo.getRegionName()) ||
           !kv.matchingFamily(family.getName())) {
           continue;

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/Hbase.thrift
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/Hbase.thrift?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/Hbase.thrift (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/Hbase.thrift Sun May  3 17:11:58 2009
@@ -376,17 +376,7 @@
    * @param timestamp timestamp
    */
   void mutateRowsTs(1:Text tableName, 2:list<BatchMutation> rowBatches, 3:i64 timestamp)
-    throws (1:IOError io, 2:IllegalArgument ia)
-
-  /**
-   * Atomically increment the column value specified.  Returns the next value post increment.
-   * @param tableName name of table
-   * @param row row to increment
-   * @param column name of column
-   * @param value amount to increment by
-   */
-  i64 atomicIncrement(1:Text tableName, 2:Text row, 3:Text column, 4:i64 value)
-    throws (1:IOError io, 2:IllegalArgument ia)
+    throws (1:IOError io, 2:IllegalArgument ia)  
     
   /** 
    * Delete all cells that match the passed row and column.

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java Sun May  3 17:11:58 2009
@@ -422,17 +422,7 @@
         throw new IllegalArgument(e.getMessage());
       }
     }
-
-    public long atomicIncrement(byte[] tableName, byte[] row, byte[] column, long amount) throws IOError, IllegalArgument, TException {
-      HTable table;
-      try {
-        table = getTable(tableName);
-        return table.incrementColumnValue(row, column, amount);
-      } catch (IOException e) {
-        throw new IOError(e.getMessage());
-      }
-    }
-
+ 
     public void scannerClose(int id) throws IOError, IllegalArgument {
       LOG.debug("scannerClose: id=" + id);
       Scanner scanner = getScanner(id);

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java Sun May  3 17:11:58 2009
@@ -234,8 +234,6 @@
      */
     public void mutateRowsTs(byte[] tableName, List<BatchMutation> rowBatches, long timestamp) throws IOError, IllegalArgument, TException;
 
-    public long atomicIncrement(byte[] tableName, byte[] row, byte[] column, long value) throws IOError, IllegalArgument, TException;
-    
     /**
      * Delete all cells that match the passed row and column.
      * 
@@ -1187,49 +1185,6 @@
       return;
     }
 
-    public long atomicIncrement(byte[] tableName, byte[] row, byte[] column, long value) throws IOError, IllegalArgument, TException
-    {
-      send_atomicIncrement(tableName, row, column, value);
-      return recv_atomicIncrement();
-    }
-
-    public void send_atomicIncrement(byte[] tableName, byte[] row, byte[] column, long value) throws TException
-    {
-      oprot_.writeMessageBegin(new TMessage("atomicIncrement", TMessageType.CALL, seqid_));
-      atomicIncrement_args args = new atomicIncrement_args();
-      args.tableName = tableName;
-      args.row = row;
-      args.column = column;
-      args.value = value;
-      args.write(oprot_);
-      oprot_.writeMessageEnd();
-      oprot_.getTransport().flush();
-    }
-
-    public long recv_atomicIncrement() throws IOError, IllegalArgument, TException
-    {
-      TMessage msg = iprot_.readMessageBegin();
-      if (msg.type == TMessageType.EXCEPTION) {
-        TApplicationException x = TApplicationException.read(iprot_);
-        iprot_.readMessageEnd();
-        throw x;
-      }
-      atomicIncrement_result result = new atomicIncrement_result();
-      result.read(iprot_);
-      iprot_.readMessageEnd();
-      if (result.__isset.success) {
-        return result.success;
-      }
-      if (result.__isset.io) {
-        throw result.io;
-      }
-      if (result.__isset.ia) {
-        throw result.ia;
-      }
-      throw new TApplicationException(TApplicationException.MISSING_RESULT, "atomicIncrement failed: unknown result");
-    }
-    
-
     public void deleteAll(byte[] tableName, byte[] row, byte[] column) throws IOError, TException
     {
       send_deleteAll(tableName, row, column);
@@ -1630,7 +1585,6 @@
       processMap_.put("mutateRowTs", new mutateRowTs());
       processMap_.put("mutateRows", new mutateRows());
       processMap_.put("mutateRowsTs", new mutateRowsTs());
-      processMap_.put("atomicIncrement", new atomicIncrement());
       processMap_.put("deleteAll", new deleteAll());
       processMap_.put("deleteAllTs", new deleteAllTs());
       processMap_.put("deleteAllRow", new deleteAllRow());
@@ -2162,32 +2116,6 @@
 
     }
 
-    private class atomicIncrement implements ProcessFunction {
-      public void process(int seqid, TProtocol iprot, TProtocol oprot) throws TException
-      {
-        atomicIncrement_args args = new atomicIncrement_args();
-        args.read(iprot);
-        iprot.readMessageEnd();
-        atomicIncrement_result result = new atomicIncrement_result();
-        try {
-          result.success = iface_.atomicIncrement(args.tableName, args.row, args.column, args.value);
-          result.__isset.success = true;
-        } catch (IOError io) {
-          result.io = io;
-          result.__isset.io = true;
-        } catch (IllegalArgument ia) {
-          result.ia = ia;
-          result.__isset.ia = true;
-        }
-        oprot.writeMessageBegin(new TMessage("atomicIncrement", TMessageType.REPLY, seqid));
-        result.write(oprot);
-        oprot.writeMessageEnd();
-        oprot.getTransport().flush();
-      }
-
-    }
-
-
     private class deleteAll implements ProcessFunction {
       public void process(int seqid, TProtocol iprot, TProtocol oprot) throws TException
       {
@@ -8558,379 +8486,9 @@
       sb.append(")");
       return sb.toString();
     }
-  }
-
-  public static class atomicIncrement_args implements TBase, java.io.Serializable   {
-    public byte[] tableName;
-    public byte[] row;
-    public byte[] column;
-    public long value;
-
-    public final Isset __isset = new Isset();
-    public static final class Isset implements java.io.Serializable {
-      public boolean tableName = false;
-      public boolean row = false;
-      public boolean column = false;
-      public boolean value = false;
-
-    }
-
-    public atomicIncrement_args() {
-    }
-
-    public atomicIncrement_args(
-        byte[] tableName,
-        byte[] row,
-        byte[] column,
-        long value)
-    {
-      this();
-      this.tableName = tableName;
-      this.__isset.tableName = true;
-      this.row = row;
-      this.__isset.row = true;
-      this.column = column;
-      this.__isset.column = true;
-      this.value = value;
-      this.__isset.value = true;
-    }
-
-    public boolean equals(Object that) {
-      if (that == null)
-        return false;
-      if (that instanceof atomicIncrement_args)
-        return this.equals((atomicIncrement_args)that);
-      return false;
-    }
-
-    public boolean equals(atomicIncrement_args that) {
-      if (that == null)
-        return false;
-
-      boolean this_present_tableName = true && (this.tableName != null);
-      boolean that_present_tableName = true && (that.tableName != null);
-      if (this_present_tableName || that_present_tableName) {
-        if (!(this_present_tableName && that_present_tableName))
-          return false;
-        if (!java.util.Arrays.equals(this.tableName, that.tableName))
-          return false;
-      }
-
-      boolean this_present_row = true && (this.row != null);
-      boolean that_present_row = true && (that.row != null);
-      if (this_present_row || that_present_row) {
-        if (!(this_present_row && that_present_row))
-          return false;
-        if (!java.util.Arrays.equals(this.row, that.row))
-          return false;
-      }
-
-
-      boolean this_present_column = true && (this.column != null);
-      boolean that_present_column = true && (that.column != null);
-      if (this_present_column || that_present_column) {
-        if (!(this_present_column && that_present_column))
-          return false;
-        if (!java.util.Arrays.equals(this.column, that.column))
-          return false;
-      }
-
-      boolean this_present_value = true;
-      boolean that_present_value = true;
-      if (this_present_value || that_present_value) {
-        if (!(this_present_value && that_present_value))
-          return false;
-        if (this.value != that.value)
-          return false;
-      }
-
-      return true;
-    }
-
-    public int hashCode() {
-      return 0;
-    }
-
-    public void read(TProtocol iprot) throws TException {
-      TField field;
-      iprot.readStructBegin();
-      while (true)
-      {
-        field = iprot.readFieldBegin();
-        if (field.type == TType.STOP) {
-          break;
-        }
-        switch (field.id)
-        {
-          case 1:
-            if (field.type == TType.STRING) {
-              this.tableName = iprot.readBinary();
-              this.__isset.tableName = true;
-            } else {
-              TProtocolUtil.skip(iprot, field.type);
-            }
-            break;
-          case 2:
-            if (field.type == TType.STRING) {
-              this.row = iprot.readBinary();
-              this.__isset.row = true;
-            } else {
-              TProtocolUtil.skip(iprot, field.type);
-            }
-            break;
-          case 3:
-            if (field.type == TType.STRING) {
-              this.column = iprot.readBinary();
-              this.__isset.column = true;
-            } else {
-              TProtocolUtil.skip(iprot, field.type);
-            }
-            break;
-          case 4:
-            if (field.type == TType.I64) {
-              this.value = iprot.readI64();
-              this.__isset.value = true;
-            } else {
-              TProtocolUtil.skip(iprot, field.type);
-            }
-            break;
-          default:
-            TProtocolUtil.skip(iprot, field.type);
-            break;
-        }
-        iprot.readFieldEnd();
-      }
-      iprot.readStructEnd();
-    }
-
-    public void write(TProtocol oprot) throws TException {
-      TStruct struct = new TStruct("atomicIncrement_args");
-      oprot.writeStructBegin(struct);
-      TField field = new TField();
-      if (this.tableName != null) {
-        field.name = "tableName";
-        field.type = TType.STRING;
-        field.id = 1;
-        oprot.writeFieldBegin(field);
-        oprot.writeBinary(this.tableName);
-        oprot.writeFieldEnd();
-      }
-      if (this.row != null) {
-        field.name = "row";
-        field.type = TType.STRING;
-        field.id = 2;
-        oprot.writeFieldBegin(field);
-        oprot.writeBinary(this.row);
-        oprot.writeFieldEnd();
-      }
-      if (this.column != null) {
-        field.name = "column";
-        field.type = TType.STRING;
-        field.id = 3;
-        oprot.writeFieldBegin(field);
-        oprot.writeBinary(this.column);
-        oprot.writeFieldEnd();
-      }
-      field.name = "value";
-      field.type = TType.I64;
-      field.id = 4;
-      oprot.writeFieldBegin(field);
-      oprot.writeI64(this.value);
-      oprot.writeFieldEnd();
-      oprot.writeFieldStop();
-      oprot.writeStructEnd();
-    }
-
-    public String toString() {
-      StringBuilder sb = new StringBuilder("atomicIncrement_args(");
-      sb.append("tableName:");
-      sb.append(this.tableName);
-      sb.append(",row:");
-      sb.append(this.row);
-      sb.append(",column:");
-      sb.append(this.column);
-      sb.append(",value:");
-      sb.append(this.value);
-      sb.append(")");
-      return sb.toString();
-    }
-
-  }
-
-  public static class atomicIncrement_result implements TBase, java.io.Serializable   {
-    public long success;
-    public IOError io;
-    public IllegalArgument ia;
-
-    public final Isset __isset = new Isset();
-    public static final class Isset implements java.io.Serializable {
-      public boolean success = false;
-      public boolean io = false;
-      public boolean ia = false;
-    }
-
-    public atomicIncrement_result() {
-    }
-
-    public atomicIncrement_result(
-        long success,
-        IOError io,
-        IllegalArgument ia)
-    {
-      this();
-      this.success = success;
-      this.__isset.success = true;
-      this.io = io;
-      this.__isset.io = true;
-      this.ia = ia;
-      this.__isset.ia = true;
-    }
-
-    public boolean equals(Object that) {
-      if (that == null)
-        return false;
-      if (that instanceof atomicIncrement_result)
-        return this.equals((atomicIncrement_result)that);
-      return false;
-    }
-
-    public boolean equals(atomicIncrement_result that) {
-      if (that == null)
-        return false;
-
-      boolean this_present_success = true;
-      boolean that_present_success = true;
-      if (this_present_success || that_present_success) {
-        if (!(this_present_success && that_present_success))
-          return false;
-        if (this.success != that.success)
-          return false;
-      }
-
-      boolean this_present_io = true && (this.io != null);
-      boolean that_present_io = true && (that.io != null);
-      if (this_present_io || that_present_io) {
-        if (!(this_present_io && that_present_io))
-          return false;
-        if (!this.io.equals(that.io))
-          return false;
-      }
-
-      boolean this_present_ia = true && (this.ia != null);
-      boolean that_present_ia = true && (that.ia != null);
-      if (this_present_ia || that_present_ia) {
-        if (!(this_present_ia && that_present_ia))
-          return false;
-        if (!this.ia.equals(that.ia))
-          return false;
-      }
-
-      return true;
-    }
-
-    public int hashCode() {
-      return 0;
-    }
-
-    public void read(TProtocol iprot) throws TException {
-      TField field;
-      iprot.readStructBegin();
-      while (true)
-      {
-        field = iprot.readFieldBegin();
-        if (field.type == TType.STOP) {
-          break;
-        }
-        switch (field.id)
-        {
-          case 0:
-            if (field.type == TType.I64) {
-              this.success = iprot.readI64();
-              this.__isset.success = true;
-            } else {
-              TProtocolUtil.skip(iprot, field.type);
-            }
-            break;
-          case 1:
-            if (field.type == TType.STRUCT) {
-              this.io = new IOError();
-              this.io.read(iprot);
-              this.__isset.io = true;
-            } else {
-              TProtocolUtil.skip(iprot, field.type);
-            }
-            break;
-          case 2:
-            if (field.type == TType.STRUCT) {
-              this.ia = new IllegalArgument();
-              this.ia.read(iprot);
-              this.__isset.ia = true;
-            } else {
-              TProtocolUtil.skip(iprot, field.type);
-            }
-            break;
-          default:
-            TProtocolUtil.skip(iprot, field.type);
-            break;
-        }
-        iprot.readFieldEnd();
-      }
-      iprot.readStructEnd();
-    }
-
-    public void write(TProtocol oprot) throws TException {
-      TStruct struct = new TStruct("atomicIncrement_result");
-      oprot.writeStructBegin(struct);
-      TField field = new TField();
-
-      if (this.__isset.success) {
-        field.name = "success";
-        field.type = TType.I64;
-        field.id = 0;
-        oprot.writeFieldBegin(field);
-        oprot.writeI64(this.success);
-        oprot.writeFieldEnd();
-      } else if (this.__isset.io) {
-        if (this.io != null) {
-          field.name = "io";
-          field.type = TType.STRUCT;
-          field.id = 1;
-          oprot.writeFieldBegin(field);
-          this.io.write(oprot);
-          oprot.writeFieldEnd();
-        }
-      } else if (this.__isset.ia) {
-        if (this.ia != null) {
-          field.name = "ia";
-          field.type = TType.STRUCT;
-          field.id = 2;
-          oprot.writeFieldBegin(field);
-          this.ia.write(oprot);
-          oprot.writeFieldEnd();
-        }
-      }
-      oprot.writeFieldStop();
-      oprot.writeStructEnd();
-    }
-
-    public String toString() {
-      StringBuilder sb = new StringBuilder("atomicIncrement_result(");
-      sb.append("success:");
-      sb.append(this.success);
-      sb.append(",io:");
-      sb.append(this.io.toString());
-      sb.append(",ia:");
-      sb.append(this.ia.toString());
-
-
-      sb.append(")");
-      return sb.toString();
-    }
 
   }
 
-
-
   public static class deleteAll_args implements TBase, java.io.Serializable   {
     public byte[] tableName;
     public byte[] row;

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java Sun May  3 17:11:58 2009
@@ -251,8 +251,7 @@
         HRegionInfo info = null;
         for (KeyValue kv: results) {
           if (KeyValue.META_COMPARATOR.compareColumns(kv,
-            HConstants.COL_REGIONINFO, 0, HConstants.COL_REGIONINFO.length,
-              HConstants.COLUMN_FAMILY_STR.length()) == 0) {
+            HConstants.COL_REGIONINFO, 0, HConstants.COL_REGIONINFO.length) == 0) {
             info = Writables.getHRegionInfoOrNull(kv.getValue());
             if (info == null) {
               LOG.warn("region info is null for row " +

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java?rev=771087&r1=771086&r2=771087&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java Sun May  3 17:11:58 2009
@@ -34,16 +34,7 @@
 
 public class TestKeyValue extends TestCase {
   private final Log LOG = LogFactory.getLog(this.getClass().getName());
-
-  public void testColumnCompare() throws Exception {
-    final byte [] a = Bytes.toBytes("aaa");
-    final byte [] column1 = Bytes.toBytes("abc:def");
-    final byte [] column2 = Bytes.toBytes("abcd:ef");
-    KeyValue aaa = new KeyValue(a, column1, a);
-    assertFalse(KeyValue.COMPARATOR.
-      compareColumns(aaa, column2, 0, column2.length, 4) == 0);
-  }
-
+  
   public void testBasics() throws Exception {
     LOG.info("LOWKEY: " + KeyValue.LOWESTKEY.toString());
     check(Bytes.toBytes(getName()),