You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2009/05/06 02:37:06 UTC
svn commit: r772021 - in /hadoop/hbase/trunk: ./
src/java/org/apache/hadoop/hbase/
src/java/org/apache/hadoop/hbase/regionserver/
src/java/org/apache/hadoop/hbase/util/ src/test/org/apache/hadoop/hbase/
Author: stack
Date: Wed May 6 00:37:05 2009
New Revision: 772021
URL: http://svn.apache.org/viewvc?rev=772021&view=rev
Log:
HBASE-1336 Splitting up the compare of family+column into 2 different compares
Modified:
hadoop/hbase/trunk/CHANGES.txt
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java
Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=772021&r1=772020&r2=772021&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Wed May 6 00:37:05 2009
@@ -110,6 +110,7 @@
HBASE-1264 Wrong return values of comparators for ColumnValueFilter
(Thomas Schneider via Andrew Purtell)
HBASE-1374 NPE out of ZooKeeperWrapper.loadZooKeeperConfig
+ HBASE-1336 Splitting up the compare of family+column into 2 different compares
IMPROVEMENTS
HBASE-1089 Add count of regions on filesystem to master UI; add percentage
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java?rev=772021&r1=772020&r2=772021&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/KeyValue.java Wed May 6 00:37:05 2009
@@ -772,13 +772,17 @@
/**
* @param column Column minus its delimiter
+ * @param familylength Length of family in passed <code>column</code>
* @return True if column matches.
* @see #matchingColumn(byte[])
*/
- public boolean matchingColumnNoDelimiter(final byte [] column) {
+ public boolean matchingColumnNoDelimiter(final byte [] column,
+ final int familylength) {
int o = getColumnOffset();
int l = getColumnLength(o);
- return compareColumns(getBuffer(), o, l, column, 0, column.length) == 0;
+ int f = getFamilyLength(o);
+ return compareColumns(getBuffer(), o, l, f,
+ column, 0, column.length, familylength) == 0;
}
/**
@@ -801,15 +805,27 @@
* @param left
* @param loffset
* @param llength
+ * @param lfamilylength Offset of family delimiter in left column.
* @param right
* @param roffset
* @param rlength
+ * @param rfamilylength Offset of family delimiter in right column.
* @return
*/
static int compareColumns(final byte [] left, final int loffset,
- final int llength, final byte [] right, final int roffset,
- final int rlength) {
- return Bytes.compareTo(left, loffset, llength, right, roffset, rlength);
+ final int llength, final int lfamilylength,
+ final byte [] right, final int roffset, final int rlength,
+ final int rfamilylength) {
+ // Compare family portion first.
+ int diff = Bytes.compareTo(left, loffset, lfamilylength,
+ right, roffset, rfamilylength);
+ if (diff != 0) {
+ return diff;
+ }
+ // Compare qualifier portion
+ return Bytes.compareTo(left, loffset + lfamilylength,
+ llength - lfamilylength,
+ right, roffset + rfamilylength, rlength - rfamilylength);
}
/**
@@ -1037,11 +1053,12 @@
}
public int compareColumns(final KeyValue left, final byte [] right,
- final int roffset, final int rlength) {
+ final int roffset, final int rlength, final int rfamilyoffset) {
int offset = left.getColumnOffset();
int length = left.getColumnLength(offset);
return getRawComparator().compareColumns(left.getBuffer(), offset, length,
- right, roffset, rlength);
+ left.getFamilyLength(offset),
+ right, roffset, rlength, rfamilyoffset);
}
int compareColumns(final KeyValue left, final short lrowlength,
@@ -1051,9 +1068,11 @@
int roffset = right.getColumnOffset(rrowlength);
int llength = left.getColumnLength(loffset, lkeylength);
int rlength = right.getColumnLength(roffset, rkeylength);
+ int lfamilylength = left.getFamilyLength(loffset);
+ int rfamilylength = right.getFamilyLength(roffset);
return getRawComparator().compareColumns(left.getBuffer(), loffset,
- llength,
- right.getBuffer(), roffset, rlength);
+ llength, lfamilylength,
+ right.getBuffer(), roffset, rlength, rfamilylength);
}
/**
@@ -1381,9 +1400,11 @@
return Bytes.compareTo(left, loffset, llength, right, roffset, rlength);
}
- protected int compareColumns(byte [] left, int loffset, int llength,
- byte [] right, int roffset, int rlength) {
- return KeyValue.compareColumns(left, loffset, llength, right, roffset, rlength);
+ protected int compareColumns(
+ byte [] left, int loffset, int llength, final int lfamilylength,
+ byte [] right, int roffset, int rlength, final int rfamilylength) {
+ return KeyValue.compareColumns(left, loffset, llength, lfamilylength,
+ right, roffset, rlength, rfamilylength);
}
int compareTimestamps(final long ltimestamp, final long rtimestamp) {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java?rev=772021&r1=772020&r2=772021&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java Wed May 6 00:37:05 2009
@@ -126,6 +126,7 @@
private Pattern columnMatcher;
// Column without delimiter so easy compare to KeyValue column
private byte [] col;
+ private int familylength = 0;
ColumnMatcher(final byte [] col) throws IOException {
byte [][] parse = parseColumn(col);
@@ -150,6 +151,7 @@
} else {
this.matchType = MATCH_TYPE.SIMPLE;
this.col = columnWithoutDelimiter;
+ this.familylength = parse[0].length;
this.wildCardmatch = false;
}
} catch(Exception e) {
@@ -165,7 +167,7 @@
*/
boolean matches(final KeyValue kv) throws IOException {
if (this.matchType == MATCH_TYPE.SIMPLE) {
- return kv.matchingColumnNoDelimiter(this.col);
+ return kv.matchingColumnNoDelimiter(this.col, this.familylength);
} else if(this.matchType == MATCH_TYPE.FAMILY_ONLY) {
return kv.matchingFamily(this.family);
} else if (this.matchType == MATCH_TYPE.REGEX) {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java?rev=772021&r1=772020&r2=772021&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Store.java Wed May 6 00:37:05 2009
@@ -309,7 +309,9 @@
// Check this edit is for me. Also, guard against writing the special
// METACOLUMN info such as HBASE::CACHEFLUSH entries
KeyValue kv = val.getKeyValue();
- if (val.isTransactionEntry() || kv.matchingColumnNoDelimiter(HLog.METACOLUMN) ||
+ if (val.isTransactionEntry() ||
+ kv.matchingColumnNoDelimiter(HLog.METACOLUMN,
+ HLog.METACOLUMN.length - 1) ||
!Bytes.equals(key.getRegionName(), regioninfo.getRegionName()) ||
!kv.matchingFamily(family.getName())) {
continue;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java?rev=772021&r1=772020&r2=772021&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java Wed May 6 00:37:05 2009
@@ -251,7 +251,8 @@
HRegionInfo info = null;
for (KeyValue kv: results) {
if (KeyValue.META_COMPARATOR.compareColumns(kv,
- HConstants.COL_REGIONINFO, 0, HConstants.COL_REGIONINFO.length) == 0) {
+ HConstants.COL_REGIONINFO, 0, HConstants.COL_REGIONINFO.length,
+ HConstants.COLUMN_FAMILY_STR.length()) == 0) {
info = Writables.getHRegionInfoOrNull(kv.getValue());
if (info == null) {
LOG.warn("region info is null for row " +
Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java?rev=772021&r1=772020&r2=772021&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestKeyValue.java Wed May 6 00:37:05 2009
@@ -34,7 +34,20 @@
public class TestKeyValue extends TestCase {
private final Log LOG = LogFactory.getLog(this.getClass().getName());
-
+
+ public void testColumnCompare() throws Exception {
+ final byte [] a = Bytes.toBytes("aaa");
+ byte [] column1 = Bytes.toBytes("abc:def");
+ byte [] column2 = Bytes.toBytes("abcd:ef");
+ KeyValue aaa = new KeyValue(a, column1, a);
+ assertFalse(KeyValue.COMPARATOR.
+ compareColumns(aaa, column2, 0, column2.length, 4) == 0);
+ column1 = Bytes.toBytes("abcd:");
+ aaa = new KeyValue(a, column1, a);
+ assertFalse(KeyValue.COMPARATOR.
+ compareColumns(aaa, column1, 0, column1.length, 4) == 0);
+ }
+
public void testBasics() throws Exception {
LOG.info("LOWKEY: " + KeyValue.LOWESTKEY.toString());
check(Bytes.toBytes(getName()),