You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by ap...@apache.org on 2009/10/20 16:06:56 UTC
svn commit: r827474 - in /hadoop/hbase/branches/0.20: ./
src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/
src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/
src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/t...
Author: apurtell
Date: Tue Oct 20 14:06:55 2009
New Revision: 827474
URL: http://svn.apache.org/viewvc?rev=827474&view=rev
Log:
HBASE-1916 FindBugs and javac warnings cleanup
Modified:
hadoop/hbase/branches/0.20/CHANGES.txt
hadoop/hbase/branches/0.20/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java
hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/MiniClusterTestCase.java
hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestSchemaResource.java
hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java
hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Scan.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/RowResult.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreFileToStoreFile.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreKey.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/CountingBloomFilter.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/DynamicBloomFilter.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/WildcardColumnTracker.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Bytes.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/FSUtils.java
hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Migrate.java
hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/io/hfile/RandomSeek.java
hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
Modified: hadoop/hbase/branches/0.20/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/CHANGES.txt?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/CHANGES.txt (original)
+++ hadoop/hbase/branches/0.20/CHANGES.txt Tue Oct 20 14:06:55 2009
@@ -13,6 +13,7 @@
Purtell)
HBASE-1917 TestScanner.testFilters failing
HBASE-1908 ROOT not reassigned if only one regionserver left
+ HBASE-1916 FindBugs and javac warnings cleanup
IMPROVEMENTS
HBASE-1899 Use scanner caching in shell count
Modified: hadoop/hbase/branches/0.20/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java (original)
+++ hadoop/hbase/branches/0.20/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java Tue Oct 20 14:06:55 2009
@@ -43,8 +43,8 @@
private static RESTServlet instance;
- private final HBaseConfiguration conf;
- private final HTablePool pool;
+ private transient final HBaseConfiguration conf;
+ private transient final HTablePool pool;
protected Map<String,Integer> maxAgeMap =
Collections.synchronizedMap(new HashMap<String,Integer>());
Modified: hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/MiniClusterTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/MiniClusterTestCase.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/MiniClusterTestCase.java (original)
+++ hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/MiniClusterTestCase.java Tue Oct 20 14:06:55 2009
@@ -208,7 +208,7 @@
stopDFS();
}
- class MiniClusterShutdownThread extends Thread {
+ static class MiniClusterShutdownThread extends Thread {
public void run() {
stopMiniCluster();
Path path = new Path(
Modified: hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestSchemaResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestSchemaResource.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestSchemaResource.java (original)
+++ hadoop/hbase/branches/0.20/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestSchemaResource.java Tue Oct 20 14:06:55 2009
@@ -53,6 +53,7 @@
@Override
protected void setUp() throws Exception {
+ super.setUp();
admin = new HBaseAdmin(conf);
client = new Client(new Cluster().add("localhost", testServletPort));
}
Modified: hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java (original)
+++ hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java Tue Oct 20 14:06:55 2009
@@ -37,7 +37,6 @@
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.io.Cell;
import org.apache.hadoop.hbase.io.RowResult;
import org.apache.hadoop.hbase.regionserver.tableindexed.IndexMaintenanceUtils;
Modified: hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java (original)
+++ hadoop/hbase/branches/0.20/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java Tue Oct 20 14:06:55 2009
@@ -120,7 +120,11 @@
final HRegionInfo regionInfo, final FlushRequester flushListener,
final Leases transactionalLeases) {
super(basedir, log, fs, conf, regionInfo, flushListener);
- this.hlog = (THLog) log;
+ if (log instanceof THLog) {
+ this.hlog = (THLog) log;
+ } else {
+ throw new RuntimeException("log is not THLog");
+ }
oldTransactionFlushTrigger = conf.getInt(OLD_TRANSACTION_FLUSH,
DEFAULT_OLD_TRANSACTION_FLUSH);
this.transactionLeases = transactionalLeases;
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java Tue Oct 20 14:06:55 2009
@@ -197,7 +197,7 @@
* Get this watcher's ZKW, instantiate it if necessary.
* @return ZKW
*/
- public ZooKeeperWrapper getZooKeeperWrapper() throws IOException {
+ public synchronized ZooKeeperWrapper getZooKeeperWrapper() throws IOException {
if(zooKeeperWrapper == null) {
zooKeeperWrapper = new ZooKeeperWrapper(conf, this);
}
@@ -323,6 +323,7 @@
if (tryMaster.isMasterRunning()) {
this.master = tryMaster;
+ this.masterLock.notifyAll();
break;
}
@@ -340,7 +341,7 @@
// Cannot connect to master or it is not running. Sleep & retry
try {
- Thread.sleep(getPauseTime(tries));
+ this.masterLock.wait(getPauseTime(tries));
} catch (InterruptedException e) {
// continue
}
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Scan.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Scan.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Scan.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/client/Scan.java Tue Oct 20 14:06:55 2009
@@ -235,27 +235,32 @@
* @return The columns in an old style string format.
*/
public String getInputColumns() {
- String cols = "";
+ StringBuilder cols = new StringBuilder();
for (Map.Entry<byte[], NavigableSet<byte[]>> e :
familyMap.entrySet()) {
byte[] fam = e.getKey();
- if (cols.length() > 0) cols += " ";
+ if (cols.length() > 0) {
+ cols.append(" ");
+ }
NavigableSet<byte[]> quals = e.getValue();
// check if this family has qualifiers
if (quals != null && quals.size() > 0) {
- String cs = "";
for (byte[] qual : quals) {
- if (cs.length() > 0) cs += " ";
+ if (cols.length() > 0) {
+ cols.append(" ");
+ }
// encode values to make parsing easier later
- cs += Bytes.toStringBinary(fam) + ":" + Bytes.toStringBinary(qual);
+ cols.append(Bytes.toStringBinary(fam));
+ cols.append(":");
+ cols.append(Bytes.toStringBinary(qual));
}
- cols += cs;
} else {
// only add the family but with old style delimiter
- cols += Bytes.toStringBinary(fam) + ":";
+ cols.append(Bytes.toStringBinary(fam));
+ cols.append(":");
}
}
- return cols;
+ return cols.toString();
}
/**
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/RowResult.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/RowResult.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/RowResult.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/io/RowResult.java Tue Oct 20 14:06:55 2009
@@ -268,7 +268,7 @@
sb.append(ioe.toString());
}
} else {
- sb.append(v);
+ sb.append(Bytes.toStringBinary(v));
}
sb.append(")");
}
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreFileToStoreFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreFileToStoreFile.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreFileToStoreFile.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreFileToStoreFile.java Tue Oct 20 14:06:55 2009
@@ -104,11 +104,15 @@
// only be family directories. Under each of these, should be a mapfile
// and info directory and in these only one file.
Path d = tableDirs[i].getPath();
- if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) continue;
+ if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
+ continue;
+ }
FileStatus [] regionDirs = fs.listStatus(d, new DirFilter(fs));
for (int j = 0; j < regionDirs.length; j++) {
Path dd = regionDirs[j].getPath();
- if (dd.equals(HConstants.HREGION_COMPACTIONDIR_NAME)) continue;
+ if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
+ continue;
+ }
// Else it's a region name. Now look in region for families.
FileStatus [] familyDirs = fs.listStatus(dd, new DirFilter(fs));
for (int k = 0; k < familyDirs.length; k++) {
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreKey.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreKey.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreKey.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/HStoreKey.java Tue Oct 20 14:06:55 2009
@@ -338,6 +338,9 @@
@Override
public boolean equals(Object obj) {
+ if (!(obj instanceof HStoreKey)) {
+ return false;
+ }
HStoreKey other = (HStoreKey)obj;
// Do a quick check.
if (this.row.length != other.row.length ||
@@ -479,7 +482,8 @@
byte [][] result = new byte [2][];
int index = getFamilyDelimiterIndex(c);
if (index == -1) {
- throw new ColumnNameParseException("Impossible column name: " + c);
+ throw new ColumnNameParseException("Impossible column name: " +
+ Bytes.toStringBinary(c));
}
result[0] = new byte [index];
System.arraycopy(c, 0, result[0], 0, index);
@@ -647,7 +651,10 @@
@Override
public boolean equals(Object obj) {
- return false;
+ if (obj == null || !(obj instanceof HStoreKey)) {
+ return false;
+ }
+ return compareTo((HStoreKey)obj) == 0;
}
@Override
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/CountingBloomFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/CountingBloomFilter.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/CountingBloomFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/CountingBloomFilter.java Tue Oct 20 14:06:55 2009
@@ -50,7 +50,6 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.util.Arrays; //TODO: remove
import org.apache.hadoop.hbase.util.Hash;
@@ -67,7 +66,7 @@
* @version 1.1 - 19 Jan. 08
*
*/
-public final class CountingBloomFilter extends Filter {
+public final class CountingBloomFilter extends Filter implements Cloneable {
/** Storage for the counting buckets */
private long[] buckets;
@@ -254,7 +253,6 @@
}//end or()
@Override
- @SuppressWarnings("unused")
public void xor(Filter filter){
throw new UnsupportedOperationException("xor() is undefined for "
+ this.getClass().getName());
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/DynamicBloomFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/DynamicBloomFilter.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/DynamicBloomFilter.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/migration/nineteen/onelab/filter/DynamicBloomFilter.java Tue Oct 20 14:06:55 2009
@@ -82,7 +82,7 @@
* @version 1.0 - 6 Feb. 07
*
*/
-public class DynamicBloomFilter extends Filter {
+public class DynamicBloomFilter extends Filter implements Cloneable {
/**
* Threshold for the maximum number of key to record in a dynamic Bloom filter row.
*/
@@ -233,7 +233,7 @@
}//end toString()
@Override
- public Object clone(){
+ public Object clone() {
DynamicBloomFilter dbf = new DynamicBloomFilter(vectorSize, nbHash, hashType, nr);
dbf.currentNbRecord = this.currentNbRecord;
dbf.matrix = new BloomFilter[this.matrix.length];
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java Tue Oct 20 14:06:55 2009
@@ -118,9 +118,9 @@
@Override
public int hashCode() {
- int result = this.regionName.hashCode();
- result ^= this.logSeqNum;
- result ^= this.writeTime;
+ int result = Bytes.hashCode(this.regionName);
+ result ^= (int)this.logSeqNum;
+ result ^= (int)this.writeTime;
return result;
}
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Tue Oct 20 14:06:55 2009
@@ -1278,12 +1278,13 @@
// queue at time iterator was taken out. Apparently goes from oldest.
for (ToDoEntry e: this.toDo) {
HMsg msg = e.msg;
- if (msg == null) {
+ if (msg != null) {
+ if (msg.isType(HMsg.Type.MSG_REGION_OPEN)) {
+ addProcessingMessage(msg.getRegionInfo());
+ }
+ } else {
LOG.warn("Message is empty: " + e);
}
- if (e.msg.isType(HMsg.Type.MSG_REGION_OPEN)) {
- addProcessingMessage(e.msg.getRegionInfo());
- }
}
}
@@ -1956,7 +1957,8 @@
null: results.toArray(new Result[0]);
} catch (Throwable t) {
if (t instanceof NotServingRegionException) {
- this.scanners.remove(scannerId);
+ String scannerName = String.valueOf(scannerId);
+ this.scanners.remove(scannerName);
}
throw convertThrowableToIOE(cleanup(t));
}
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java Tue Oct 20 14:06:55 2009
@@ -420,7 +420,7 @@
* Immutable data structure to hold member found in set and the set it was
* found in. Include set because it is carrying context.
*/
- private class Member {
+ private static class Member {
final KeyValue kv;
final NavigableSet<KeyValue> set;
Member(final NavigableSet<KeyValue> s, final KeyValue kv) {
@@ -628,9 +628,9 @@
boolean cacheNextRow() {
// Prevent snapshot being cleared while caching a row.
lock.readLock().lock();
- this.result.clear();
- this.idx = 0;
try {
+ this.result.clear();
+ this.idx = 0;
// Look at each set, kvset and snapshot.
// Both look for matching entries for this.current row returning what
// they
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/WildcardColumnTracker.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/WildcardColumnTracker.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/WildcardColumnTracker.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/regionserver/WildcardColumnTracker.java Tue Oct 20 14:06:55 2009
@@ -165,74 +165,81 @@
this.newColumn = newColumns.get(newIndex);
return MatchCode.INCLUDE;
}
-
-
- // There are new and old, figure which to check first
- int ret = Bytes.compareTo(column.getBuffer(), column.getOffset(),
+
+ if (column != null && newColumn != null) {
+ // There are new and old, figure which to check first
+ int ret = Bytes.compareTo(column.getBuffer(), column.getOffset(),
column.getLength(), newColumn.getBuffer(), newColumn.getOffset(),
newColumn.getLength());
- // Old is smaller than new, compare against old
- if(ret <= -1) {
- ret = Bytes.compareTo(column.getBuffer(), column.getOffset(),
+ // Old is smaller than new, compare against old
+ if(ret <= -1) {
+ ret = Bytes.compareTo(column.getBuffer(), column.getOffset(),
column.getLength(), bytes, offset, length);
+ // Same column
+ if(ret == 0) {
+ if(column.increment() > this.maxVersions) {
+ return MatchCode.SKIP;
+ }
+ return MatchCode.INCLUDE;
+ }
+
+ // Specified column is bigger than current column
+ // Move down current column and check again
+ if(ret <= -1) {
+ if(++index == columns.size()) {
+ this.column = null;
+ } else {
+ this.column = columns.get(index);
+ }
+ return checkColumn(bytes, offset, length);
+ }
+
+ // ret >= 1
+ // Specified column is smaller than current column
+ // Nothing to match against, add to new and include
+ newColumns.add(new ColumnCount(bytes, offset, length, 1));
+ return MatchCode.INCLUDE;
+ }
+ }
+
+ // Cannot be equal, so ret >= 1
+ // New is smaller than old, compare against new
+ if (newColumn != null) {
+
+ int ret = Bytes.compareTo(newColumn.getBuffer(), newColumn.getOffset(),
+ newColumn.getLength(), bytes, offset, length);
+
// Same column
if(ret == 0) {
- if(column.increment() > this.maxVersions) {
+ if(newColumn.increment() > this.maxVersions) {
return MatchCode.SKIP;
}
return MatchCode.INCLUDE;
}
-
+
// Specified column is bigger than current column
// Move down current column and check again
if(ret <= -1) {
- if(++index == columns.size()) {
- this.column = null;
+ if(++newIndex == newColumns.size()) {
+ this.newColumn = null;
} else {
- this.column = columns.get(index);
+ this.newColumn = newColumns.get(newIndex);
}
return checkColumn(bytes, offset, length);
}
-
+
// ret >= 1
// Specified column is smaller than current column
// Nothing to match against, add to new and include
newColumns.add(new ColumnCount(bytes, offset, length, 1));
return MatchCode.INCLUDE;
}
-
- // Cannot be equal, so ret >= 1
- // New is smaller than old, compare against new
-
- ret = Bytes.compareTo(newColumn.getBuffer(), newColumn.getOffset(),
- newColumn.getLength(), bytes, offset, length);
-
- // Same column
- if(ret == 0) {
- if(newColumn.increment() > this.maxVersions) {
- return MatchCode.SKIP;
- }
- return MatchCode.INCLUDE;
- }
-
- // Specified column is bigger than current column
- // Move down current column and check again
- if(ret <= -1) {
- if(++newIndex == newColumns.size()) {
- this.newColumn = null;
- } else {
- this.newColumn = newColumns.get(newIndex);
- }
- return checkColumn(bytes, offset, length);
- }
-
- // ret >= 1
- // Specified column is smaller than current column
- // Nothing to match against, add to new and include
+
+ // No match happened, add to new and include
newColumns.add(new ColumnCount(bytes, offset, length, 1));
- return MatchCode.INCLUDE;
+ return MatchCode.INCLUDE;
}
/**
@@ -242,7 +249,7 @@
// If no previous columns, use new columns and return
if(this.columns == null || this.columns.size() == 0) {
if(this.newColumns.size() > 0){
- finalize(newColumns);
+ finish(newColumns);
}
return;
}
@@ -273,7 +280,7 @@
if(++index == columns.size()) {
// No more existing left, merge down rest of new and return
mergeDown(mergeColumns, newColumns, newIndex);
- finalize(mergeColumns);
+ finish(mergeColumns);
return;
}
column = columns.get(index);
@@ -285,7 +292,7 @@
if(++newIndex == newColumns.size()) {
// No more new left, merge down rest of existing and return
mergeDown(mergeColumns, columns, index);
- finalize(mergeColumns);
+ finish(mergeColumns);
return;
}
newColumn = newColumns.get(newIndex);
@@ -301,7 +308,7 @@
}
}
- private void finalize(List<ColumnCount> mergeColumns) {
+ private void finish(List<ColumnCount> mergeColumns) {
this.columns = mergeColumns;
this.index = 0;
this.column = this.columns.size() > 0? columns.get(index) : null;
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Bytes.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Bytes.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Bytes.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Bytes.java Tue Oct 20 14:06:55 2009
@@ -281,10 +281,9 @@
}
public static String toStringBinary(final byte [] b, int off, int len) {
- String result = null;
+ StringBuilder result = new StringBuilder();
try {
String first = new String(b, off, len, "ISO-8859-1");
- result = "";
for (int i = 0; i < first.length() ; ++i ) {
int ch = first.charAt(i) & 0xFF;
if ( (ch >= '0' && ch <= '9')
@@ -295,15 +294,15 @@
|| ch == '-'
|| ch == ':'
|| ch == '.') {
- result += first.charAt(i);
+ result.append(first.charAt(i));
} else {
- result += String.format("\\x%02X", ch);
+ result.append(String.format("\\x%02X", ch));
}
}
} catch ( UnsupportedEncodingException e) {
e.printStackTrace();
}
- return result;
+ return result.toString();
}
private static boolean isHexDigit(char c) {
@@ -638,7 +637,7 @@
public static byte[] toBytes(short val) {
byte[] b = new byte[SIZEOF_SHORT];
b[1] = (byte)(val);
- val >>>= 8;
+ val >>= 8;
b[0] = (byte)(val);
return b;
}
@@ -693,7 +692,7 @@
return offset;
}
bytes[offset+1] = (byte)(val);
- val >>>= 8;
+ val >>= 8;
bytes[offset] = (byte)(val);
return offset + SIZEOF_SHORT;
}
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/FSUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/FSUtils.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/FSUtils.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/FSUtils.java Tue Oct 20 14:06:55 2009
@@ -301,11 +301,15 @@
// should be regions. Then in each region, should only be family
// directories. Under each of these, should be one file only.
Path d = tableDirs[i].getPath();
- if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) continue;
+ if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
+ continue;
+ }
FileStatus [] regionDirs = fs.listStatus(d, new DirFilter(fs));
for (int j = 0; j < regionDirs.length; j++) {
Path dd = regionDirs[j].getPath();
- if (dd.equals(HConstants.HREGION_COMPACTIONDIR_NAME)) continue;
+ if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
+ continue;
+ }
// Else it's a region name. Now look in region for families.
FileStatus [] familyDirs = fs.listStatus(dd, new DirFilter(fs));
for (int k = 0; k < familyDirs.length; k++) {
@@ -360,11 +364,15 @@
// only be family directories. Under each of these, should be a mapfile
// and info directory and in these only one file.
Path d = tableDirs[i].getPath();
- if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) continue;
+ if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
+ continue;
+ }
FileStatus [] regionDirs = fs.listStatus(d, new DirFilter(fs));
for (int j = 0; j < regionDirs.length; j++) {
Path dd = regionDirs[j].getPath();
- if (dd.equals(HConstants.HREGION_COMPACTIONDIR_NAME)) continue;
+ if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
+ continue;
+ }
// Else it's a region name. Now look in region for families.
FileStatus [] familyDirs = fs.listStatus(dd, new DirFilter(fs));
for (int k = 0; k < familyDirs.length; k++) {
Modified: hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Migrate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Migrate.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Migrate.java (original)
+++ hadoop/hbase/branches/0.20/src/java/org/apache/hadoop/hbase/util/Migrate.java Tue Oct 20 14:06:55 2009
@@ -322,11 +322,15 @@
// only be family directories. Under each of these, should be a mapfile
// and info directory and in these only one file.
Path d = tableDirs[i].getPath();
- if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) continue;
+ if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
+ continue;
+ }
FileStatus [] regionDirs = fs.listStatus(d, new DirFilter(fs));
for (int j = 0; j < regionDirs.length; j++) {
Path dd = regionDirs[j].getPath();
- if (dd.equals(HConstants.HREGION_COMPACTIONDIR_NAME)) continue;
+ if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
+ continue;
+ }
// Else it's a region name. Now look in region for families.
FileStatus [] familyDirs = fs.listStatus(dd, new DirFilter(fs));
for (int k = 0; k < familyDirs.length; k++) {
Modified: hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/io/hfile/RandomSeek.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/io/hfile/RandomSeek.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/io/hfile/RandomSeek.java (original)
+++ hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/io/hfile/RandomSeek.java Tue Oct 20 14:06:55 2009
@@ -46,8 +46,10 @@
String [] parts = str.split(",");
l.add(parts[0] + ":" + parts[1] + ":" + parts[2]);
}
+ istream.close();
return l;
}
+
private static String randKey(List<String> keys) {
Random r = new Random();
//return keys.get(r.nextInt(keys.size()));
Modified: hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java?rev=827474&r1=827473&r2=827474&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java (original)
+++ hadoop/hbase/branches/0.20/src/test/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java Tue Oct 20 14:06:55 2009
@@ -237,8 +237,8 @@
if (first.compareTo(second) != 0) {
if (LOG.isDebugEnabled()) {
LOG.debug("second key is not the reverse of first. row=" +
- r.getRow() + ", first value=" + first + ", second value=" +
- second);
+ Bytes.toStringBinary(r.getRow()) + ", first value=" + first +
+ ", second value=" + second);
}
fail();
}