You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by jm...@apache.org on 2012/04/23 10:46:43 UTC
svn commit: r1329132 - in /hbase/trunk: dev-support/
src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/
src/main/java/org/apache/hadoop/hbase/
src/main/java/org/apache/hadoop/hbase/filter/
src/main/java/org/apache/hadoop/hbase/io/hfile/ src/main...
Author: jmhsieh
Date: Mon Apr 23 08:46:42 2012
New Revision: 1329132
URL: http://svn.apache.org/viewvc?rev=1329132&view=rev
Log:
HBASE-5654 [findbugs] Address dodgy bugs (Ashutosh Jindal)
Modified:
hbase/trunk/dev-support/findbugs-exclude.xml
hbase/trunk/dev-support/test-patch.properties
hbase/trunk/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
hbase/trunk/src/main/java/org/apache/hadoop/hbase/HServerLoad.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
hbase/trunk/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
Modified: hbase/trunk/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hbase/trunk/dev-support/findbugs-exclude.xml?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/dev-support/findbugs-exclude.xml (original)
+++ hbase/trunk/dev-support/findbugs-exclude.xml Mon Apr 23 08:46:42 2012
@@ -48,4 +48,51 @@
<Bug pattern="NP_NULL_PARAM_DEREF" />
</Match>
+ <Match>
+ <Class name="org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogReader"/>
+ <Or>
+ <Method name="addFileInfoToException" />
+ </Or>
+ <Bug pattern="REC_CATCH_EXCEPTION" />
+ </Match>
+
+
+ <Match>
+ <Class name="org.apache.hadoop.hbase.KeyValue"/>
+ <Or>
+ <Method name="createEmptyByteArray" />
+ <Method name="createByteArray" />
+ </Or>
+ <Bug pattern="INT_VACUOUS_COMPARISON" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.hbase.regionserver.LruHashMap"/>
+ <Or>
+ <Method name="equals" />
+ </Or>
+ <Bug pattern="EQ_UNUSUAL" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.hbase.util.ByteBufferUtils"/>
+ <Or>
+ <Method name="putInt" />
+ </Or>
+ <Bug pattern="ICAST_QUESTIONABLE_UNSIGNED_RIGHT_SHIFT" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.hbase.mapreduce.MultithreadedTableMapper"/>
+ <Or>
+ <Method name="MapRunner" />
+ </Or>
+ <Bug pattern="REC_CATCH_EXCEPTION" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.hbase.util.PoolMap$RoundRobinPool"/>
+ <Bug pattern="EQ_DOESNT_OVERRIDE_EQUALS" />
+ </Match>
+
</FindBugsFilter>
Modified: hbase/trunk/dev-support/test-patch.properties
URL: http://svn.apache.org/viewvc/hbase/trunk/dev-support/test-patch.properties?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/dev-support/test-patch.properties (original)
+++ hbase/trunk/dev-support/test-patch.properties Mon Apr 23 08:46:42 2012
@@ -19,5 +19,5 @@ MAVEN_OPTS="-Xmx3g"
# Please update the per-module test-patch.properties if you update this file.
OK_RELEASEAUDIT_WARNINGS=84
-OK_FINDBUGS_WARNINGS=549
+OK_FINDBUGS_WARNINGS=523
OK_JAVADOC_WARNINGS=169
Modified: hbase/trunk/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon (original)
+++ hbase/trunk/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon Mon Apr 23 08:46:42 2012
@@ -48,7 +48,6 @@ org.apache.hadoop.hbase.HBaseConfigurati
serverName = regionServer.getServerName();
RegionServerMetrics metrics = regionServer.getMetrics();
List<HRegionInfo> onlineRegions = regionServer.getOnlineRegions();
- int interval = regionServer.getConfiguration().getInt("hbase.regionserver.msginterval", 3000)/1000;
int masterInfoPort = regionServer.getConfiguration().getInt("hbase.master.info.port", 60010);
</%java>
<?xml version="1.0" encoding="UTF-8" ?>
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/HServerLoad.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/HServerLoad.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/HServerLoad.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/HServerLoad.java Mon Apr 23 08:46:42 2012
@@ -386,7 +386,7 @@ implements WritableComparable<HServerLoa
*/
private void readFields92(DataInput in) throws IOException {
// in 0.92, the version was actually written twice, consume the second copy
- int version = in.readByte();
+ in.readByte(); // version
int namelen = in.readInt();
this.name = new byte[namelen];
in.readFully(this.name);
@@ -503,7 +503,7 @@ implements WritableComparable<HServerLoa
float compactionProgressPct = Float.NaN;
if( this.totalCompactingKVs > 0 ) {
compactionProgressPct = Float.valueOf(
- this.currentCompactedKVs / this.totalCompactingKVs);
+ (float)this.currentCompactedKVs / this.totalCompactingKVs);
}
sb = Strings.appendKeyValue(sb, "compactionProgressPct",
compactionProgressPct);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java Mon Apr 23 08:46:42 2012
@@ -222,9 +222,18 @@ public class FilterList implements Filte
case INCLUDE:
rc = ReturnCode.INCLUDE;
// must continue here to evaluate all filters
+ break;
case NEXT_ROW:
+ break;
case SKIP:
// continue;
+ break;
+ case NEXT_COL:
+ break;
+ case SEEK_NEXT_USING_HINT:
+ break;
+ default:
+ throw new IllegalStateException("Received code is not valid.");
}
}
}
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java Mon Apr 23 08:46:42 2012
@@ -198,10 +198,6 @@ public abstract class AbstractHFileWrite
if (key == null || length <= 0) {
throw new IOException("Key cannot be null or empty");
}
- if (length > HFile.MAXIMUM_KEY_LENGTH) {
- throw new IOException("Key length " + length + " > "
- + HFile.MAXIMUM_KEY_LENGTH);
- }
if (lastKeyBuffer != null) {
int keyComp = comparator.compare(lastKeyBuffer, lastKeyOffset,
lastKeyLength, key, offset, length);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java Mon Apr 23 08:46:42 2012
@@ -883,7 +883,7 @@ public class HFileBlock extends SchemaCo
offset = Bytes.putLong(dest, offset, prevOffset);
offset = Bytes.putByte(dest, offset, checksumType.getCode());
offset = Bytes.putInt(dest, offset, bytesPerChecksum);
- offset = Bytes.putInt(dest, offset, onDiskDataSize);
+ Bytes.putInt(dest, offset, onDiskDataSize);
}
/**
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java Mon Apr 23 08:46:42 2012
@@ -671,7 +671,7 @@ public class LruBlockCache implements Bl
return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
((long)Math.ceil(maxSize*1.2/blockSize)
* ClassSize.CONCURRENT_HASHMAP_ENTRY) +
- (concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
+ ((long)concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
}
@Override
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java Mon Apr 23 08:46:42 2012
@@ -185,7 +185,6 @@ class WritableRpcEngine implements RpcEn
(VersionedProtocol) Proxy.newProxyInstance(
protocol.getClassLoader(), new Class[] { protocol },
new Invoker(protocol, addr, ticket, conf, factory, rpcTimeout));
- if (proxy instanceof VersionedProtocol) {
try {
long serverVersion = ((VersionedProtocol)proxy)
.getProtocolVersion(protocol.getName(), clientVersion);
@@ -206,7 +205,6 @@ class WritableRpcEngine implements RpcEn
}
throw (IOException)t;
}
- }
return proxy;
}
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java Mon Apr 23 08:46:42 2012
@@ -609,6 +609,9 @@ public class AssignmentManager extends Z
}
failoverProcessedRegions.put(encodedRegionName, regionInfo);
break;
+
+ default:
+ throw new IllegalStateException("Received event is not valid.");
}
}
}
@@ -898,6 +901,9 @@ public class AssignmentManager extends Z
new OpenedRegionHandler(master, this, regionState.getRegion(),
data.getOrigin(), expectedVersion));
break;
+
+ default:
+ throw new IllegalStateException("Received event is not valid.");
}
}
}
@@ -1990,7 +1996,6 @@ public class AssignmentManager extends Z
return;
} catch (KeeperException ke) {
LOG.error("Unexpected zk state", ke);
- ke = e;
}
}
// If we get here, don't understand whats going on -- abort.
@@ -2940,6 +2945,9 @@ public class AssignmentManager extends Z
"expire, send RPC again");
invokeUnassign(regionInfo);
break;
+
+ default:
+ throw new IllegalStateException("Received event is not valid.");
}
}
}
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java Mon Apr 23 08:46:42 2012
@@ -2268,7 +2268,7 @@ public class HRegion implements HeapSize
// Lock row
Integer lid = getLock(lockId, get.getRow(), true);
- List<KeyValue> result = new ArrayList<KeyValue>();
+ List<KeyValue> result = null;
try {
result = get(get, false);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java Mon Apr 23 08:46:42 2012
@@ -88,7 +88,7 @@ extends ConstantSizeRegionSplitPolicy {
long getSizeToCheck(final int tableRegionsCount) {
return tableRegionsCount == 0? getDesiredMaxFileSize():
Math.min(getDesiredMaxFileSize(),
- this.flushSize * (tableRegionsCount * tableRegionsCount));
+ this.flushSize * (tableRegionsCount * (long)tableRegionsCount));
}
/**
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java Mon Apr 23 08:46:42 2012
@@ -111,8 +111,7 @@ public class MemStoreLAB {
* != c
*/
private void tryRetireChunk(Chunk c) {
- @SuppressWarnings("unused")
- boolean weRetiredIt = curChunk.compareAndSet(c, null);
+ curChunk.compareAndSet(c, null);
// If the CAS succeeds, that means that we won the race
// to retire the chunk. We could use this opportunity to
// update metrics on external fragmentation.
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java Mon Apr 23 08:46:42 2012
@@ -1438,8 +1438,12 @@ public class StoreFile extends SchemaCon
*/
public boolean passesGeneralBloomFilter(byte[] row, int rowOffset,
int rowLen, byte[] col, int colOffset, int colLen) {
- if (generalBloomFilter == null)
+ // Cache Bloom filter as a local variable in case it is set to null by
+ // another thread on an IO error.
+ BloomFilter bloomFilter = this.generalBloomFilter;
+ if (bloomFilter == null) {
return true;
+ }
byte[] key;
switch (bloomFilterType) {
@@ -1456,7 +1460,7 @@ public class StoreFile extends SchemaCon
break;
case ROWCOL:
- key = generalBloomFilter.createBloomKey(row, rowOffset, rowLen, col,
+ key = bloomFilter.createBloomKey(row, rowOffset, rowLen, col,
colOffset, colLen);
break;
@@ -1464,14 +1468,6 @@ public class StoreFile extends SchemaCon
return true;
}
- // Cache Bloom filter as a local variable in case it is set to null by
- // another thread on an IO error.
- BloomFilter bloomFilter = this.generalBloomFilter;
-
- if (bloomFilter == null) {
- return true;
- }
-
// Empty file
if (reader.getTrailer().getEntryCount() == 0)
return false;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java Mon Apr 23 08:46:42 2012
@@ -50,6 +50,6 @@ public class CompactionProgress {
* @return float
*/
public float getProgressPct() {
- return currentCompactedKVs / totalCompactingKVs;
+ return (float)currentCompactedKVs / totalCompactingKVs;
}
}
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java Mon Apr 23 08:46:42 2012
@@ -246,7 +246,12 @@ public class ByteBloomFilter implements
}
private static int optimalFunctionCount(int maxKeys, long bitSize) {
- return (int) Math.ceil(Math.log(2) * (bitSize / maxKeys));
+ long i = bitSize / maxKeys;
+ double result = Math.ceil(Math.log(2) * i);
+ if (result > Integer.MAX_VALUE){
+ throw new IllegalArgumentException("result too large for integer value.");
+ }
+ return (int)result;
}
/** Private constructor used by other constructors. */
@@ -298,7 +303,7 @@ public class ByteBloomFilter implements
double errorRate, int hashType, int foldFactor) {
ByteBloomFilter bbf = new ByteBloomFilter(hashType);
- bbf.byteSize = computeFoldableByteSize(byteSizeHint * 8, foldFactor);
+ bbf.byteSize = computeFoldableByteSize(byteSizeHint * 8L, foldFactor);
long bitSize = bbf.byteSize * 8;
bbf.maxKeys = (int) idealMaxKeys(bitSize, errorRate);
bbf.hashCount = optimalFunctionCount(bbf.maxKeys, bitSize);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java Mon Apr 23 08:46:42 2012
@@ -92,7 +92,7 @@ public class CompoundBloomFilterWriter e
int hashType, int maxFold, boolean cacheOnWrite,
RawComparator<byte[]> comparator) {
chunkByteSize = ByteBloomFilter.computeFoldableByteSize(
- chunkByteSizeHint * 8, maxFold);
+ chunkByteSizeHint * 8L, maxFold);
this.errorRate = errorRate;
this.hashType = hashType;
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java Mon Apr 23 08:46:42 2012
@@ -84,7 +84,6 @@ public class CompressionTest {
}
}
- Configuration conf = HBaseConfiguration.create();
try {
Compressor c = algo.getCompressor();
algo.returnCompressor(c);
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java Mon Apr 23 08:46:42 2012
@@ -74,13 +74,9 @@ public class FSHDFSUtils extends FSUtils
while (!recovered) {
try {
try {
- if (fs instanceof DistributedFileSystem) {
- DistributedFileSystem dfs = (DistributedFileSystem)fs;
- DistributedFileSystem.class.getMethod("recoverLease",
- new Class[] {Path.class}).invoke(dfs, p);
- } else {
- throw new Exception("Not a DistributedFileSystem");
- }
+ DistributedFileSystem dfs = (DistributedFileSystem) fs;
+ DistributedFileSystem.class.getMethod("recoverLease", new Class[] { Path.class }).invoke(
+ dfs, p);
} catch (InvocationTargetException ite) {
// function was properly called, but threw its own exception
throw (IOException) ite.getCause();
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java Mon Apr 23 08:46:42 2012
@@ -316,7 +316,7 @@ public class PoolMap<K, V> implements Ma
@Override
public R put(R resource) {
- if (size() < maxSize) {
+ if (super.size() < maxSize) {
add(resource);
}
return null;
@@ -355,7 +355,7 @@ public class PoolMap<K, V> implements Ma
@Override
public R put(R resource) {
- if (size() < maxSize) {
+ if (super.size() < maxSize) {
add(resource);
}
return null;
@@ -363,10 +363,10 @@ public class PoolMap<K, V> implements Ma
@Override
public R get() {
- if (size() < maxSize) {
+ if (super.size() < maxSize) {
return null;
}
- nextResource %= size();
+ nextResource %= super.size();
R resource = get(nextResource++);
return resource;
}
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java Mon Apr 23 08:46:42 2012
@@ -632,7 +632,6 @@ public class RegionSplitter {
// get table info
Path hbDir = new Path(table.getConfiguration().get(HConstants.HBASE_DIR));
Path tableDir = HTableDescriptor.getTableDir(hbDir, table.getTableName());
- Path splitFile = new Path(tableDir, "_balancedSplit");
FileSystem fs = FileSystem.get(table.getConfiguration());
// clear the cache to forcibly refresh region information
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java?rev=1329132&r1=1329131&r2=1329132&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java Mon Apr 23 08:46:42 2012
@@ -34,6 +34,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.thrift.generated.Hbase.deleteAll_args;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
@@ -374,6 +375,12 @@ public class ZooKeeperWatcher implements
if (this.abortable != null) this.abortable.abort(msg,
new KeeperException.SessionExpiredException());
break;
+
+ case ConnectedReadOnly:
+ break;
+
+ default:
+ throw new IllegalStateException("Received event is not valid.");
}
}