You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2009/01/22 06:44:22 UTC
svn commit: r736569 - in /hadoop/hbase/trunk: ./
src/java/org/apache/hadoop/hbase/ipc/
src/java/org/apache/hadoop/hbase/mapred/
src/java/org/apache/hadoop/hbase/master/
src/java/org/apache/hadoop/hbase/master/metrics/
src/java/org/apache/hadoop/hbase/m...
Author: stack
Date: Wed Jan 21 21:44:21 2009
New Revision: 736569
URL: http://svn.apache.org/viewvc?rev=736569&view=rev
Log:
HBASE-876 There are a large number of Java warnings in HBase
Modified:
hadoop/hbase/trunk/CHANGES.txt
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexConfiguration.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/LuceneDocumentWrapper.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableSplit.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyTableMeta.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/NotAllMetaRegionsOnlineException.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RootScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/metrics/file/TimeStampingFileContext.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/LogFlusher.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Memcache.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexedRegionServer.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalHLogManager.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/JenkinsHash.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MurmurHash.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Pair.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValue.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java
hadoop/hbase/trunk/src/java/org/onelab/filter/BloomFilter.java
hadoop/hbase/trunk/src/java/org/onelab/filter/CountingBloomFilter.java
hadoop/hbase/trunk/src/java/org/onelab/filter/DynamicBloomFilter.java
hadoop/hbase/trunk/src/java/org/onelab/filter/Key.java
hadoop/hbase/trunk/src/java/org/onelab/filter/RetouchedBloomFilter.java
Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Wed Jan 21 21:44:21 2009
@@ -18,7 +18,8 @@
HBASE-1130 PrefixRowFilter (Michael Gottesman via Stack)
HBASE-1139 Update Clover in build.xml
HBASE-876 There are a large number of Java warnings in HBase; part 1,
- part 2, and part 3 (Evgeny Ryabitskiy via Stack)
+ part 2, part 3, part 4, part 5, part 6, part 7 and part 8
+ (Evgeny Ryabitskiy via Stack)
HBASE-896 Update jruby from 1.1.2 to 1.1.6
HBASE-1031 Add the Zookeeper jar
HBASE-1142 Cleanup thrift server; remove Text and profuse DEBUG messaging
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java Wed Jan 21 21:44:21 2009
@@ -25,7 +25,6 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
@@ -83,9 +82,9 @@
synchronized (metricsList) {
// Iterate through the rpcMetrics hashmap to propagate the different rpc metrics.
- Set keys = metricsList.keySet();
+ Set<String> keys = metricsList.keySet();
- Iterator keyIter = keys.iterator();
+ Iterator<String> keyIter = keys.iterator();
while (keyIter.hasNext()) {
Object key = keyIter.next();
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java Wed Jan 21 21:44:21 2009
@@ -87,9 +87,9 @@
* Pass the new key and value to reduce.
* If any of the grouping columns are not found in the value, the record is skipped.
*/
- public void map(@SuppressWarnings("unused") ImmutableBytesWritable key,
- RowResult value, OutputCollector<ImmutableBytesWritable,RowResult> output,
- @SuppressWarnings("unused") Reporter reporter) throws IOException {
+ public void map(ImmutableBytesWritable key, RowResult value,
+ OutputCollector<ImmutableBytesWritable,RowResult> output,
+ Reporter reporter) throws IOException {
byte[][] keyVals = extractKeyValues(value);
if(keyVals != null) {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java Wed Jan 21 21:44:21 2009
@@ -61,7 +61,7 @@
}
public int getPartition(ImmutableBytesWritable key,
- @SuppressWarnings("unused") V2 value, int numPartitions) {
+ V2 value, int numPartitions) {
byte[] region = null;
// Only one region return 0
if (this.startKeys.length == 1){
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java Wed Jan 21 21:44:21 2009
@@ -62,7 +62,7 @@
*/
public void map(ImmutableBytesWritable key, RowResult value,
OutputCollector<ImmutableBytesWritable,RowResult> output,
- @SuppressWarnings("unused") Reporter reporter) throws IOException {
+ Reporter reporter) throws IOException {
// convert
output.collect(key, value);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java Wed Jan 21 21:44:21 2009
@@ -45,7 +45,7 @@
*/
public void reduce(ImmutableBytesWritable key, Iterator<BatchUpdate> values,
OutputCollector<ImmutableBytesWritable, BatchUpdate> output,
- @SuppressWarnings("unused") Reporter reporter)
+ Reporter reporter)
throws IOException {
while(values.hasNext()) {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexConfiguration.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexConfiguration.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexConfiguration.java Wed Jan 21 21:44:21 2009
@@ -20,7 +20,6 @@
package org.apache.hadoop.hbase.mapred;
import java.io.ByteArrayInputStream;
-import java.io.IOException;
import java.io.OutputStream;
import java.io.StringWriter;
import java.util.concurrent.ConcurrentHashMap;
@@ -75,6 +74,9 @@
static final String HBASE_INDEX_OPTIMIZE = "hbase.index.optimize";
public static class ColumnConf extends Properties {
+
+ private static final long serialVersionUID = 7419012290580607821L;
+
boolean getBoolean(String name, boolean defaultValue) {
String valueString = getProperty(name);
if ("true".equals(valueString))
@@ -330,7 +332,7 @@
}
}
- public void write(OutputStream out) throws IOException {
+ public void write(OutputStream out) {
try {
Document doc = writeDocument();
DOMSource source = new DOMSource(doc);
@@ -402,6 +404,7 @@
}
}
+ @Override
public String toString() {
StringWriter writer = new StringWriter();
try {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java Wed Jan 21 21:44:21 2009
@@ -102,8 +102,8 @@
boolean closed;
private long docCount = 0;
- public void write(@SuppressWarnings("unused") ImmutableBytesWritable key,
- LuceneDocumentWrapper value)
+ public void write(ImmutableBytesWritable key,
+ LuceneDocumentWrapper value)
throws IOException {
// unwrap and index doc
Document doc = value.get();
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java Wed Jan 21 21:44:21 2009
@@ -66,7 +66,7 @@
public void reduce(ImmutableBytesWritable key, Iterator<RowResult> values,
OutputCollector<ImmutableBytesWritable, LuceneDocumentWrapper> output,
- @SuppressWarnings("unused") Reporter reporter)
+ Reporter reporter)
throws IOException {
if (!values.hasNext()) {
return;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/LuceneDocumentWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/LuceneDocumentWrapper.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/LuceneDocumentWrapper.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/LuceneDocumentWrapper.java Wed Jan 21 21:44:21 2009
@@ -21,8 +21,6 @@
import java.io.DataInput;
import java.io.DataOutput;
-import java.io.IOException; //TODO: remove
-
import org.apache.hadoop.io.Writable;
import org.apache.lucene.document.Document;
@@ -47,11 +45,11 @@
return doc;
}
- public void readFields(@SuppressWarnings("unused") DataInput in) {
+ public void readFields(DataInput in) {
// intentionally left blank
}
- public void write(@SuppressWarnings("unused") DataOutput out) {
+ public void write(DataOutput out) {
// intentionally left blank
}
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java Wed Jan 21 21:44:21 2009
@@ -59,7 +59,7 @@
public void map(ImmutableBytesWritable row, RowResult value,
OutputCollector<ImmutableBytesWritable, RowResult> output,
- @SuppressWarnings("unused") Reporter reporter)
+ Reporter reporter)
throws IOException {
boolean content = false;
for (Map.Entry<byte [], Cell> e: value.entrySet()) {
@@ -82,7 +82,7 @@
* @return the JobConf
* @throws IOException
*/
- @SuppressWarnings({ "unused", "deprecation" })
+ @SuppressWarnings("unused")
public JobConf createSubmittableJob(String[] args) throws IOException {
JobConf c = new JobConf(getConf(), RowCounter.class);
c.setJobName(NAME);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java Wed Jan 21 21:44:21 2009
@@ -36,6 +36,7 @@
*/
public class TableInputFormat extends TableInputFormatBase implements
JobConfigurable {
+ @SuppressWarnings("hiding")
private final Log LOG = LogFactory.getLog(TableInputFormat.class);
/**
@@ -62,7 +63,6 @@
}
}
- @SuppressWarnings("deprecation")
public void validateInput(JobConf job) throws IOException {
// expecting exactly one path
Path [] tableNames = FileInputFormat.getInputPaths(job);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java Wed Jan 21 21:44:21 2009
@@ -34,9 +34,7 @@
import org.apache.hadoop.hbase.filter.StopRowFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
@@ -74,7 +72,7 @@
*/
public abstract class TableInputFormatBase
implements InputFormat<ImmutableBytesWritable, RowResult> {
- private final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
+ final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
private byte [][] inputColumns;
private HTable table;
private TableRecordReader tableRecordReader;
@@ -203,7 +201,6 @@
* @return true if there was more data
* @throws IOException
*/
- @SuppressWarnings("unchecked")
public boolean next(ImmutableBytesWritable key, RowResult value)
throws IOException {
RowResult result;
@@ -215,13 +212,14 @@
this.scanner.next(); // skip presumed already mapped row
result = this.scanner.next();
}
- boolean hasMore = result != null && result.size() > 0;
- if (hasMore) {
+
+ if (result != null && result.size() > 0) {
key.set(result.getRow());
lastRow = key.get();
Writables.copyWritable(result, value);
+ return true;
}
- return hasMore;
+ return false;
}
}
@@ -232,10 +230,8 @@
* @see org.apache.hadoop.mapred.InputFormat#getRecordReader(InputSplit,
* JobConf, Reporter)
*/
- public RecordReader<ImmutableBytesWritable, RowResult> getRecordReader(InputSplit split,
- @SuppressWarnings("unused")
- JobConf job, @SuppressWarnings("unused")
- Reporter reporter)
+ public RecordReader<ImmutableBytesWritable, RowResult> getRecordReader(
+ InputSplit split, JobConf job, Reporter reporter)
throws IOException {
TableSplit tSplit = (TableSplit) split;
TableRecordReader trr = this.tableRecordReader;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java Wed Jan 21 21:44:21 2009
@@ -32,8 +32,7 @@
* @param <K> WritableComparable key class
* @param <V> Writable value class
*/
-@SuppressWarnings("unchecked")
-public interface TableMap<K extends WritableComparable, V extends Writable>
+public interface TableMap<K extends WritableComparable<K>, V extends Writable>
extends Mapper<ImmutableBytesWritable, RowResult, K, V> {
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java Wed Jan 21 21:44:21 2009
@@ -63,12 +63,12 @@
m_table = table;
}
- public void close(@SuppressWarnings("unused") Reporter reporter)
+ public void close(Reporter reporter)
throws IOException {
m_table.flushCommits();
}
- public void write(@SuppressWarnings("unused") ImmutableBytesWritable key,
+ public void write(ImmutableBytesWritable key,
BatchUpdate value) throws IOException {
m_table.commit(new BatchUpdate(value));
}
@@ -76,11 +76,8 @@
@Override
@SuppressWarnings("unchecked")
- public RecordWriter getRecordWriter(
- @SuppressWarnings("unused") FileSystem ignored,
- JobConf job,
- @SuppressWarnings("unused") String name,
- @SuppressWarnings("unused") Progressable progress) throws IOException {
+ public RecordWriter getRecordWriter(FileSystem ignored,
+ JobConf job, String name, Progressable progress) throws IOException {
// expecting exactly one path
@@ -97,7 +94,6 @@
}
@Override
- @SuppressWarnings("unused")
public void checkOutputSpecs(FileSystem ignored, JobConf job)
throws FileAlreadyExistsException, InvalidJobConfException, IOException {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableSplit.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableSplit.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableSplit.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableSplit.java Wed Jan 21 21:44:21 2009
@@ -30,7 +30,7 @@
/**
* A table split corresponds to a key range [low, high)
*/
-public class TableSplit implements InputSplit, Comparable {
+public class TableSplit implements InputSplit, Comparable<TableSplit> {
private byte [] m_tableName;
private byte [] m_startRow;
private byte [] m_endRow;
@@ -106,8 +106,7 @@
Bytes.toString(m_startRow) + "," + Bytes.toString(m_endRow);
}
- public int compareTo(Object arg) {
- TableSplit other = (TableSplit)arg;
- return Bytes.compareTo(getStartRow(), other.getStartRow());
+ public int compareTo(TableSplit o) {
+ return Bytes.compareTo(getStartRow(), o.getStartRow());
}
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ColumnOperation.java Wed Jan 21 21:44:21 2009
@@ -38,18 +38,15 @@
}
@Override
- protected void processScanItem(
- @SuppressWarnings("unused") String serverName,
- @SuppressWarnings("unused") long startCode, final HRegionInfo info)
- throws IOException {
+ protected void processScanItem(String serverName, long startCode,
+ final HRegionInfo info) throws IOException {
if (isEnabled(info)) {
throw new TableNotDisabledException(tableName);
}
}
protected void updateRegionInfo(HRegionInterface server, byte [] regionName,
- HRegionInfo i)
- throws IOException {
+ HRegionInfo i) throws IOException {
BatchUpdate b = new BatchUpdate(i.getRegionName());
b.put(COL_REGIONINFO, Writables.getBytes(i));
server.batchUpdate(regionName, b, -1L);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java Wed Jan 21 21:44:21 2009
@@ -94,8 +94,8 @@
static final Log LOG = LogFactory.getLog(HMaster.class.getName());
- public long getProtocolVersion(@SuppressWarnings("unused") String protocol,
- @SuppressWarnings("unused") long clientVersion) {
+ public long getProtocolVersion(String protocol,
+ long clientVersion) {
return HBaseRPCProtocolVersion.versionID;
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyTableMeta.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyTableMeta.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyTableMeta.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyTableMeta.java Wed Jan 21 21:44:21 2009
@@ -57,10 +57,8 @@
}
@Override
- protected void processScanItem(
- @SuppressWarnings("unused") String serverName,
- @SuppressWarnings("unused") long startCode, final HRegionInfo info)
- throws IOException {
+ protected void processScanItem(String serverName, long startCode,
+ final HRegionInfo info) throws IOException {
if (isEnabled(info)) {
throw new TableNotDisabledException(tableName.toString());
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/NotAllMetaRegionsOnlineException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/NotAllMetaRegionsOnlineException.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/NotAllMetaRegionsOnlineException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/NotAllMetaRegionsOnlineException.java Wed Jan 21 21:44:21 2009
@@ -25,8 +25,9 @@
/**
* Thrown when an operation requires the root and all meta regions to be online
*/
-@SuppressWarnings("serial")
public class NotAllMetaRegionsOnlineException extends DoNotRetryIOException {
+ private static final long serialVersionUID = 6439786157874827523L;
+
/**
* default constructor
*/
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RootScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RootScanner.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RootScanner.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RootScanner.java Wed Jan 21 21:44:21 2009
@@ -28,14 +28,13 @@
class RootScanner extends BaseScanner {
/**
* Constructor
- *
* @param master
*/
public RootScanner(HMaster master) {
super(master, true, master.metaRescanInterval, master.closed);
}
- /*
+ /**
* Don't retry if we get an error while scanning. Errors are most often
*
* caused by the server going away. Wait until next rescan interval when
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java Wed Jan 21 21:44:21 2009
@@ -50,7 +50,7 @@
* load numbers, dying servers, etc.
*/
class ServerManager implements HConstants {
- private static final Log LOG =
+ static final Log LOG =
LogFactory.getLog(ServerManager.class.getName());
private final AtomicInteger quiescedServers = new AtomicInteger(0);
@@ -101,7 +101,7 @@
getInt("hbase.regions.nobalancing.count", 4);
}
- /*
+ /**
* Look to see if we have ghost references to this regionserver such as
* still-existing leases or if regionserver is on the dead servers list
* getting its logs processed.
@@ -337,7 +337,8 @@
}
}
- /* RegionServer is checking in, no exceptional circumstances
+ /**
+ * RegionServer is checking in, no exceptional circumstances
* @param serverName
* @param serverInfo
* @param mostLoadedRegions
@@ -739,7 +740,6 @@
/** Instantiated to monitor the health of a region server */
private class ServerExpirer implements LeaseListener {
- @SuppressWarnings("hiding")
private String server;
ServerExpirer(String server) {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableDelete.java Wed Jan 21 21:44:21 2009
@@ -41,10 +41,8 @@
}
@Override
- protected void processScanItem(
- @SuppressWarnings("unused") String serverName,
- @SuppressWarnings("unused") long startCode,
- final HRegionInfo info) throws IOException {
+ protected void processScanItem(String serverName,
+ long startCode, final HRegionInfo info) throws IOException {
if (isEnabled(info)) {
throw new TableNotDisabledException(tableName);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java Wed Jan 21 21:44:21 2009
@@ -62,7 +62,7 @@
* Since this object is a registered updater, this method will be called
* periodically, e.g. every 5 seconds.
*/
- public void doUpdates(@SuppressWarnings("unused") MetricsContext unused) {
+ public void doUpdates(MetricsContext unused) {
synchronized (this) {
synchronized(this.cluster_requests) {
this.cluster_requests.pushMetric(metricsRecord);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/metrics/file/TimeStampingFileContext.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/metrics/file/TimeStampingFileContext.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/metrics/file/TimeStampingFileContext.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/metrics/file/TimeStampingFileContext.java Wed Jan 21 21:44:21 2009
@@ -46,6 +46,7 @@
this.sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
}
+ @Override
public void init(String contextName, ContextFactory factory) {
super.init(contextName, factory);
String fileName = getAttribute(FILE_NAME_PROPERTY);
@@ -54,6 +55,7 @@
}
}
+ @Override
public void startMonitoring() throws IOException {
if (file == null) {
writer = new PrintWriter(new BufferedOutputStream(System.out));
@@ -63,6 +65,7 @@
super.startMonitoring();
}
+ @Override
public void stopMonitoring() {
super.stopMonitoring();
if (writer != null) {
@@ -75,6 +78,7 @@
return this.sdf.format(new Date());
}
+ @Override
public void emitRecord(String contextName, String recordName,
OutputRecord outRec) {
writer.print(iso8601());
@@ -100,6 +104,7 @@
writer.println();
}
+ @Override
public void flush() {
writer.flush();
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java Wed Jan 21 21:44:21 2009
@@ -49,7 +49,6 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.SequenceFile.Metadata;
import org.apache.hadoop.io.SequenceFile.Reader;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java Wed Jan 21 21:44:21 2009
@@ -33,7 +33,7 @@
*
* Some Transactional edits (START, COMMIT, ABORT) will not have an associated row.
*/
-public class HLogKey implements WritableComparable {
+public class HLogKey implements WritableComparable<HLogKey> {
private byte [] regionName;
private byte [] tablename;
private byte [] row;
@@ -94,7 +94,7 @@
@Override
public boolean equals(Object obj) {
- return compareTo(obj) == 0;
+ return compareTo((HLogKey)obj) == 0;
}
@Override
@@ -109,19 +109,18 @@
// Comparable
//
- public int compareTo(Object o) {
- HLogKey other = (HLogKey) o;
- int result = Bytes.compareTo(this.regionName, other.regionName);
+ public int compareTo(HLogKey o) {
+ int result = Bytes.compareTo(this.regionName, o.regionName);
if(result == 0) {
- result = Bytes.compareTo(this.row, other.row);
+ result = Bytes.compareTo(this.row, o.row);
if(result == 0) {
- if (this.logSeqNum < other.logSeqNum) {
+ if (this.logSeqNum < o.logSeqNum) {
result = -1;
- } else if (this.logSeqNum > other.logSeqNum) {
+ } else if (this.logSeqNum > o.logSeqNum) {
result = 1;
}
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Wed Jan 21 21:44:21 2009
@@ -1588,8 +1588,7 @@
}
public void batchUpdate(final byte [] regionName, BatchUpdate b,
- @SuppressWarnings("unused") long lockId)
- throws IOException {
+ long lockId) throws IOException {
if (b.getRow() == null)
throw new IllegalArgumentException("update has null row");
@@ -2132,7 +2131,7 @@
}
public long getProtocolVersion(final String protocol,
- @SuppressWarnings("unused") final long clientVersion)
+ final long clientVersion)
throws IOException {
if (protocol.equals(HRegionInterface.class.getName())) {
return HBaseRPCProtocolVersion.versionID;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/LogFlusher.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/LogFlusher.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/LogFlusher.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/LogFlusher.java Wed Jan 21 21:44:21 2009
@@ -48,6 +48,7 @@
}
}
+ @Override
protected void chore() {
synchronized (log) {
HLog hlog = log.get();
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Memcache.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Memcache.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Memcache.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Memcache.java Wed Jan 21 21:44:21 2009
@@ -258,7 +258,6 @@
* @param b
* @return Return lowest of a or b or null if both a and b are null
*/
- @SuppressWarnings("unchecked")
private byte [] getLowest(final byte [] a,
final byte [] b) {
if (a == null) {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java Wed Jan 21 21:44:21 2009
@@ -24,8 +24,9 @@
/**
* Thrown if request for nonexistent column family.
*/
-@SuppressWarnings("serial")
public class NoSuchColumnFamilyException extends DoNotRetryIOException {
+ private static final long serialVersionUID = -6569952730832331274L;
+
/** default constructor */
public NoSuchColumnFamilyException() {
super();
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java Wed Jan 21 21:44:21 2009
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.regionserver.metrics;
import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import org.apache.commons.logging.Log;
@@ -96,7 +95,7 @@
* Since this object is a registered updater, this method will be called
* periodically, e.g. every 5 seconds.
*/
- public void doUpdates(@SuppressWarnings("unused") MetricsContext unused) {
+ public void doUpdates(MetricsContext unused) {
synchronized (this) {
this.stores.pushMetric(this.metricsRecord);
this.storefiles.pushMetric(this.metricsRecord);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexedRegionServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexedRegionServer.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexedRegionServer.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexedRegionServer.java Wed Jan 21 21:44:21 2009
@@ -27,8 +27,6 @@
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.ipc.IndexedRegionInterface;
import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.transactional.TransactionalRegion;
import org.apache.hadoop.hbase.regionserver.transactional.TransactionalRegionServer;
import org.apache.hadoop.util.Progressable;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalHLogManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalHLogManager.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalHLogManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalHLogManager.java Wed Jan 21 21:44:21 2009
@@ -49,8 +49,6 @@
/**
* Responsible for writing and reading (recovering) transactional information
* to/from the HLog.
- *
- *
*/
class TransactionalHLogManager {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java Wed Jan 21 21:44:21 2009
@@ -83,7 +83,7 @@
private static final int DEFAULT_OLD_TRANSACTION_FLUSH = 100; // Do a flush if we have this many old transactions..
- private static final Log LOG = LogFactory.getLog(TransactionalRegion.class);
+ static final Log LOG = LogFactory.getLog(TransactionalRegion.class);
// Collection of active transactions (PENDING) keyed by id.
private Map<String, TransactionState> transactionsById = new HashMap<String, TransactionState>();
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java Wed Jan 21 21:44:21 2009
@@ -8,7 +8,6 @@
import java.util.Comparator;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
@@ -239,9 +238,7 @@
*/
public static boolean equals(final byte [] left, final byte [] right) {
return left == null && right == null? true:
- left == null && right != null? false:
- left != null && right == null? false:
- left.length != right.length? false:
+ (left == null || right == null || (left.length != right.length))? false:
compareTo(left, right) == 0;
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/JenkinsHash.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/JenkinsHash.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/JenkinsHash.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/JenkinsHash.java Wed Jan 21 21:44:21 2009
@@ -78,6 +78,7 @@
* <p>Use for hash table lookup, or anything where one collision in 2^^32 is
* acceptable. Do NOT use for cryptographic purposes.
*/
+ @Override
@SuppressWarnings("fallthrough")
public int hash(byte[] key, int nbytes, int initval) {
int length = nbytes;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java Wed Jan 21 21:44:21 2009
@@ -20,7 +20,6 @@
package org.apache.hadoop.hbase.util;
-import java.io.FileNotFoundException; //TODO: remove
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
@@ -430,7 +429,6 @@
scanRootRegion(new ScannerListener() {
private final Log SL_LOG = LogFactory.getLog(this.getClass());
- @SuppressWarnings("unused")
public boolean processRow(HRegionInfo info) throws IOException {
SL_LOG.debug("Testing " + info);
if (Bytes.equals(info.getTableDesc().getName(),
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MurmurHash.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MurmurHash.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MurmurHash.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MurmurHash.java Wed Jan 21 21:44:21 2009
@@ -32,6 +32,7 @@
return _instance;
}
+ @Override
public int hash(byte[] data, int length, int seed) {
int m = 0x5bd1e995;
int r = 24;
@@ -62,13 +63,13 @@
if (left != 0) {
if (left >= 3) {
- h ^= (int) data[length - 3] << 16;
+ h ^= data[length - 3] << 16;
}
if (left >= 2) {
- h ^= (int) data[length - 2] << 8;
+ h ^= data[length - 2] << 8;
}
if (left >= 1) {
- h ^= (int) data[length - 1];
+ h ^= data[length - 1];
}
h *= m;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Pair.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Pair.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Pair.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Pair.java Wed Jan 21 21:44:21 2009
@@ -68,6 +68,7 @@
return (x == null && y == null) || (x != null && x.equals(y));
}
+ @Override
@SuppressWarnings("unchecked")
public boolean equals(Object other)
{
@@ -75,6 +76,7 @@
equals(second, ((Pair)other).second);
}
+ @Override
public int hashCode()
{
if (first == null)
@@ -85,6 +87,7 @@
return first.hashCode() * 17 + second.hashCode();
}
+ @Override
public String toString()
{
return "{" + getFirst() + "," + getSecond() + "}";
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValue.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValue.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValue.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValue.java Wed Jan 21 21:44:21 2009
@@ -43,7 +43,7 @@
return get();
}
- public V setValue(@SuppressWarnings("unused") V value) {
+ public V setValue(V value) {
throw new RuntimeException("Not implemented");
}
}
\ No newline at end of file
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueMap.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueMap.java Wed Jan 21 21:44:21 2009
@@ -68,7 +68,7 @@
}
@SuppressWarnings("unchecked")
- public void putAll(@SuppressWarnings("unused") Map map) {
+ public void putAll(Map map) {
throw new RuntimeException("Not implemented");
}
@@ -96,7 +96,7 @@
return this.internalMap.containsKey(key);
}
- public boolean containsValue(@SuppressWarnings("unused") Object value) {
+ public boolean containsValue(Object value) {
/* checkReferences();
return internalMap.containsValue(value);*/
throw new UnsupportedOperationException("Don't support containsValue!");
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java Wed Jan 21 21:44:21 2009
@@ -82,7 +82,7 @@
}
@SuppressWarnings("unchecked")
- public void putAll(@SuppressWarnings("unused") Map map) {
+ public void putAll(Map map) {
throw new RuntimeException("Not implemented");
}
@@ -110,7 +110,7 @@
return this.internalMap.containsKey(key);
}
- public boolean containsValue(@SuppressWarnings("unused") Object value) {
+ public boolean containsValue(Object value) {
/* checkReferences();
return internalMap.containsValue(value);*/
throw new UnsupportedOperationException("Don't support containsValue!");
Modified: hadoop/hbase/trunk/src/java/org/onelab/filter/BloomFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/onelab/filter/BloomFilter.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/onelab/filter/BloomFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/onelab/filter/BloomFilter.java Wed Jan 21 21:44:21 2009
@@ -50,10 +50,8 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-
import java.util.BitSet;
-import org.apache.hadoop.hbase.util.Hash;
/**
* Implements a <i>Bloom filter</i>, as defined by Bloom in 1970.
Modified: hadoop/hbase/trunk/src/java/org/onelab/filter/CountingBloomFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/onelab/filter/CountingBloomFilter.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/onelab/filter/CountingBloomFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/onelab/filter/CountingBloomFilter.java Wed Jan 21 21:44:21 2009
@@ -50,9 +50,6 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.util.Arrays; //TODO: remove
-
-import org.apache.hadoop.hbase.util.Hash;
/**
* Implements a <i>counting Bloom filter</i>, as defined by Fan et al. in a ToN
@@ -228,9 +225,8 @@
}
if (res != Integer.MAX_VALUE) {
return res;
- } else {
- return 0;
}
+ return 0;
}
@Override
@@ -257,7 +253,6 @@
}//end or()
@Override
- @SuppressWarnings("unused")
public void xor(Filter filter){
throw new UnsupportedOperationException("xor() is undefined for "
+ this.getClass().getName());
@@ -289,7 +284,7 @@
CountingBloomFilter cbf = new CountingBloomFilter(vectorSize, nbHash, hashType);
cbf.buckets = this.buckets.clone();
return cbf;
- }//end clone()
+ }
// Writable
@@ -311,4 +306,4 @@
buckets[i] = in.readLong();
}
}
-}//end class
+}
\ No newline at end of file
Modified: hadoop/hbase/trunk/src/java/org/onelab/filter/DynamicBloomFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/onelab/filter/DynamicBloomFilter.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/onelab/filter/DynamicBloomFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/onelab/filter/DynamicBloomFilter.java Wed Jan 21 21:44:21 2009
@@ -51,8 +51,6 @@
import java.io.DataOutput;
import java.io.IOException;
-import org.apache.hadoop.hbase.util.Hash;
-
/**
* Implements a <i>dynamic Bloom filter</i>, as defined in the INFOCOM 2006 paper.
* <p>
Modified: hadoop/hbase/trunk/src/java/org/onelab/filter/Key.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/onelab/filter/Key.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/onelab/filter/Key.java (original)
+++ hadoop/hbase/trunk/src/java/org/onelab/filter/Key.java Wed Jan 21 21:44:21 2009
@@ -58,7 +58,7 @@
*
* @see org.onelab.filter.Filter The general behavior of a filter
*/
-public class Key implements WritableComparable {
+public class Key implements WritableComparable<Key> {
/** Byte value of key */
byte[] bytes;
@@ -131,7 +131,7 @@
@Override
public boolean equals(Object o) {
- return this.compareTo(o) == 0;
+ return this.compareTo((Key)o) == 0;
}
@Override
@@ -160,16 +160,14 @@
// Comparable
- public int compareTo(Object o) {
- Key other = (Key)o;
-
- int result = this.bytes.length - other.getBytes().length;
+ public int compareTo(Key o) {
+ int result = this.bytes.length - o.getBytes().length;
for(int i = 0; result == 0 && i < bytes.length; i++) {
- result = this.bytes[i] - other.bytes[i];
+ result = this.bytes[i] - o.bytes[i];
}
if(result == 0) {
- result = Double.valueOf(this.weight - other.weight).intValue();
+ result = Double.valueOf(this.weight - o.weight).intValue();
}
return result;
}
Modified: hadoop/hbase/trunk/src/java/org/onelab/filter/RetouchedBloomFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/onelab/filter/RetouchedBloomFilter.java?rev=736569&r1=736568&r2=736569&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/onelab/filter/RetouchedBloomFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/onelab/filter/RetouchedBloomFilter.java Wed Jan 21 21:44:21 2009
@@ -56,8 +56,6 @@
import java.util.List;
import java.util.Random;
-import org.apache.hadoop.hbase.util.Hash;
-
/**
* Implements a <i>retouched Bloom filter</i>, as defined in the CoNEXT 2006 paper.
* <p>