Posted to commits@hbase.apache.org by st...@apache.org on 2009/01/20 07:32:37 UTC

svn commit: r735946 - in /hadoop/hbase/trunk: ./ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/client/tableindexed/ src/java/org/apache/hadoop/hbase/client/transactional/ src/java/org/apache/hadoop/hbase/filter/ src/java/org...

Author: stack
Date: Mon Jan 19 22:32:36 2009
New Revision: 735946

URL: http://svn.apache.org/viewvc?rev=735946&view=rev
Log:
HBASE-876 There are a large number of Java warnings in HBase

Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BloomFilterMapFile.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HBaseMapFile.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HalfMapFileReader.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Mon Jan 19 22:32:36 2009
@@ -12,8 +12,8 @@
                (Samuel Guo via Stack)
    HBASE-1130  PrefixRowFilter (Michael Gottesman via Stack)
    HBASE-1139  Update Clover in build.xml
-   HBASE-876   There are a large number of Java warnings in HBase; part 1
-               (Evgeny Ryabitskiy via Stack)
+   HBASE-876   There are a large number of Java warnings in HBase; part 1,
+               part 2, and part 3 (Evgeny Ryabitskiy via Stack)
 
 Release 0.19.0 - Unreleased
   INCOMPATIBLE CHANGES

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java Mon Jan 19 22:32:36 2009
@@ -33,7 +33,6 @@
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.RegionException;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
-import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
@@ -45,7 +44,6 @@
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.util.Shell.ExitCodeException;
 
 /**
  * Provides administrative functions for HBase
@@ -133,9 +131,10 @@
   }
   
   private long getPauseTime(int tries) {
-    if (tries >= HConstants.RETRY_BACKOFF.length)
-      tries = HConstants.RETRY_BACKOFF.length - 1;
-    return this.pause * HConstants.RETRY_BACKOFF[tries];
+    int triesCount = tries;
+    if (triesCount >= HConstants.RETRY_BACKOFF.length)
+      triesCount = HConstants.RETRY_BACKOFF.length - 1;
+    return this.pause * HConstants.RETRY_BACKOFF[triesCount];
   }
 
   /**
@@ -534,8 +533,10 @@
     int xtraArgsCount = 1;
     Object [] newargs = new Object[len + xtraArgsCount];
     newargs[0] = regionname;
-    for (int i = 0; i < len; i++) {
-      newargs[i + xtraArgsCount] = args[i];
+    if (args != null) {
+      for (int i = 0; i < len; i++) {
+        newargs[i + xtraArgsCount] = args[i];
+      }
     }
     modifyTable(HConstants.META_TABLE_NAME, HConstants.MODIFY_CLOSE_REGION,
       newargs);

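The getPauseTime() change above is the pattern this commit applies everywhere the compiler flagged an assigned-to parameter: copy the parameter into a local and mutate only the copy. A minimal standalone sketch of the capped-backoff idea, with an illustrative backoff table and base pause rather than the real HConstants values:

    // Hedged sketch of the getPauseTime() pattern; RETRY_BACKOFF and pause
    // are illustrative stand-ins, not HBase's actual constants.
    public class BackoffSketch {
      private static final int[] RETRY_BACKOFF = {1, 1, 1, 2, 2, 4, 4, 8, 16, 32};
      private final long pause = 1000; // assumed base pause, in milliseconds

      long getPauseTime(final int tries) {
        int triesCount = tries; // local copy; the parameter stays untouched
        if (triesCount >= RETRY_BACKOFF.length) {
          triesCount = RETRY_BACKOFF.length - 1; // cap at the last backoff step
        }
        return pause * RETRY_BACKOFF[triesCount];
      }
    }
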
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java Mon Jan 19 22:32:36 2009
@@ -875,20 +875,21 @@
     getRegionLocationForRowWithRetries(byte[] tableName, byte[] rowKey, 
       boolean reload)
     throws IOException {
+      boolean reloadFlag = reload;
       getMaster();
       List<Throwable> exceptions = new ArrayList<Throwable>();
       HRegionLocation location = null;
       int tries = 0;
       while (tries < numRetries) {
         try {
-          location = getRegionLocation(tableName, rowKey, reload);
+          location = getRegionLocation(tableName, rowKey, reloadFlag);
         } catch (Throwable t) {
           exceptions.add(t);
         }
         if (location != null) {
           break;
         }
-        reload = true;
+        reloadFlag = true;
         tries++;
         try {
           Thread.sleep(getPauseTime(tries));

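Same fix in the retry loop above: reload is copied into a local reloadFlag that later iterations may set. A condensed, dependency-free sketch of that loop's shape, where Lookup, numRetries, and the fixed one-second pause are placeholders for the real members:

    import java.util.ArrayList;
    import java.util.List;

    // Hedged sketch of getRegionLocationForRowWithRetries(); names are stand-ins.
    public class RetrySketch {
      interface Lookup {
        Object find(boolean reload) throws Exception;
      }

      static Object findWithRetries(Lookup lookup, int numRetries)
          throws InterruptedException {
        boolean reloadFlag = false; // local copy of the would-be parameter
        List<Throwable> exceptions = new ArrayList<Throwable>();
        Object location = null;
        int tries = 0;
        while (tries < numRetries) {
          try {
            location = lookup.find(reloadFlag);
          } catch (Throwable t) {
            exceptions.add(t); // collect every failure for the final report
          }
          if (location != null) {
            break;
          }
          reloadFlag = true; // refresh cached state on all later attempts
          tries++;
          Thread.sleep(1000); // crude stand-in for getPauseTime(tries)
        }
        if (location == null) {
          throw new RuntimeException("retries exhausted: " + exceptions);
        }
        return location;
      }
    }
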
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java Mon Jan 19 22:32:36 2009
@@ -7,7 +7,6 @@
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.util.Bytes;      //TODO: remove
 
 /**
  * Scanner class that contains the <code>.META.</code> table scanning logic 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java Mon Jan 19 22:32:36 2009
@@ -25,8 +25,10 @@
 /**
  * Thrown when a scanner has timed out. 
  */
-@SuppressWarnings("serial")
 public class ScannerTimeoutException extends DoNotRetryIOException {
+
+  private static final long serialVersionUID = 8788838690290688313L;
+
   /** default constructor */
   ScannerTimeoutException() {
     super();

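This class and the two transactional exceptions below trade @SuppressWarnings("serial") for an explicit serialVersionUID, which satisfies the Serializable contract instead of silencing it. The pattern in miniature, with an illustrative UID:

    import java.io.IOException;

    // Hedged sketch: declare serialVersionUID rather than suppress "serial".
    public class ExampleTimeoutException extends IOException {
      // Any stable value works; the serialver tool can generate one.
      private static final long serialVersionUID = 1L;

      public ExampleTimeoutException() {
        super();
      }

      public ExampleTimeoutException(String message) {
        super(message);
      }
    }
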
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java Mon Jan 19 22:32:36 2009
@@ -15,55 +15,46 @@
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(byte[] key, byte[] value) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(String key, String value) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxVersions(int maxVersions) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setInMemory(boolean inMemory) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setBlockCacheEnabled(boolean blockCacheEnabled) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxValueLength(int maxLength) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setTimeToLive(int timeToLive) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setCompressionType(CompressionType type) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMapFileIndexInterval(int interval) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }

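Dropping @SuppressWarnings("unused") from these setters is safe because an @Override method's parameters are dictated by the superclass signature, so the unused-parameter warning never applied. The read-only-wrapper pattern itself, in a hedged miniature where Descriptor stands in for HColumnDescriptor:

    // Descriptor is a stand-in; only the shape of the pattern matters.
    class Descriptor {
      private int maxVersions = 3;
      public void setMaxVersions(int maxVersions) { this.maxVersions = maxVersions; }
      public int getMaxVersions() { return maxVersions; }
    }

    class UnmodifyableDescriptor extends Descriptor {
      @Override
      public void setMaxVersions(int maxVersions) {
        // Reads pass through to the superclass; every mutator throws.
        throw new UnsupportedOperationException("Descriptor is read-only");
      }
    }
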
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java Mon Jan 19 22:32:36 2009
@@ -75,37 +75,31 @@
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setInMemory(boolean inMemory) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setReadOnly(boolean readOnly) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(byte[] key, byte[] value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(String key, String value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxFileSize(long maxFileSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMemcacheFlushSize(int memcacheFlushSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java Mon Jan 19 22:32:36 2009
@@ -137,7 +137,6 @@
   }
 
   /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
   public void readFields(DataInput in) throws IOException {
     indexId = in.readUTF();
     int numIndexedCols = in.readInt();

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTable.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTable.java Mon Jan 19 22:32:36 2009
@@ -48,7 +48,7 @@
   public static final byte[] INDEX_BASE_ROW_COLUMN = Bytes.add(
       INDEX_COL_FAMILY, Bytes.toBytes("ROW"));
 
-  private static final Log LOG = LogFactory.getLog(IndexedTable.class);
+  static final Log LOG = LogFactory.getLog(IndexedTable.class);
 
   private Map<String, HTable> indexIdToTable = new HashMap<String, HTable>();
 

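Widening LOG from private to package-private here (and in BloomFilterMapFile below) is the usual cure for the synthetic-access warning: a private static field read from an inner or anonymous class makes the compiler emit a hidden accessor method. A hedged sketch of the situation, assuming commons-logging:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    // Package-private LOG: inner classes reach it without a synthetic accessor.
    public class OuterExample {
      static final Log LOG = LogFactory.getLog(OuterExample.class); // not private

      class Inner {
        void work() {
          LOG.info("logged without a compiler-generated accessor");
        }
      }
    }
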
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java Mon Jan 19 22:32:36 2009
@@ -29,7 +29,6 @@
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
-import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.util.Bytes;
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java Mon Jan 19 22:32:36 2009
@@ -22,9 +22,10 @@
 /** Thrown when a transaction cannot be committed. 
  * 
  */
-@SuppressWarnings("serial")
 public class CommitUnsuccessfulException extends Exception {
 
+  private static final long serialVersionUID = 7062921444531109202L;
+
   /** Default Constructor */
   public CommitUnsuccessfulException() {
     super();

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java Mon Jan 19 22:32:36 2009
@@ -24,8 +24,9 @@
 /**
  * Thrown if a region server is passed an unknown transaction id
  */
-@SuppressWarnings("serial")
- public class UnknownTransactionException extends DoNotRetryIOException {
+public class UnknownTransactionException extends DoNotRetryIOException {
+
+  private static final long serialVersionUID = 698575374929591099L;
 
   /** constructor */
   public UnknownTransactionException() {

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java Mon Jan 19 22:32:36 2009
@@ -122,11 +122,11 @@
    this.filterIfColumnMissing = filterIfColumnMissing;
  }
 
-  public boolean filterRowKey(@SuppressWarnings("unused") final byte[] rowKey) {
+  public boolean filterRowKey(final byte[] rowKey) {
     return false;
   }
 
-  public boolean filterColumn(@SuppressWarnings("unused") final byte[] rowKey,
+  public boolean filterColumn(final byte[] rowKey,
       final byte[] colKey, final byte[] data) {
     if (!filterIfColumnMissing) {
       return false; // Must filter on the whole row
@@ -201,12 +201,12 @@
     // Nothing.
   }
 
-  public void rowProcessed(@SuppressWarnings("unused") final boolean filtered,
-      @SuppressWarnings("unused") final byte[] key) {
+  public void rowProcessed(final boolean filtered,
+      final byte[] key) {
     // Nothing
   }
 
-  public void validate(@SuppressWarnings("unused") final byte[][] columns) {
+  public void validate(final byte[][] columns) {
     // Nothing
   }
 

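The filter edits all follow one rule: these methods implement RowFilterInterface rather than override a superclass method, so their parameters are fixed by the contract and the annotation was never needed; empty bodies keep a comment saying so. A hedged miniature, with RowFilter standing in for the real interface:

    // RowFilter is a stand-in for RowFilterInterface.
    interface RowFilter {
      boolean filterRowKey(byte[] rowKey);
      void rowProcessed(boolean filtered, byte[] key);
    }

    class AcceptAllFilter implements RowFilter {
      public boolean filterRowKey(final byte[] rowKey) {
        return false; // accepts every row; the parameter is required by the contract
      }

      public void rowProcessed(final boolean filtered, final byte[] key) {
        // Nothing to track, but the signature must match the interface.
      }
    }
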
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java Mon Jan 19 22:32:36 2009
@@ -61,7 +61,7 @@
     this.pageSize = pageSize;
   }
 
-  public void validate(@SuppressWarnings("unused") final byte [][] columns) {
+  public void validate(final byte [][] columns) {
     // Doesn't filter columns
   }
 
@@ -70,7 +70,7 @@
   }
 
   public void rowProcessed(boolean filtered,
-      @SuppressWarnings("unused") byte [] rowKey) {
+      byte [] rowKey) {
     if (!filtered) {
       this.rowsAccepted++;
     }
@@ -84,18 +84,17 @@
     return this.rowsAccepted > this.pageSize;
   }
 
-  public boolean filterRowKey(@SuppressWarnings("unused") final byte [] r) {
+  public boolean filterRowKey(final byte [] r) {
     return filterAllRemaining();
   }
 
-  public boolean filterColumn(@SuppressWarnings("unused") final byte [] rowKey,
-    @SuppressWarnings("unused") final byte [] colKey,
-    @SuppressWarnings("unused") final byte[] data) {
+  public boolean filterColumn(final byte [] rowKey,
+    final byte [] colKey,
+    final byte[] data) {
     return filterAllRemaining();
   }
 
-  public boolean filterRow(@SuppressWarnings("unused")
-      final SortedMap<byte [], Cell> columns) {
+  public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return filterAllRemaining();
   }
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java Mon Jan 19 22:32:36 2009
@@ -44,15 +44,12 @@
    * Default Constructor, filters nothing. Required for RPC
    * deserialization
    */
-  @SuppressWarnings("unused")
   public PrefixRowFilter() { }
   
-  @SuppressWarnings("unused")
   public void reset() {
     // Nothing to reset
   }
-  
-  @SuppressWarnings("unused")
+
   public void rowProcessed(boolean filtered, byte [] key) {
     // does not care
   }
@@ -76,18 +73,15 @@
     return false;
   }
 
-  @SuppressWarnings("unused")
   public boolean filterColumn(final byte [] rowKey, final byte [] colunmName,
       final byte[] columnValue) {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public void validate(final byte [][] columns) {
     // does not do this
   }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java Mon Jan 19 22:32:36 2009
@@ -86,7 +86,6 @@
     this.setColumnFilters(columnFilter);
   }
   
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] rowKey) {
     //doesn't care
   }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java Mon Jan 19 22:32:36 2009
@@ -61,7 +61,7 @@
     return this.stopRowKey;
   }
 
-  public void validate(@SuppressWarnings("unused") final byte [][] columns) {
+  public void validate(final byte [][] columns) {
     // Doesn't filter columns
   }
 
@@ -69,7 +69,6 @@
     // Nothing to reset
   }
 
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] rowKey) {
     // Doesn't care
   }
@@ -96,9 +95,8 @@
    * Because StopRowFilter does not examine column information, this method 
    * defaults to calling the rowKey-only version of filter.
    */
-  public boolean filterColumn(@SuppressWarnings("unused") final byte [] rowKey,
-    @SuppressWarnings("unused") final byte [] colKey,
-    @SuppressWarnings("unused") final byte[] data) {
+  public boolean filterColumn(final byte [] rowKey, final byte [] colKey,
+      final byte[] data) {
     return filterRowKey(rowKey);
   }
 
@@ -106,8 +104,7 @@
    * Because StopRowFilter does not examine column information, this method 
    * defaults to calling filterAllRemaining().
    */
-  public boolean filterRow(@SuppressWarnings("unused")
-      final SortedMap<byte [], Cell> columns) {
+  public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return filterAllRemaining();
   }
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java Mon Jan 19 22:32:36 2009
@@ -94,6 +94,8 @@
     // A memory-sensitive map that has soft references to values
     this.blocks = new SoftValueMap<Long, byte []>() {
       private long hits, misses;
+      
+      @Override
       public byte [] get(Object key) {
         byte [] value = super.get(key);
         if (value == null) {
@@ -140,7 +142,6 @@
   }
 
   @Override
-  @SuppressWarnings("unused")
   public synchronized boolean seekToNewSource(long targetPos)
       throws IOException {
     return false;
@@ -234,7 +235,6 @@
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void mark(int readLimit) {
     // Do nothing
   }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BloomFilterMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BloomFilterMapFile.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BloomFilterMapFile.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BloomFilterMapFile.java Mon Jan 19 22:32:36 2009
@@ -42,8 +42,11 @@
  * tested first against bloom filter. Keys are HStoreKey.  If passed bloom
  * filter is null, just passes invocation to parent.
  */
+// TODO: fix the generic warnings inherited from MapFile methods
+@SuppressWarnings("unchecked")
 public class BloomFilterMapFile extends HBaseMapFile {
-  private static final Log LOG = LogFactory.getLog(BloomFilterMapFile.class);
+  @SuppressWarnings("hiding")
+  static final Log LOG = LogFactory.getLog(BloomFilterMapFile.class);
   protected static final String BLOOMFILTER_FILE_NAME = "filter";
 
   public static class Reader extends HBaseReader {
@@ -148,7 +151,6 @@
      * @param hri
      * @throws IOException
      */
-    @SuppressWarnings("unchecked")
     public Writer(Configuration conf, FileSystem fs, String dirName,
       SequenceFile.CompressionType compression, final boolean filter,
       int nrows, final HRegionInfo hri)

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HBaseMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HBaseMapFile.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HBaseMapFile.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HBaseMapFile.java Mon Jan 19 22:32:36 2009
@@ -21,8 +21,6 @@
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -35,7 +33,8 @@
  * HBase customizations of MapFile.
  */
 public class HBaseMapFile extends MapFile {
-  private static final Log LOG = LogFactory.getLog(HBaseMapFile.class);
+  // TODO: not used; remove?
+  //  private static final Log LOG = LogFactory.getLog(HBaseMapFile.class);
   
   /**
    * Values are instances of this class.

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HalfMapFileReader.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HalfMapFileReader.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HalfMapFileReader.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HalfMapFileReader.java Mon Jan 19 22:32:36 2009
@@ -43,6 +43,7 @@
  * 
  * <p>This file is not splitable.  Calls to {@link #midKey()} return null.
  */
+// TODO: fix the generic warnings inherited from MapFile methods
 public class HalfMapFileReader extends BloomFilterMapFile.Reader {
   private final boolean top;
   private final HStoreKey midkey;
@@ -76,7 +77,6 @@
    * @param hri
    * @throws IOException
    */
-  @SuppressWarnings("unchecked")
   public HalfMapFileReader(final FileSystem fs, final String dirName, 
       final Configuration conf, final Range r,
       final WritableComparable<HStoreKey> mk, final boolean filter,
@@ -164,7 +164,7 @@
     return closest;
   }
 
-  @SuppressWarnings({"unused", "unchecked"})
+  @SuppressWarnings("unchecked")
   @Override
   public synchronized WritableComparable midKey() throws IOException {
     // Returns null to indicate file is not splitable.

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java Mon Jan 19 22:32:36 2009
@@ -45,7 +45,7 @@
  * if passed a value type that it has not already been told about. Its  been
  * primed with hbase Writables and byte [].  Keys are always byte arrays.
  *
- * @param <byte []> key
+ * @param <byte []> key  TODO: parameter K is never used and could be removed.
  * @param <V> value Expects a Writable or byte [].
  */
 public class HbaseMapWritable <K, V>
@@ -164,13 +164,13 @@
   // Writable
 
   /** @return the Class class for the specified id */
-  @SuppressWarnings({ "unchecked", "boxing" })
+  @SuppressWarnings("boxing")
   protected Class<?> getClass(byte id) {
     return CODE_TO_CLASS.get(id);
   }
 
   /** @return the id for the specified Class */
-  @SuppressWarnings({ "unchecked", "boxing" })
+  @SuppressWarnings("boxing")
   protected byte getId(Class<?> clazz) {
     Byte b = CLASS_TO_CODE.get(clazz);
     if (b == null) {

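Only the "boxing" suppression survives on getClass/getId because the byte id is auto-boxed into a Byte-keyed map; once the maps are fully parameterized, "unchecked" no longer applies. A hedged sketch of such a two-way registry, boxing explicitly so no suppression is needed at all:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative byte <-> Class registry in the style of HbaseMapWritable.
    public class ClassRegistry {
      private static final Map<Byte, Class<?>> CODE_TO_CLASS =
          new HashMap<Byte, Class<?>>();
      private static final Map<Class<?>, Byte> CLASS_TO_CODE =
          new HashMap<Class<?>, Byte>();

      static void register(byte code, Class<?> clazz) {
        CODE_TO_CLASS.put(Byte.valueOf(code), clazz); // explicit boxing
        CLASS_TO_CODE.put(clazz, Byte.valueOf(code));
      }

      static Class<?> classForCode(byte id) {
        return CODE_TO_CLASS.get(Byte.valueOf(id));
      }

      static byte codeForClass(Class<?> clazz) {
        Byte b = CLASS_TO_CODE.get(clazz);
        if (b == null) {
          throw new IllegalArgumentException("Unregistered class: " + clazz);
        }
        return b.byteValue();
      }
    }
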
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java Mon Jan 19 22:32:36 2009
@@ -40,7 +40,6 @@
 import org.apache.hadoop.hbase.filter.RowFilterSet;
 import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.ObjectWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
@@ -243,51 +242,54 @@
                                  Configuration conf)
   throws IOException {
 
-    if (instance == null) {                       // null
-      instance = new NullInstance(declaredClass, conf);
-      declaredClass = Writable.class;
+    Object instanceObj = instance;
+    Class declClass = declaredClass;
+    
+    if (instanceObj == null) {                       // null
+      instanceObj = new NullInstance(declClass, conf);
+      declClass = Writable.class;
     }
-    writeClassCode(out, declaredClass);
-    if (declaredClass.isArray()) {                // array
+    writeClassCode(out, declClass);
+    if (declClass.isArray()) {                // array
       // If bytearray, just dump it out -- avoid the recursion and
       // byte-at-a-time we were previously doing.
-      if (declaredClass.equals(byte [].class)) {
-        Bytes.writeByteArray(out, (byte [])instance);
+      if (declClass.equals(byte [].class)) {
+        Bytes.writeByteArray(out, (byte [])instanceObj);
       } else {
-        int length = Array.getLength(instance);
+        int length = Array.getLength(instanceObj);
         out.writeInt(length);
         for (int i = 0; i < length; i++) {
-          writeObject(out, Array.get(instance, i),
-                    declaredClass.getComponentType(), conf);
+          writeObject(out, Array.get(instanceObj, i),
+                    declClass.getComponentType(), conf);
         }
       }
-    } else if (declaredClass == String.class) {   // String
-      Text.writeString(out, (String)instance);
-    } else if (declaredClass.isPrimitive()) {     // primitive type
-      if (declaredClass == Boolean.TYPE) {        // boolean
-        out.writeBoolean(((Boolean)instance).booleanValue());
-      } else if (declaredClass == Character.TYPE) { // char
-        out.writeChar(((Character)instance).charValue());
-      } else if (declaredClass == Byte.TYPE) {    // byte
-        out.writeByte(((Byte)instance).byteValue());
-      } else if (declaredClass == Short.TYPE) {   // short
-        out.writeShort(((Short)instance).shortValue());
-      } else if (declaredClass == Integer.TYPE) { // int
-        out.writeInt(((Integer)instance).intValue());
-      } else if (declaredClass == Long.TYPE) {    // long
-        out.writeLong(((Long)instance).longValue());
-      } else if (declaredClass == Float.TYPE) {   // float
-        out.writeFloat(((Float)instance).floatValue());
-      } else if (declaredClass == Double.TYPE) {  // double
-        out.writeDouble(((Double)instance).doubleValue());
-      } else if (declaredClass == Void.TYPE) {    // void
+    } else if (declClass == String.class) {   // String
+      Text.writeString(out, (String)instanceObj);
+    } else if (declClass.isPrimitive()) {     // primitive type
+      if (declClass == Boolean.TYPE) {        // boolean
+        out.writeBoolean(((Boolean)instanceObj).booleanValue());
+      } else if (declClass == Character.TYPE) { // char
+        out.writeChar(((Character)instanceObj).charValue());
+      } else if (declClass == Byte.TYPE) {    // byte
+        out.writeByte(((Byte)instanceObj).byteValue());
+      } else if (declClass == Short.TYPE) {   // short
+        out.writeShort(((Short)instanceObj).shortValue());
+      } else if (declClass == Integer.TYPE) { // int
+        out.writeInt(((Integer)instanceObj).intValue());
+      } else if (declClass == Long.TYPE) {    // long
+        out.writeLong(((Long)instanceObj).longValue());
+      } else if (declClass == Float.TYPE) {   // float
+        out.writeFloat(((Float)instanceObj).floatValue());
+      } else if (declClass == Double.TYPE) {  // double
+        out.writeDouble(((Double)instanceObj).doubleValue());
+      } else if (declClass == Void.TYPE) {    // void
       } else {
-        throw new IllegalArgumentException("Not a primitive: "+declaredClass);
+        throw new IllegalArgumentException("Not a primitive: "+declClass);
       }
-    } else if (declaredClass.isEnum()) {         // enum
-      Text.writeString(out, ((Enum)instance).name());
-    } else if (Writable.class.isAssignableFrom(declaredClass)) { // Writable
-      Class <?> c = instance.getClass();
+    } else if (declClass.isEnum()) {         // enum
+      Text.writeString(out, ((Enum)instanceObj).name());
+    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
+      Class <?> c = instanceObj.getClass();
       Byte code = CLASS_TO_CODE.get(c);
       if (code == null) {
         out.writeByte(NOT_ENCODED);
@@ -295,9 +297,9 @@
       } else {
         writeClassCode(out, c);
       }
-      ((Writable)instance).write(out);
+      ((Writable)instanceObj).write(out);
     } else {
-      throw new IOException("Can't write: "+instance+" as "+declaredClass);
+      throw new IOException("Can't write: "+instanceObj+" as "+declClass);
     }
   }
   

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java Mon Jan 19 22:32:36 2009
@@ -36,7 +36,7 @@
  * capacity as {@link org.apache.hadoop.io.BytesWritable} does. Hence its
  * comparatively 'immutable'.
  */
-public class ImmutableBytesWritable implements WritableComparable {
+public class ImmutableBytesWritable implements WritableComparable<ImmutableBytesWritable> {
   private byte[] bytes;
   
   /**
@@ -129,8 +129,8 @@
    * @return Positive if left is bigger than right, 0 if they are equal, and
    *         negative if left is smaller than right.
    */
-  public int compareTo(Object right_obj) {
-    return compareTo(((ImmutableBytesWritable)right_obj).get());
+  public int compareTo(ImmutableBytesWritable right_obj) {
+    return compareTo(right_obj.get());
   }
   
   /**
@@ -153,7 +153,7 @@
       return compareTo((byte [])right_obj) == 0;
     }
     if (right_obj instanceof ImmutableBytesWritable) {
-      return compareTo(right_obj) == 0;
+      return compareTo((ImmutableBytesWritable)right_obj) == 0;
     }
     return false;
   }

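Parameterizing WritableComparable lets compareTo() take an ImmutableBytesWritable directly, removing both the cast and the raw-type warning; equals() then needs the explicit cast shown above to select the typed overload. A dependency-free sketch of the same shape, using Comparable in place of WritableComparable:

    import java.util.Arrays;

    // Hedged sketch; Comparable<T> plays the role of WritableComparable<T>.
    public class BytesHolder implements Comparable<BytesHolder> {
      private final byte[] bytes;

      public BytesHolder(byte[] bytes) {
        this.bytes = bytes;
      }

      public byte[] get() {
        return bytes;
      }

      public int compareTo(BytesHolder right) {
        return compareBytes(this.bytes, right.get()); // typed: no cast needed
      }

      @Override
      public boolean equals(Object right) {
        return right instanceof BytesHolder
            && compareTo((BytesHolder) right) == 0; // cast picks the typed overload
      }

      @Override
      public int hashCode() {
        return Arrays.hashCode(bytes);
      }

      private static int compareBytes(byte[] left, byte[] right) {
        int n = Math.min(left.length, right.length);
        for (int i = 0; i < n; i++) {
          int diff = (left[i] & 0xff) - (right[i] & 0xff);
          if (diff != 0) {
            return diff;
          }
        }
        return left.length - right.length;
      }
    }
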
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java?rev=735946&r1=735945&r2=735946&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java Mon Jan 19 22:32:36 2009
@@ -71,13 +71,13 @@
   // Map interface
   // 
   
-  public Cell put(@SuppressWarnings("unused") byte [] key,
-    @SuppressWarnings("unused") Cell value) {
+  public Cell put(byte [] key,
+    Cell value) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
   @SuppressWarnings("unchecked")
-  public void putAll(@SuppressWarnings("unused") Map map) {
+  public void putAll(Map map) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
@@ -85,7 +85,7 @@
     return this.cells.get(key);
   }
 
-  public Cell remove(@SuppressWarnings("unused") Object key) {
+  public Cell remove(Object key) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
@@ -97,7 +97,7 @@
     return cells.containsKey(Bytes.toBytes(key));
   }
 
-  public boolean containsValue(@SuppressWarnings("unused") Object value) {
+  public boolean containsValue(Object value) {
     throw new UnsupportedOperationException("Don't support containsValue!");
   }
 
@@ -188,7 +188,7 @@
       this.cell = cell;
     }
     
-    public Cell setValue(@SuppressWarnings("unused") Cell c) {
+    public Cell setValue(Cell c) {
       throw new UnsupportedOperationException("RowResult is read-only!");
     }