You are viewing a plain-text version of this content; the canonical link to the original message was removed during plain-text conversion.
Posted to commits@hbase.apache.org by ji...@apache.org on 2008/09/09 22:36:53 UTC
svn commit: r693597 [1/3] - in /hadoop/hbase/trunk: ./
src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/
src/java/org/apache/hadoop/hbase/client/transactional/
src/java/org/apache/hadoop/hbase/filter/ src/java/org/apache/hadoop...
Author: jimk
Date: Tue Sep 9 13:36:49 2008
New Revision: 693597
URL: http://svn.apache.org/viewvc?rev=693597&view=rev
Log:
HBASE-465 Fix javadoc for all public declarations
Modified:
hadoop/hbase/trunk/CHANGES.txt
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Chore.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ColumnNameParseException.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DoNotRetryIOException.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DroppedSnapshotException.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionLocation.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerAddress.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerInfo.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerLoad.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LeaseException.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Leases.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/RegionHistorian.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerCallable.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/LocalTransactionLogger.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionLogger.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionManager.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionalTable.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HbaseRPC.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/TransactionalRegionInterface.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/LuceneDocumentWrapper.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableSplit.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/DeleteColumn.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/HMaster.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/MetaRegion.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/MetaScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ModifyColumn.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionClose.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessRegionOpen.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ProcessServerShutdown.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RegionServerOperation.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/RootScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/ServerManager.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/TableOperation.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/BeforeThisStoreKey.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Flusher.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLog.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogEdit.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HLogKey.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStore.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HStoreScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/Memcache.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/CleanOldTransactionsChore.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionState.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalHLogManager.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegionServer.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/Dispatcher.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/JenkinsHash.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftSortedMap.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/VersionInfo.java
hadoop/hbase/trunk/src/java/org/apache/hadoop/ipc/HBaseClient.java
hadoop/hbase/trunk/src/java/org/onelab/filter/BloomFilter.java
hadoop/hbase/trunk/src/java/org/onelab/filter/CountingBloomFilter.java
hadoop/hbase/trunk/src/java/org/onelab/filter/DynamicBloomFilter.java
hadoop/hbase/trunk/src/java/org/onelab/filter/Filter.java
hadoop/hbase/trunk/src/java/org/onelab/filter/Key.java
hadoop/hbase/trunk/src/java/org/onelab/filter/RetouchedBloomFilter.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/DFSAbort.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestClassMigration.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestListTables.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestInclusiveStopRowFilter.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestPageRowFilter.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestRegExpRowFilter.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestRowFilterAfterWrite.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestRowFilterSet.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestStopRowFilter.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/filter/TestWhileMatchRowFilter.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestBloomFilters.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHMemcache.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/regionserver/transactional/TestTransactionalHLogManager.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestKeying.java
hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/util/TestMergeTool.java
Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Tue Sep 9 13:36:49 2008
@@ -82,6 +82,7 @@
shell or thrift server, etc. (Jonathan Gray via Jim Kellerman)
HBASE-871 Major compaction periodicity should be specifyable at the column
family level, not cluster wide (Jonathan Gray via Stack)
+ HBASE-465 Fix javadoc for all public declarations
NEW FEATURES
HBASE-787 Postgresql to HBase table replication example (Tim Sell via Stack)
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Chore.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Chore.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Chore.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Chore.java Tue Sep 9 13:36:49 2008
@@ -49,7 +49,6 @@
this.stop = s;
}
- /** {@inheritDoc} */
@Override
public void run() {
try {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ColumnNameParseException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ColumnNameParseException.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ColumnNameParseException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ColumnNameParseException.java Tue Sep 9 13:36:49 2008
@@ -22,11 +22,16 @@
/**
* Thrown if issue with passed column name.
*/
+@SuppressWarnings("serial")
public class ColumnNameParseException extends DoNotRetryIOException {
+ /** default constructor */
public ColumnNameParseException() {
super();
}
+ /**
+ * @param message
+ */
public ColumnNameParseException(String message) {
super(message);
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DoNotRetryIOException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DoNotRetryIOException.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DoNotRetryIOException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DoNotRetryIOException.java Tue Sep 9 13:36:49 2008
@@ -25,11 +25,18 @@
* Subclass if exception is not meant to be retried: e.g.
* {@link UnknownScannerException}
*/
+@SuppressWarnings("serial")
public class DoNotRetryIOException extends IOException {
+ /**
+ * default constructor
+ */
public DoNotRetryIOException() {
super();
}
+ /**
+ * @param message
+ */
public DoNotRetryIOException(String message) {
super(message);
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DroppedSnapshotException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DroppedSnapshotException.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DroppedSnapshotException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/DroppedSnapshotException.java Tue Sep 9 13:36:49 2008
@@ -21,11 +21,18 @@
* Thrown during flush if the possibility snapshot content was not properly
* persisted into store files. Response should include replay of hlog content.
*/
+@SuppressWarnings("serial")
public class DroppedSnapshotException extends IOException {
+ /**
+ * @param msg
+ */
public DroppedSnapshotException(String msg) {
super(msg);
}
+ /**
+ * default constructor
+ */
public DroppedSnapshotException() {
super();
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java Tue Sep 9 13:36:49 2008
@@ -62,13 +62,13 @@
BLOCK
}
- public static final String COMPRESSION = "COMPRESSION";
- public static final String BLOCKCACHE = "BLOCKCACHE";
- public static final String LENGTH = "LENGTH";
- public static final String TTL = "TTL";
- public static final String BLOOMFILTER = "BLOOMFILTER";
- public static final String FOREVER = "FOREVER";
- public static final String MAPFILE_INDEX_INTERVAL =
+ public static final String COMPRESSION = "COMPRESSION"; //TODO: change to protected
+ public static final String BLOCKCACHE = "BLOCKCACHE"; //TODO: change to protected
+ public static final String LENGTH = "LENGTH"; //TODO: change to protected
+ public static final String TTL = "TTL"; //TODO: change to protected
+ public static final String BLOOMFILTER = "BLOOMFILTER"; //TODO: change to protected
+ public static final String FOREVER = "FOREVER"; //TODO: change to protected
+ public static final String MAPFILE_INDEX_INTERVAL = //TODO: change to protected
"MAPFILE_INDEX_INTERVAL";
/**
@@ -455,7 +455,6 @@
setValue(MAPFILE_INDEX_INTERVAL, Integer.toString(interval));
}
- /** {@inheritDoc} */
@Override
public String toString() {
StringBuffer s = new StringBuffer();
@@ -476,13 +475,11 @@
return s.toString();
}
- /** {@inheritDoc} */
@Override
public boolean equals(Object obj) {
return compareTo(obj) == 0;
}
- /** {@inheritDoc} */
@Override
public int hashCode() {
int result = Bytes.hashCode(this.name);
@@ -493,7 +490,7 @@
// Writable
- /** {@inheritDoc} */
+ @SuppressWarnings("deprecation")
public void readFields(DataInput in) throws IOException {
int version = in.readByte();
if (version < 6) {
@@ -543,7 +540,6 @@
}
}
- /** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
out.writeByte(COLUMN_DESCRIPTOR_VERSION);
Bytes.writeByteArray(out, this.name);
@@ -557,7 +553,6 @@
// Comparable
- /** {@inheritDoc} */
public int compareTo(Object o) {
HColumnDescriptor other = (HColumnDescriptor)o;
int result = Bytes.compareTo(this.name, other.getName());
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java Tue Sep 9 13:36:49 2008
@@ -29,8 +29,12 @@
/** long constant for zero */
static final Long ZERO_L = Long.valueOf(0L);
-
+
+ //TODO: NINES is only used in HBaseAdmin and HConnectionManager. Move to client
+ // package and change visibility to default
static final String NINES = "99999999999999";
+ //TODO: ZEROS is only used in HConnectionManager and MetaScanner. Move to
+ // client package and change visibility to default
static final String ZEROES = "00000000000000";
// For migration
@@ -224,11 +228,18 @@
* Unlimited time-to-live.
*/
static final int FOREVER = -1;
-
+
+ //TODO: HBASE_CLIENT_RETRIES_NUMBER_KEY is only used by TestMigrate. Move it
+ // there.
public static final String HBASE_CLIENT_RETRIES_NUMBER_KEY =
"hbase.client.retries.number";
+ //TODO: DEFAULT_CLIENT_RETRIES is not referenced anywhere. Remove it.
public static final int DEFAULT_CLIENT_RETRIES = 5;
+ //TODO: although the following are referenced widely to format strings for
+ // the shell. They really aren't a part of the public API. It would be
+ // nice if we could put them somewhere where they did not need to be
+ // public. They could have package visibility
public static final String NAME = "NAME";
public static final String VERSIONS = "VERSIONS";
public static final String IN_MEMORY = "IN_MEMORY";
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HMsg.java Tue Sep 9 13:36:49 2008
@@ -40,6 +40,7 @@
* Message types sent between master and regionservers
*/
public static enum Type {
+ /** null message */
MSG_NONE,
// Message types sent from master to region server
@@ -100,14 +101,19 @@
private byte[] message = null;
// Some useful statics. Use these rather than create a new HMsg each time.
+ //TODO: move the following to HRegionServer
public static final HMsg REPORT_EXITING = new HMsg(Type.MSG_REPORT_EXITING);
public static final HMsg REPORT_QUIESCED = new HMsg(Type.MSG_REPORT_QUIESCED);
+ //TODO: Move to o.a.h.h.master
public static final HMsg REGIONSERVER_QUIESCE =
new HMsg(Type.MSG_REGIONSERVER_QUIESCE);
+ //TODO: Move to o.a.h.h.master
public static final HMsg REGIONSERVER_STOP =
new HMsg(Type.MSG_REGIONSERVER_STOP);
+ //TODO: Move to o.a.h.h.master
public static final HMsg CALL_SERVER_STARTUP =
new HMsg(Type.MSG_CALL_SERVER_STARTUP);
+ //TODO: Move to o.a.h.h.master
public static final HMsg [] EMPTY_HMSG_ARRAY = new HMsg[0];
@@ -160,6 +166,7 @@
return this.info;
}
+ /** @return the type of message */
public Type getType() {
return this.type;
}
@@ -171,14 +178,12 @@
public boolean isType(final HMsg.Type other) {
return this.type.equals(other);
}
-
+
+ /** @return the message type */
public byte[] getMessage() {
return this.message;
}
- /**
- * {@inheritDoc}
- */
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
@@ -215,9 +220,6 @@
// Writable
//////////////////////////////////////////////////////////////////////////////
- /**
- * {@inheritDoc}
- */
public void write(DataOutput out) throws IOException {
out.writeInt(this.type.ordinal());
this.info.write(out);
@@ -229,9 +231,6 @@
}
}
- /**
- * {@inheritDoc}
- */
public void readFields(DataInput in) throws IOException {
int ordinal = in.readInt();
this.type = HMsg.Type.values()[ordinal];
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java Tue Sep 9 13:36:49 2008
@@ -65,6 +65,7 @@
private byte [] startKey = HConstants.EMPTY_BYTE_ARRAY;
protected HTableDescriptor tableDesc = null;
private int hashCode = -1;
+ //TODO: Move NO_HASH to HStoreFile which is really the only place it is used.
public static final int NO_HASH = -1;
private volatile int encodedName = NO_HASH;
@@ -308,9 +309,6 @@
this.offLine = offLine;
}
- /**
- * {@inheritDoc}
- */
@Override
public String toString() {
return "REGION => {" + HConstants.NAME + " => '" +
@@ -323,17 +321,11 @@
" TABLE => {" + this.tableDesc.toString() + "}";
}
- /**
- * {@inheritDoc}
- */
@Override
public boolean equals(Object o) {
return this.compareTo(o) == 0;
}
- /**
- * {@inheritDoc}
- */
@Override
public int hashCode() {
return this.hashCode;
@@ -349,7 +341,6 @@
// Writable
//
- /** {@inheritDoc} */
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
@@ -363,7 +354,6 @@
out.writeInt(hashCode);
}
- /** {@inheritDoc} */
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
@@ -382,9 +372,6 @@
// Comparable
//
- /**
- * {@inheritDoc}
- */
public int compareTo(Object o) {
HRegionInfo other = (HRegionInfo) o;
if (other == null) {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionLocation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionLocation.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionLocation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionLocation.java Tue Sep 9 13:36:49 2008
@@ -39,26 +39,17 @@
this.serverAddress = serverAddress;
}
- /**
- * {@inheritDoc}
- */
@Override
public String toString() {
return "address: " + this.serverAddress.toString() + ", regioninfo: " +
this.regionInfo;
}
- /**
- * {@inheritDoc}
- */
@Override
public boolean equals(Object o) {
return this.compareTo(o) == 0;
}
- /**
- * {@inheritDoc}
- */
@Override
public int hashCode() {
int result = this.regionInfo.hashCode();
@@ -80,9 +71,6 @@
// Comparable
//
- /**
- * {@inheritDoc}
- */
public int compareTo(Object o) {
HRegionLocation other = (HRegionLocation) o;
int result = this.regionInfo.compareTo(other.regionInfo);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerAddress.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerAddress.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerAddress.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerAddress.java Tue Sep 9 13:36:49 2008
@@ -109,25 +109,16 @@
return address;
}
- /**
- * {@inheritDoc}
- */
@Override
public String toString() {
return (stringValue == null ? "" : stringValue);
}
- /**
- * {@inheritDoc}
- */
@Override
public boolean equals(Object o) {
return this.compareTo(o) == 0;
}
- /**
- * {@inheritDoc}
- */
@Override
public int hashCode() {
int result = this.address.hashCode();
@@ -139,9 +130,6 @@
// Writable
//
- /**
- * {@inheritDoc}
- */
public void readFields(DataInput in) throws IOException {
String bindAddress = in.readUTF();
int port = in.readInt();
@@ -156,9 +144,6 @@
}
}
- /**
- * {@inheritDoc}
- */
public void write(DataOutput out) throws IOException {
if(address == null) {
out.writeUTF("");
@@ -174,9 +159,6 @@
// Comparable
//
- /**
- * {@inheritDoc}
- */
public int compareTo(Object o) {
HServerAddress that = (HServerAddress)o;
// Addresses as Strings may not compare though address is for the one
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerInfo.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerInfo.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerInfo.java Tue Sep 9 13:36:49 2008
@@ -115,20 +115,17 @@
this.startCode = startCode;
}
- /** {@inheritDoc} */
@Override
public String toString() {
return "address: " + this.serverAddress + ", startcode: " + this.startCode
+ ", load: (" + this.load.toString() + ")";
}
- /** {@inheritDoc} */
@Override
public boolean equals(Object obj) {
return compareTo(obj) == 0;
}
- /** {@inheritDoc} */
@Override
public int hashCode() {
int result = this.serverAddress.hashCode();
@@ -140,7 +137,6 @@
// Writable
- /** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
this.serverAddress.readFields(in);
this.startCode = in.readLong();
@@ -148,7 +144,6 @@
this.infoPort = in.readInt();
}
- /** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
this.serverAddress.write(out);
out.writeLong(this.startCode);
@@ -156,7 +151,6 @@
out.writeInt(this.infoPort);
}
- /** {@inheritDoc} */
public int compareTo(Object o) {
HServerInfo that = (HServerInfo)o;
int result = getServerAddress().compareTo(that.getServerAddress());
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerLoad.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerLoad.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerLoad.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HServerLoad.java Tue Sep 9 13:36:49 2008
@@ -74,7 +74,6 @@
return numberOfRegions;
}
- /** {@inheritDoc} */
@Override
public String toString() {
return toString(1);
@@ -89,13 +88,11 @@
return "requests: " + numberOfRequests/msgInterval + " regions: " + numberOfRegions;
}
- /** {@inheritDoc} */
@Override
public boolean equals(Object o) {
return compareTo(o) == 0;
}
- /** {@inheritDoc} */
@Override
public int hashCode() {
int result = Integer.valueOf(numberOfRequests).hashCode();
@@ -137,13 +134,11 @@
// Writable
- /** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
numberOfRequests = in.readInt();
numberOfRegions = in.readInt();
}
- /** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
out.writeInt(numberOfRequests);
out.writeInt(numberOfRegions);
@@ -151,7 +146,6 @@
// Comparable
- /** {@inheritDoc} */
public int compareTo(Object o) {
HServerLoad other = (HServerLoad) o;
return this.getLoad() - other.getLoad();
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreKey.java Tue Sep 9 13:36:49 2008
@@ -315,20 +315,17 @@
delimiterIndex) == 0;
}
- /** {@inheritDoc} */
@Override
public String toString() {
return Bytes.toString(this.row) + "/" + Bytes.toString(this.column) + "/" +
timestamp;
}
- /** {@inheritDoc} */
@Override
public boolean equals(Object obj) {
return compareTo(obj) == 0;
}
- /** {@inheritDoc} */
@Override
public int hashCode() {
int result = Bytes.hashCode(this.row);
@@ -339,7 +336,6 @@
// Comparable
- /** {@inheritDoc} */
public int compareTo(Object o) {
return compareTo(this.regionInfo, this, (HStoreKey)o);
}
@@ -556,14 +552,12 @@
// Writable
- /** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
Bytes.writeByteArray(out, this.row);
Bytes.writeByteArray(out, this.column);
out.writeLong(timestamp);
}
- /** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
this.row = Bytes.readByteArray(in);
this.column = Bytes.readByteArray(in);
@@ -576,12 +570,15 @@
public static class HStoreKeyWritableComparator extends WritableComparator {
private final HRegionInfo hri;
+ /** @param hri */
public HStoreKeyWritableComparator(final HRegionInfo hri) {
super(HStoreKey.class);
this.hri = hri;
}
- public int compare(final WritableComparable left, final WritableComparable right) {
+ @Override
+ public int compare(final WritableComparable left,
+ final WritableComparable right) {
return compareTo(this.hri, (HStoreKey)left, (HStoreKey)right);
}
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java Tue Sep 9 13:36:49 2008
@@ -65,6 +65,8 @@
// Table metadata
protected Map<ImmutableBytesWritable,ImmutableBytesWritable> values =
new HashMap<ImmutableBytesWritable,ImmutableBytesWritable>();
+
+ //TODO: Why can't the following be private? They are only used within this class.
public static final String FAMILIES = "FAMILIES";
@@ -79,6 +81,8 @@
public static final boolean DEFAULT_READONLY = false;
public static final int DEFAULT_MEMCACHE_FLUSH_SIZE = 1024*1024*64;
+
+ // End TODO:
// Key is hash of the family name.
private final Map<Integer, HColumnDescriptor> families =
@@ -434,13 +438,11 @@
return s.toString();
}
- /** {@inheritDoc} */
@Override
public boolean equals(Object obj) {
return compareTo(obj) == 0;
}
- /** {@inheritDoc} */
@Override
public int hashCode() {
int result = Bytes.hashCode(this.name);
@@ -456,7 +458,6 @@
// Writable
- /** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
int version = in.readInt();
if (version < 3)
@@ -484,7 +485,6 @@
}
}
- /** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
out.writeInt(TABLE_DESCRIPTOR_VERSION);
Bytes.writeByteArray(out, name);
@@ -506,7 +506,6 @@
// Comparable
- /** {@inheritDoc} */
public int compareTo(Object o) {
HTableDescriptor other = (HTableDescriptor) o;
int result = Bytes.compareTo(this.name, other.name);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LeaseException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LeaseException.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LeaseException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LeaseException.java Tue Sep 9 13:36:49 2008
@@ -19,11 +19,19 @@
*/
package org.apache.hadoop.hbase;
+/**
+ * Reports a problem with a lease
+ */
+@SuppressWarnings("serial")
public class LeaseException extends DoNotRetryIOException {
+ /** default constructor */
public LeaseException() {
super();
}
+ /**
+ * @param message
+ */
public LeaseException(String message) {
super(message);
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Leases.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Leases.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Leases.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/Leases.java Tue Sep 9 13:36:49 2008
@@ -66,7 +66,6 @@
this.leaseCheckFrequency = leaseCheckFrequency;
}
- /** {@inheritDoc} */
@Override
public void run() {
while (!stopRequested || (stopRequested && leaseQueue.size() > 0) ) {
@@ -149,13 +148,18 @@
* Thrown if we are asked create a lease but lease on passed name already
* exists.
*/
+ @SuppressWarnings("serial")
public static class LeaseStillHeldException extends IOException {
private final String leaseName;
+ /**
+ * @param name
+ */
public LeaseStillHeldException(final String name) {
this.leaseName = name;
}
+ /** @return name of lease */
public String getName() {
return this.leaseName;
}
@@ -218,25 +222,21 @@
return this.listener;
}
- /** {@inheritDoc} */
@Override
public boolean equals(Object obj) {
return this.hashCode() == ((Lease) obj).hashCode();
}
- /** {@inheritDoc} */
@Override
public int hashCode() {
return this.leaseName.hashCode();
}
- /** {@inheritDoc} */
public long getDelay(TimeUnit unit) {
return unit.convert(this.expirationTime - System.currentTimeMillis(),
TimeUnit.MILLISECONDS);
}
- /** {@inheritDoc} */
public int compareTo(Delayed o) {
long delta = this.getDelay(TimeUnit.MILLISECONDS) -
o.getDelay(TimeUnit.MILLISECONDS);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/LocalHBaseCluster.java Tue Sep 9 13:36:49 2008
@@ -84,6 +84,7 @@
* @param noRegionServers Count of regionservers to start.
* @throws IOException
*/
+ @SuppressWarnings("unchecked")
public LocalHBaseCluster(final HBaseConfiguration conf,
final int noRegionServers)
throws IOException {
@@ -284,6 +285,10 @@
" " + this.regionThreads.size() + " region server(s)");
}
+ /**
+ * @param t
+ * @throws InterruptedException
+ */
public void threadDumpingJoin(final Thread t) throws InterruptedException {
if (t == null) {
return;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/RegionHistorian.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/RegionHistorian.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/RegionHistorian.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/RegionHistorian.java Tue Sep 9 13:36:49 2008
@@ -56,6 +56,8 @@
private static SimpleDateFormat dateFormat = new SimpleDateFormat(
"EEE, d MMM yyyy HH:mm:ss");
+ //TODO: Why is this public? Appears to only apply internally.
+
public static enum HistorianColumnKey {
REGION_CREATION ( Bytes.toBytes(COLUMN_FAMILY_HISTORIAN_STR+"creation")),
REGION_OPEN ( Bytes.toBytes(COLUMN_FAMILY_HISTORIAN_STR+"open")),
@@ -132,6 +134,7 @@
/**
* Method to add a creation event to the row in the .META table
* @param info
+ * @param serverName
*/
public void addRegionAssignment(HRegionInfo info, String serverName) {
add(HistorianColumnKey.REGION_ASSIGNMENT.key, "Region assigned to server "
@@ -175,6 +178,7 @@
/**
* Method to add a compaction event to the row in the .META table
* @param info
+ * @param timeTaken
*/
public void addRegionCompaction(final HRegionInfo info,
final String timeTaken) {
@@ -191,6 +195,7 @@
/**
* Method to add a flush event to the row in the .META table
* @param info
+ * @param timeTaken
*/
public void addRegionFlush(HRegionInfo info,
@SuppressWarnings("unused") String timeTaken) {
@@ -249,6 +254,11 @@
private String description;
+ /**
+ * @param timestamp
+ * @param event
+ * @param description
+ */
public RegionHistoryInformation(long timestamp, String event,
String description) {
this.timestamp = timestamp;
@@ -256,21 +266,21 @@
this.description = description;
}
- /**
- * Returns the inverse value of Long.compareTo
- */
public int compareTo(RegionHistoryInformation otherInfo) {
return -1 * Long.valueOf(timestamp).compareTo(otherInfo.getTimestamp());
}
+ /** @return the event */
public String getEvent() {
return event;
}
+ /** @return the description */
public String getDescription() {
return description;
}
+ /** @return the timestamp */
public long getTimestamp() {
return timestamp;
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java Tue Sep 9 13:36:49 2008
@@ -173,7 +173,6 @@
return this.pause * HConstants.RETRY_BACKOFF[tries];
}
- /** {@inheritDoc} */
public HMasterInterface getMaster() throws MasterNotRunningException {
HServerAddress masterLocation = null;
synchronized (this.masterLock) {
@@ -224,7 +223,6 @@
return this.master;
}
- /** {@inheritDoc} */
public boolean isMasterRunning() {
if (this.master == null) {
try {
@@ -237,7 +235,6 @@
return true;
}
- /** {@inheritDoc} */
public boolean tableExists(final byte [] tableName)
throws MasterNotRunningException {
getMaster();
@@ -271,7 +268,6 @@
Bytes.equals(n, META_TABLE_NAME);
}
- /** {@inheritDoc} */
public HRegionLocation getRegionLocation(final byte [] name,
final byte [] row, boolean reload)
throws IOException {
@@ -279,7 +275,6 @@
return reload? relocateRegion(name, row): locateRegion(name, row);
}
- /** {@inheritDoc} */
public HTableDescriptor[] listTables() throws IOException {
getMaster();
final HashSet<HTableDescriptor> uniqueTables =
@@ -287,7 +282,6 @@
MetaScannerVisitor visitor = new MetaScannerVisitor() {
- /** {@inheritDoc} */
public boolean processRow(RowResult rowResult) throws IOException {
HRegionInfo info = Writables.getHRegionInfo(
rowResult.get(COL_REGIONINFO));
@@ -305,7 +299,6 @@
return uniqueTables.toArray(new HTableDescriptor[uniqueTables.size()]);
}
- /** {@inheritDoc} */
public boolean isTableEnabled(byte[] tableName) throws IOException {
if (!tableExists(tableName)) {
throw new TableNotFoundException(Bytes.toString(tableName));
@@ -371,6 +364,7 @@
implements MetaScanner.MetaScannerVisitor {
byte[] tableName;
HTableDescriptor result;
+ //TODO: change visibility to protected
public HTableDescriptorFinder(byte[] tableName) {
this.tableName = tableName;
}
@@ -389,7 +383,6 @@
}
}
- /** {@inheritDoc} */
public HTableDescriptor getHTableDescriptor(final byte[] tableName)
throws IOException {
if (Bytes.equals(tableName, HConstants.ROOT_TABLE_NAME)) {
@@ -407,7 +400,6 @@
return result;
}
- /** {@inheritDoc} */
public HRegionLocation locateRegion(final byte [] tableName,
final byte [] row)
throws IOException{
@@ -415,7 +407,6 @@
return locateRegion(tableName, row, true);
}
- /** {@inheritDoc} */
public HRegionLocation relocateRegion(final byte [] tableName,
final byte [] row)
throws IOException{
@@ -724,7 +715,6 @@
tableLocations.put(startKey, location);
}
- /** {@inheritDoc} */
public HRegionInterface getHRegionConnection(HServerAddress regionServer)
throws IOException {
getMaster();
@@ -850,7 +840,6 @@
HRegionInfo.ROOT_REGIONINFO, rootRegionAddress);
}
- /** {@inheritDoc} */
public <T> T getRegionServerWithRetries(ServerCallable<T> callable)
throws IOException, RuntimeException {
getMaster();
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java Tue Sep 9 13:36:49 2008
@@ -1059,7 +1059,9 @@
filter.validate(columns);
}
}
-
+
+ //TODO: change visibility to protected
+
public void initialize() throws IOException {
nextScanner();
}
@@ -1144,7 +1146,6 @@
return this.filter.filterAllRemaining();
}
- /** {@inheritDoc} */
public RowResult next() throws IOException {
if (this.closed) {
return null;
@@ -1160,9 +1161,6 @@
return null;
}
- /**
- * {@inheritDoc}
- */
public void close() {
if (callable != null) {
callable.setClose();
@@ -1179,7 +1177,6 @@
closed = true;
}
- /** {@inheritDoc} */
public Iterator<RowResult> iterator() {
return new Iterator<RowResult>() {
// The next RowResult, possibly pre-read
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/MetaScanner.java Tue Sep 9 13:36:49 2008
@@ -7,7 +7,7 @@
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Bytes; //TODO: remove
/**
* Scanner class that contains the <code>.META.</code> table scanning logic
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerCallable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerCallable.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerCallable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/ScannerCallable.java Tue Sep 9 13:36:49 2008
@@ -67,7 +67,6 @@
}
}
- /** {@inheritDoc} */
public RowResult call() throws IOException {
if (scannerId != -1L && closed) {
server.close(scannerId);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java Tue Sep 9 13:36:49 2008
@@ -2,53 +2,68 @@
import org.apache.hadoop.hbase.HColumnDescriptor;
+/**
+ * Immutable HColumnDescriptor
+ */
public class UnmodifyableHColumnDescriptor extends HColumnDescriptor {
+ /**
+ * @param desc
+ */
public UnmodifyableHColumnDescriptor (final HColumnDescriptor desc) {
super(desc);
}
@Override
+ @SuppressWarnings("unused")
public void setValue(byte[] key, byte[] value) {
throw new UnsupportedOperationException("HColumnDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setValue(String key, String value) {
throw new UnsupportedOperationException("HColumnDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setMaxVersions(int maxVersions) {
throw new UnsupportedOperationException("HColumnDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setInMemory(boolean inMemory) {
throw new UnsupportedOperationException("HColumnDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setBlockCacheEnabled(boolean blockCacheEnabled) {
throw new UnsupportedOperationException("HColumnDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setMaxValueLength(int maxLength) {
throw new UnsupportedOperationException("HColumnDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setTimeToLive(int timeToLive) {
throw new UnsupportedOperationException("HColumnDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setCompressionType(CompressionType type) {
throw new UnsupportedOperationException("HColumnDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setMapFileIndexInterval(int interval) {
throw new UnsupportedOperationException("HTableDescriptor is read-only");
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java Tue Sep 9 13:36:49 2008
@@ -27,6 +27,7 @@
* Read-only table descriptor.
*/
public class UnmodifyableHTableDescriptor extends HTableDescriptor {
+ /** Default constructor */
public UnmodifyableHTableDescriptor() {
super();
}
@@ -73,31 +74,37 @@
}
@Override
+ @SuppressWarnings("unused")
public void setInMemory(boolean inMemory) {
throw new UnsupportedOperationException("HTableDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setReadOnly(boolean readOnly) {
throw new UnsupportedOperationException("HTableDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setValue(byte[] key, byte[] value) {
throw new UnsupportedOperationException("HTableDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setValue(String key, String value) {
throw new UnsupportedOperationException("HTableDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setMaxFileSize(long maxFileSize) {
throw new UnsupportedOperationException("HTableDescriptor is read-only");
}
@Override
+ @SuppressWarnings("unused")
public void setMemcacheFlushSize(int memcacheFlushSize) {
throw new UnsupportedOperationException("HTableDescriptor is read-only");
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java Tue Sep 9 13:36:49 2008
@@ -22,20 +22,32 @@
/** Thrown when a transaction cannot be committed.
*
*/
+@SuppressWarnings("serial")
public class CommitUnsuccessfulException extends Exception {
+ /** Default Constructor */
public CommitUnsuccessfulException() {
super();
}
+ /**
+ * @param arg0 message
+ * @param arg1 cause
+ */
public CommitUnsuccessfulException(String arg0, Throwable arg1) {
super(arg0, arg1);
}
+ /**
+ * @param arg0 message
+ */
public CommitUnsuccessfulException(String arg0) {
super(arg0);
}
+ /**
+ * @param arg0 cause
+ */
public CommitUnsuccessfulException(Throwable arg0) {
super(arg0);
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/LocalTransactionLogger.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/LocalTransactionLogger.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/LocalTransactionLogger.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/LocalTransactionLogger.java Tue Sep 9 13:36:49 2008
@@ -33,6 +33,11 @@
private static LocalTransactionLogger instance;
+ /**
+ * Creates singleton if it does not exist
+ *
+ * @return reference to singleton
+ */
public synchronized static LocalTransactionLogger getInstance() {
if (instance == null) {
instance = new LocalTransactionLogger();
@@ -48,7 +53,7 @@
// Enforce singlton
}
- // Gives back random longs to minimize possibility of collision
+ /** @return random longs to minimize possibility of collision */
public long createNewTransactionLog() {
long id = random.nextLong();
transactionIdToStatusMap.put(id, TransactionStatus.PENDING);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionLogger.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionLogger.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionLogger.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionLogger.java Tue Sep 9 13:36:49 2008
@@ -26,8 +26,14 @@
*/
public interface TransactionLogger {
+ /** Transaction status values */
enum TransactionStatus {
- PENDING, COMMITTED, ABORTED
+ /** Transaction is pending */
+ PENDING,
+ /** Transaction was committed */
+ COMMITTED,
+ /** Transaction was aborted */
+ ABORTED
}
/**
@@ -38,8 +44,16 @@
*/
long createNewTransactionLog();
+ /**
+ * @param transactionId
+ * @return transaction status
+ */
TransactionStatus getStatusForTransaction(long transactionId);
+ /**
+ * @param transactionId
+ * @param status
+ */
void setStatusForTransaction(long transactionId, TransactionStatus status);
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionManager.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionManager.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionManager.java Tue Sep 9 13:36:49 2008
@@ -40,10 +40,17 @@
private final HConnection connection;
private final TransactionLogger transactionLogger;
+ /**
+ * @param conf
+ */
public TransactionManager(final HBaseConfiguration conf) {
this(LocalTransactionLogger.getInstance(), conf);
}
+ /**
+ * @param transactionLogger
+ * @param conf
+ */
public TransactionManager(final TransactionLogger transactionLogger,
final HBaseConfiguration conf) {
this.transactionLogger = transactionLogger;
@@ -66,6 +73,7 @@
*
* @param transactionState
* @throws IOException
+ * @throws CommitUnsuccessfulException
*/
public void tryCommit(final TransactionState transactionState)
throws CommitUnsuccessfulException, IOException {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionalTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionalTable.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionalTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/TransactionalTable.java Tue Sep 9 13:36:49 2008
@@ -40,11 +40,21 @@
*/
public class TransactionalTable extends HTable {
+ /**
+ * @param conf
+ * @param tableName
+ * @throws IOException
+ */
public TransactionalTable(final HBaseConfiguration conf,
final String tableName) throws IOException {
super(conf, tableName);
}
+ /**
+ * @param conf
+ * @param tableName
+ * @throws IOException
+ */
public TransactionalTable(final HBaseConfiguration conf,
final byte[] tableName) throws IOException {
super(conf, tableName);
@@ -66,6 +76,12 @@
}
}
+ /**
+ * @param connection
+ * @param tableName
+ * @param row
+ * @param transactionState
+ */
public TransactionalServerCallable(final HConnection connection,
final byte[] tableName, final byte[] row,
final TransactionState transactionState) {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java Tue Sep 9 13:36:49 2008
@@ -24,6 +24,7 @@
/**
* Thrown if a region server is passed an unknown transaction id
*/
+@SuppressWarnings("serial")
public class UnknownTransactionException extends DoNotRetryIOException {
/** constructor */
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java Tue Sep 9 13:36:49 2008
@@ -40,9 +40,20 @@
*/
public class ColumnValueFilter implements RowFilterInterface {
- /** Comparison operator. */
+ /** Comparison operators. */
public enum CompareOp {
- LESS, LESS_OR_EQUAL, EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER;
+ /** less than */
+ LESS,
+ /** less than or equal to */
+ LESS_OR_EQUAL,
+ /** equals */
+ EQUAL,
+ /** not equal */
+ NOT_EQUAL,
+ /** greater than or equal to */
+ GREATER_OR_EQUAL,
+ /** greater than */
+ GREATER;
}
private byte[] columnName;
@@ -82,12 +93,10 @@
this.comparator = comparator;
}
- /** {@inheritDoc} */
public boolean filterRowKey(@SuppressWarnings("unused") final byte[] rowKey) {
return false;
}
- /** {@inheritDoc} */
public boolean filterColumn(@SuppressWarnings("unused") final byte[] rowKey,
final byte[] colKey, final byte[] data) {
if (!Arrays.equals(colKey, columnName)) {
@@ -119,12 +128,10 @@
}
}
- /** {@inheritDoc} */
public boolean filterAllRemaining() {
return false;
}
- /** {@inheritDoc} */
public boolean filterRow(final SortedMap<byte[], Cell> columns) {
// Don't let rows through if they don't have the column we are checking
return !columns.containsKey(columnName);
@@ -141,28 +148,23 @@
return b1.length - b2.length;
}
- /** {@inheritDoc} */
public boolean processAlways() {
return false;
}
- /** {@inheritDoc} */
public void reset() {
// Nothing.
}
- /** {@inheritDoc} */
public void rowProcessed(@SuppressWarnings("unused") final boolean filtered,
@SuppressWarnings("unused") final byte[] key) {
// Nothing
}
- /** {@inheritDoc} */
public void validate(@SuppressWarnings("unused") final byte[][] columns) {
// Nothing
}
- /** {@inheritDoc} */
public void readFields(final DataInput in) throws IOException {
int valueLen = in.readInt();
if (valueLen > 0) {
@@ -175,7 +177,6 @@
new HBaseConfiguration());
}
- /** {@inheritDoc} */
public void write(final DataOutput out) throws IOException {
if (value == null) {
out.writeInt(0);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/InclusiveStopRowFilter.java Tue Sep 9 13:36:49 2008
@@ -41,7 +41,6 @@
super(stopRowKey);
}
- /** {@inheritDoc} */
@Override
public boolean filterRowKey(final byte [] rowKey) {
if (rowKey == null) {
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java Tue Sep 9 13:36:49 2008
@@ -61,23 +61,14 @@
this.pageSize = pageSize;
}
- /**
- *
- * {@inheritDoc}
- */
public void validate(@SuppressWarnings("unused") final byte [][] columns) {
// Doesn't filter columns
}
- /**
- *
- * {@inheritDoc}
- */
public void reset() {
rowsAccepted = 0;
}
- /** {@inheritDoc} */
public void rowProcessed(boolean filtered,
@SuppressWarnings("unused") byte [] rowKey) {
if (!filtered) {
@@ -85,61 +76,33 @@
}
}
- /**
- *
- * {@inheritDoc}
- */
public boolean processAlways() {
return false;
}
- /**
- *
- * {@inheritDoc}
- */
public boolean filterAllRemaining() {
return this.rowsAccepted > this.pageSize;
}
- /**
- *
- * {@inheritDoc}
- */
public boolean filterRowKey(@SuppressWarnings("unused") final byte [] r) {
return filterAllRemaining();
}
- /**
- *
- * {@inheritDoc}
- */
public boolean filterColumn(@SuppressWarnings("unused") final byte [] rowKey,
@SuppressWarnings("unused") final byte [] colKey,
@SuppressWarnings("unused") final byte[] data) {
return filterAllRemaining();
}
- /**
- *
- * {@inheritDoc}
- */
public boolean filterRow(@SuppressWarnings("unused")
final SortedMap<byte [], Cell> columns) {
return filterAllRemaining();
}
- /**
- *
- * {@inheritDoc}
- */
public void readFields(final DataInput in) throws IOException {
this.pageSize = in.readLong();
}
- /**
- *
- * {@inheritDoc}
- */
public void write(final DataOutput out) throws IOException {
out.writeLong(pageSize);
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java Tue Sep 9 13:36:49 2008
@@ -86,13 +86,11 @@
this.setColumnFilters(columnFilter);
}
- /** {@inheritDoc} */
@SuppressWarnings("unused")
public void rowProcessed(boolean filtered, byte [] rowKey) {
//doesn't care
}
- /** {@inheritDoc} */
public boolean processAlways() {
return false;
}
@@ -134,36 +132,20 @@
}
}
- /**
- *
- * {@inheritDoc}
- */
public void reset() {
// Nothing to reset
}
- /**
- *
- * {@inheritDoc}
- */
public boolean filterAllRemaining() {
return false;
}
- /**
- *
- * {@inheritDoc}
- */
public boolean filterRowKey(final byte [] rowKey) {
return (filtersByRowKey() && rowKey != null)?
!getRowKeyPattern().matcher(Bytes.toString(rowKey)).matches():
false;
}
- /**
- *
- * {@inheritDoc}
- */
public boolean filterColumn(final byte [] rowKey, final byte [] colKey,
final byte[] data) {
if (filterRowKey(rowKey)) {
@@ -183,10 +165,6 @@
return false;
}
- /**
- *
- * {@inheritDoc}
- */
public boolean filterRow(final SortedMap<byte [], Cell> columns) {
for (Entry<byte [], Cell> col : columns.entrySet()) {
if (nullColumns.contains(col.getKey())
@@ -225,10 +203,6 @@
return rowKeyPattern;
}
- /**
- *
- * {@inheritDoc}
- */
public void readFields(final DataInput in) throws IOException {
boolean hasRowKeyPattern = in.readBoolean();
if (hasRowKeyPattern) {
@@ -255,10 +229,6 @@
}
}
- /**
- *
- * {@inheritDoc}
- */
public void validate(final byte [][] columns) {
Set<byte []> invalids = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
for (byte [] colKey : getFilterColumns()) {
@@ -289,10 +259,6 @@
return cols;
}
- /**
- *
- * {@inheritDoc}
- */
public void write(final DataOutput out) throws IOException {
if (!filtersByRowKey()) {
out.writeBoolean(false);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowFilterSet.java Tue Sep 9 13:36:49 2008
@@ -80,28 +80,24 @@
this.operator = operator;
}
- /** {@inheritDoc} */
public void validate(final byte [][] columns) {
for (RowFilterInterface filter : filters) {
filter.validate(columns);
}
}
- /** {@inheritDoc} */
public void reset() {
for (RowFilterInterface filter : filters) {
filter.reset();
}
}
- /** {@inheritDoc} */
public void rowProcessed(boolean filtered, byte [] rowKey) {
for (RowFilterInterface filter : filters) {
filter.rowProcessed(filtered, rowKey);
}
}
- /** {@inheritDoc} */
public boolean processAlways() {
for (RowFilterInterface filter : filters) {
if (filter.processAlways()) {
@@ -111,7 +107,6 @@
return false;
}
- /** {@inheritDoc} */
public boolean filterAllRemaining() {
boolean result = operator == Operator.MUST_PASS_ONE;
for (RowFilterInterface filter : filters) {
@@ -128,7 +123,6 @@
return result;
}
- /** {@inheritDoc} */
public boolean filterRowKey(final byte [] rowKey) {
boolean resultFound = false;
boolean result = operator == Operator.MUST_PASS_ONE;
@@ -152,7 +146,6 @@
return result;
}
- /** {@inheritDoc} */
public boolean filterColumn(final byte [] rowKey, final byte [] colKey,
final byte[] data) {
boolean resultFound = false;
@@ -179,7 +172,6 @@
return result;
}
- /** {@inheritDoc} */
public boolean filterRow(final SortedMap<byte [], Cell> columns) {
boolean resultFound = false;
boolean result = operator == Operator.MUST_PASS_ONE;
@@ -203,7 +195,6 @@
return result;
}
- /** {@inheritDoc} */
public void readFields(final DataInput in) throws IOException {
Configuration conf = new HBaseConfiguration();
byte opByte = in.readByte();
@@ -219,7 +210,6 @@
}
}
- /** {@inheritDoc} */
public void write(final DataOutput out) throws IOException {
Configuration conf = new HBaseConfiguration();
out.writeByte(operator.ordinal());
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java Tue Sep 9 13:36:49 2008
@@ -61,39 +61,27 @@
return this.stopRowKey;
}
- /**
- *
- * {@inheritDoc}
- */
public void validate(@SuppressWarnings("unused") final byte [][] columns) {
// Doesn't filter columns
}
- /**
- *
- * {@inheritDoc}
- */
public void reset() {
// Nothing to reset
}
- /** {@inheritDoc} */
@SuppressWarnings("unused")
public void rowProcessed(boolean filtered, byte [] rowKey) {
// Doesn't care
}
- /** {@inheritDoc} */
public boolean processAlways() {
return false;
}
- /** {@inheritDoc} */
public boolean filterAllRemaining() {
return false;
}
- /** {@inheritDoc} */
public boolean filterRowKey(final byte [] rowKey) {
if (rowKey == null) {
if (this.stopRowKey == null) {
@@ -105,8 +93,6 @@
}
/**
- * {@inheritDoc}
- *
* Because StopRowFilter does not examine column information, this method
* defaults to calling the rowKey-only version of filter.
*/
@@ -116,24 +102,19 @@
return filterRowKey(rowKey);
}
- /** {@inheritDoc}
- *
+ /**
* Because StopRowFilter does not examine column information, this method
* defaults to calling filterAllRemaining().
- *
- * @param columns
*/
public boolean filterRow(@SuppressWarnings("unused")
final SortedMap<byte [], Cell> columns) {
return filterAllRemaining();
}
- /** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
this.stopRowKey = Bytes.readByteArray(in);
}
- /** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
Bytes.writeByteArray(out, this.stopRowKey);
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/WhileMatchRowFilter.java Tue Sep 9 13:36:49 2008
@@ -63,13 +63,11 @@
return this.filter;
}
- /** {@inheritDoc} */
public void reset() {
this.filterAllRemaining = false;
this.filter.reset();
}
- /** {@inheritDoc} */
public boolean processAlways() {
return true;
}
@@ -85,20 +83,17 @@
return this.filterAllRemaining || this.filter.filterAllRemaining();
}
- /** {@inheritDoc} */
public boolean filterRowKey(final byte [] rowKey) {
changeFAR(this.filter.filterRowKey(rowKey));
return filterAllRemaining();
}
- /** {@inheritDoc} */
public boolean filterColumn(final byte [] rowKey, final byte [] colKey,
final byte[] data) {
changeFAR(this.filter.filterColumn(rowKey, colKey, data));
return filterAllRemaining();
}
- /** {@inheritDoc} */
public boolean filterRow(final SortedMap<byte [], Cell> columns) {
changeFAR(this.filter.filterRow(columns));
return filterAllRemaining();
@@ -114,17 +109,14 @@
this.filterAllRemaining = this.filterAllRemaining || value;
}
- /** {@inheritDoc} */
public void rowProcessed(boolean filtered, byte [] rowKey) {
this.filter.rowProcessed(filtered, rowKey);
}
- /** {@inheritDoc} */
public void validate(final byte [][] columns) {
this.filter.validate(columns);
}
- /** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
String className = in.readUTF();
@@ -144,7 +136,6 @@
}
}
- /** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
out.writeUTF(this.filter.getClass().getName());
this.filter.write(out);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java Tue Sep 9 13:36:49 2008
@@ -78,6 +78,7 @@
* Initialize a BatchUpdate operation on a row with a specific timestamp.
*
* @param row
+ * @param timestamp
*/
public BatchUpdate(final String row, long timestamp){
this(Bytes.toBytes(row), timestamp);
@@ -87,6 +88,7 @@
* Initialize a BatchUpdate operation on a row with a specific timestamp.
*
* @param row
+ * @param timestamp
*/
public BatchUpdate(final byte [] row, long timestamp){
this.row = row;
@@ -100,7 +102,7 @@
}
/**
- * Return the timestamp this BatchUpdate will be committed with.
+ * @return the timestamp this BatchUpdate will be committed with.
*/
public long getTimestamp() {
return timestamp;
@@ -108,6 +110,8 @@
/**
* Set this BatchUpdate's timestamp.
+ *
+ * @param timestamp
*/
public void setTimestamp(long timestamp) {
this.timestamp = timestamp;
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java Tue Sep 9 13:36:49 2008
@@ -59,7 +59,7 @@
* @param fileLength
* @param blockSize the size of each block in bytes.
*/
- @SuppressWarnings("unchecked")
+ @SuppressWarnings({"unchecked", "serial"})
public BlockFSInputStream(InputStream in, long fileLength, int blockSize) {
this.in = in;
if (!(in instanceof Seekable) || !(in instanceof PositionedReadable)) {
@@ -89,12 +89,12 @@
}
@Override
- public synchronized long getPos() throws IOException {
+ public synchronized long getPos() {
return pos;
}
@Override
- public synchronized int available() throws IOException {
+ public synchronized int available() {
return (int) (fileLength - pos);
}
@@ -108,6 +108,7 @@
}
@Override
+ @SuppressWarnings("unused")
public synchronized boolean seekToNewSource(long targetPos)
throws IOException {
return false;
@@ -194,6 +195,7 @@
}
@Override
+ @SuppressWarnings("unused")
public void mark(int readLimit) {
// Do nothing
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java Tue Sep 9 13:36:49 2008
@@ -106,7 +106,6 @@
return timestamps[0];
}
- /** {@inheritDoc} */
@Override
public String toString() {
if (this.values.length == 1) {
@@ -132,7 +131,6 @@
// Writable
//
- /** {@inheritDoc} */
public void readFields(final DataInput in) throws IOException {
int nvalues = in.readInt();
this.timestamps = new long[nvalues];
@@ -145,7 +143,6 @@
}
}
- /** {@inheritDoc} */
public void write(final DataOutput out) throws IOException {
out.writeInt(this.values.length);
for (int i = 0; i < this.timestamps.length; i++) {
@@ -160,7 +157,6 @@
// Iterable
//
- /** {@inheritDoc} */
public Iterator<Cell> iterator() {
return new CellIterator();
}
@@ -169,18 +165,15 @@
CellIterator() {
}
- /** {@inheritDoc} */
public boolean hasNext() {
return currentValue < values.length;
}
- /** {@inheritDoc} */
public Cell next() {
currentValue += 1;
return new Cell(values[currentValue], timestamps[currentValue]);
}
- /** {@inheritDoc} */
public void remove() throws UnsupportedOperationException {
throw new UnsupportedOperationException("remove is not supported");
}
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java Tue Sep 9 13:36:49 2008
@@ -45,8 +45,9 @@
* if passed a Writable it has not already been told about. Its also been
* primed with hbase Writables. Keys are always byte arrays. Thats other
* difference from MapWritable.
- * TODO: Have generics enforce V is a subclass of Writable and K is a byte []
- * only.
+ *
+ * @param <K> key
+ * @param <V> value
*/
public class HbaseMapWritable <K, V>
implements SortedMap<byte [], V>, Writable, Configurable {
@@ -89,47 +90,38 @@
this.conf.set(conf);
}
- /** {@inheritDoc} */
public void clear() {
instance.clear();
}
- /** {@inheritDoc} */
public boolean containsKey(Object key) {
return instance.containsKey(key);
}
- /** {@inheritDoc} */
public boolean containsValue(Object value) {
return instance.containsValue(value);
}
- /** {@inheritDoc} */
public Set<Entry<byte [], V>> entrySet() {
return instance.entrySet();
}
- /** {@inheritDoc} */
public V get(Object key) {
return instance.get(key);
}
- /** {@inheritDoc} */
public boolean isEmpty() {
return instance.isEmpty();
}
- /** {@inheritDoc} */
public Set<byte []> keySet() {
return instance.keySet();
}
- /** {@inheritDoc} */
public int size() {
return instance.size();
}
- /** {@inheritDoc} */
public Collection<V> values() {
return instance.values();
}
@@ -193,7 +185,6 @@
return this.instance.toString();
}
- /** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
// Write out the number of entries in the map
out.writeInt(this.instance.size());
@@ -206,7 +197,7 @@
}
}
- /** {@inheritDoc} */
+ @SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
// First clear the map. Otherwise we will just accumulate
// entries every time this method is called.
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java Tue Sep 9 13:36:49 2008
@@ -132,31 +132,43 @@
private Object instance;
private Configuration conf;
+ /** default constructor for writable */
public HbaseObjectWritable() {
super();
}
+ /**
+ * @param instance
+ */
public HbaseObjectWritable(Object instance) {
set(instance);
}
+ /**
+ * @param declaredClass
+ * @param instance
+ */
public HbaseObjectWritable(Class<?> declaredClass, Object instance) {
this.declaredClass = declaredClass;
this.instance = instance;
}
- /** Return the instance, or null if none. */
+ /** @return the instance, or null if none. */
public Object get() { return instance; }
- /** Return the class this is meant to be. */
+ /** @return the class this is meant to be. */
public Class<?> getDeclaredClass() { return declaredClass; }
- /** Reset the instance. */
+ /**
+ * Reset the instance.
+ * @param instance
+ */
public void set(Object instance) {
this.declaredClass = instance.getClass();
this.instance = instance;
}
+ @Override
public String toString() {
return "OW[class=" + declaredClass + ",value=" + instance + "]";
}
@@ -172,14 +184,18 @@
private static class NullInstance extends Configured implements Writable {
Class<?> declaredClass;
+ /** default constructor for writable */
public NullInstance() { super(null); }
+ /**
+ * @param declaredClass
+ * @param conf
+ */
public NullInstance(Class<?> declaredClass, Configuration conf) {
super(conf);
this.declaredClass = declaredClass;
}
- @SuppressWarnings("boxing")
public void readFields(DataInput in) throws IOException {
this.declaredClass = CODE_TO_CLASS.get(in.readByte());
}
@@ -195,7 +211,6 @@
* @param c
* @throws IOException
*/
- @SuppressWarnings("boxing")
static void writeClassCode(final DataOutput out, final Class<?> c)
throws IOException {
Byte code = CLASS_TO_CODE.get(c);
@@ -206,9 +221,16 @@
out.writeByte(code);
}
- /** Write a {@link Writable}, {@link String}, primitive type, or an array of
- * the preceding. */
- @SuppressWarnings({ "boxing", "unchecked" })
+ /**
+ * Write a {@link Writable}, {@link String}, primitive type, or an array of
+ * the preceding.
+ * @param out
+ * @param instance
+ * @param declaredClass
+ * @param conf
+ * @throws IOException
+ */
+ @SuppressWarnings("unchecked")
public static void writeObject(DataOutput out, Object instance,
Class declaredClass,
Configuration conf)
@@ -273,16 +295,29 @@
}
- /** Read a {@link Writable}, {@link String}, primitive type, or an array of
- * the preceding. */
+ /**
+ * Read a {@link Writable}, {@link String}, primitive type, or an array of
+ * the preceding.
+ * @param in
+ * @param conf
+ * @return the object
+ * @throws IOException
+ */
public static Object readObject(DataInput in, Configuration conf)
throws IOException {
return readObject(in, null, conf);
}
- /** Read a {@link Writable}, {@link String}, primitive type, or an array of
- * the preceding. */
- @SuppressWarnings({ "unchecked", "boxing" })
+ /**
+ * Read a {@link Writable}, {@link String}, primitive type, or an array of
+ * the preceding.
+ * @param in
+ * @param objectWritable
+ * @param conf
+ * @return the object
+ * @throws IOException
+ */
+ @SuppressWarnings("unchecked")
public static Object readObject(DataInput in,
HbaseObjectWritable objectWritable, Configuration conf)
throws IOException {
@@ -353,7 +388,6 @@
return instance;
}
- @SuppressWarnings("boxing")
private static void addToMap(final Class<?> clazz, final byte code) {
CLASS_TO_CODE.put(clazz, code);
CODE_TO_CLASS.put(code, clazz);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java Tue Sep 9 13:36:49 2008
@@ -106,13 +106,11 @@
}
- /** {@inheritDoc} */
public void readFields(final DataInput in) throws IOException {
this.bytes = new byte[in.readInt()];
in.readFully(this.bytes, 0, this.bytes.length);
}
- /** {@inheritDoc} */
public void write(final DataOutput out) throws IOException {
out.writeInt(this.bytes.length);
out.write(this.bytes, 0, this.bytes.length);
@@ -120,7 +118,6 @@
// Below methods copied from BytesWritable
- /** {@inheritDoc} */
@Override
public int hashCode() {
return WritableComparator.hashBytes(bytes, this.bytes.length);
@@ -150,7 +147,6 @@
0, that.length);
}
- /** {@inheritDoc} */
@Override
public boolean equals(Object right_obj) {
if (right_obj instanceof byte []) {
@@ -162,7 +158,6 @@
return false;
}
- /** {@inheritDoc} */
@Override
public String toString() {
StringBuffer sb = new StringBuffer(3*this.bytes.length);
@@ -192,7 +187,6 @@
super(ImmutableBytesWritable.class);
}
- /** {@inheritDoc} */
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
return comparator.compare(b1, s1, l1, b2, s2, l2);
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java?rev=693597&r1=693596&r2=693597&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java Tue Sep 9 13:36:49 2008
@@ -43,12 +43,15 @@
private byte [] row = null;
private final HbaseMapWritable<byte [], Cell> cells;
+ /** default constructor for writable */
public RowResult() {
this(null, new HbaseMapWritable<byte [], Cell>());
}
/**
* Create a RowResult from a row and Cell map
+ * @param row
+ * @param m
*/
public RowResult (final byte [] row,
final HbaseMapWritable<byte [], Cell> m) {
@@ -58,6 +61,7 @@
/**
* Get the row for this RowResult
+ * @return the row
*/
public byte [] getRow() {
return row;
@@ -78,7 +82,7 @@
}
public Cell get(Object key) {
- return (Cell)this.cells.get(key);
+ return this.cells.get(key);
}
public Cell remove(@SuppressWarnings("unused") Object key) {
@@ -127,6 +131,8 @@
/**
* Get the Cell that corresponds to column
+ * @param column
+ * @return the Cell
*/
public Cell get(byte [] column) {
return this.cells.get(column);
@@ -134,6 +140,8 @@
/**
* Get the Cell that corresponds to column, using a String key
+ * @param key
+ * @return the Cell
*/
public Cell get(String key) {
return get(Bytes.toBytes(key));