Posted to commits@hbase.apache.org by st...@apache.org on 2013/03/15 18:23:04 UTC
svn commit: r1457027 [1/2] - in /hbase/branches/0.95:
hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/
hbase-client/src/main/java/org/apache/hadoop/hbase/client/
hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/
hbase-client/src...
Author: stack
Date: Fri Mar 15 17:23:02 2013
New Revision: 1457027
URL: http://svn.apache.org/r1457027
Log:
HBASE-8101 Cleanup: findbugs and javadoc warning fixes as well as making it illegal passing null row to Put/Delete, etc.
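
In practice the new row validation (see Mutation.checkRow() below) means empty and null rows are rejected at construction time. A minimal sketch of the client-facing behavior, assuming the 0.95 client API exactly as it appears in the hunks that follow (rows here are hypothetical):

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;

public class RowCheckSketch {
  public static void main(String[] args) {
    try {
      new Put(new byte[0]);                // empty row: now illegal
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());  // "Row length is 0"
    }
    try {
      new Get((byte[]) null);              // null row: now illegal
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());  // "Row buffer is null"
    }
  }
}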
Added:
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
Removed:
hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ByteBufferOutputStream.java
Modified:
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/AccessDeniedException.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CoprocessorException.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CorruptHFileException.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LeaseException.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AuthMethod.java
hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAttributes.java
hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
hbase/branches/0.95/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java
hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/DeleteTableHandler.java
hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java Fri Mar 15 17:23:02 2013
@@ -48,7 +48,7 @@ import java.net.UnknownHostException;
/**
* Tracks the availability of the catalog tables
* <code>.META.</code>.
- *
+ *
* This class is "read-only" in that the locations of the catalog tables cannot
* be explicitly set. Instead, ZooKeeper is used to learn of the availability
* and location of <code>.META.</code>.
@@ -65,7 +65,7 @@ public class CatalogTracker {
// servers when they needed to know of meta movement but also by
// client-side (inside in HTable) so rather than figure meta
// locations on fault, the client would instead get notifications out of zk.
- //
+ //
// But this original intent is frustrated by the fact that this class has to
// read an hbase table, the -ROOT- table, to figure out the .META. region
// location which means we depend on an HConnection. HConnection will do
@@ -110,13 +110,6 @@ public class CatalogTracker {
private boolean instantiatedzkw = false;
private Abortable abortable;
- /*
- * Do not clear this address once set. Its needed when we do
- * server shutdown processing -- we need to know who had .META. last. If you
- * want to know if the address is good, rely on {@link #metaAvailable} value.
- */
- private ServerName metaLocation;
-
private boolean stopped = false;
static final byte [] META_REGION_NAME =
@@ -147,7 +140,7 @@ public class CatalogTracker {
* @param abortable If fatal exception we'll call abort on this. May be null.
* If it is we'll use the Connection associated with the passed
* {@link Configuration} as our Abortable.
- * @throws IOException
+ * @throws IOException
*/
public CatalogTracker(final ZooKeeperWatcher zk, final Configuration conf,
Abortable abortable)
@@ -193,7 +186,7 @@ public class CatalogTracker {
* Determines current availability of catalog tables and ensures all further
* transitions of either region are tracked.
* @throws IOException
- * @throws InterruptedException
+ * @throws InterruptedException
*/
public void start() throws IOException, InterruptedException {
LOG.debug("Starting catalog tracker " + this);
@@ -235,7 +228,7 @@ public class CatalogTracker {
* not currently available.
* @return {@link ServerName} for server hosting <code>.META.</code> or null
* if none available
- * @throws InterruptedException
+ * @throws InterruptedException
*/
public ServerName getMetaLocation() throws InterruptedException {
return this.metaRegionTracker.getMetaRegionLocation();
@@ -309,8 +302,6 @@ public class CatalogTracker {
LOG.info(".META. still not available, sleeping and retrying." +
" Reason: " + e.getMessage());
}
- } catch (IOException e) {
- LOG.info("Retrying", e);
}
}
}
@@ -356,7 +347,7 @@ public class CatalogTracker {
} else {
throw ioe;
}
-
+
}
return protocol;
}
@@ -406,7 +397,7 @@ public class CatalogTracker {
}
}
LOG.info("Failed verification of " + Bytes.toStringBinary(regionName) +
- " at address=" + address + "; " + t);
+ " at address=" + address + ", exception=" + t);
return false;
}
@@ -416,7 +407,7 @@ public class CatalogTracker {
* the internal call to {@link #waitForMetaServerConnection(long)}.
* @return True if the <code>.META.</code> location is healthy.
* @throws IOException
- * @throws InterruptedException
+ * @throws InterruptedException
*/
public boolean verifyMetaRegionLocation(final long timeout)
throws InterruptedException, IOException {
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Action.java Fri Mar 15 17:23:02 2013
@@ -63,6 +63,11 @@ public class Action<R> implements Compar
}
@Override
+ public int hashCode() {
+ return this.action.hashCode();
+ }
+
+ @Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null || getClass() != obj.getClass()) return false;
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java Fri Mar 15 17:23:02 2013
@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.hbase.client;
+import java.util.ArrayList;
+import java.util.List;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
@@ -24,10 +27,6 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
/**
* Performs Append operations on a single row.
* <p>
@@ -66,10 +65,22 @@ public class Append extends Mutation {
* Create a Append operation for the specified row.
* <p>
* At least one column must be appended to.
- * @param row row key
+ * @param row row key; makes a local copy of passed in array.
*/
public Append(byte[] row) {
- this.row = Arrays.copyOf(row, row.length);
+ this(row, 0, row.length);
+ }
+
+ /** Create a Append operation for the specified row.
+ * <p>
+ * At least one column must be appended to.
+ * @param rowArray Makes a copy out of this buffer.
+ * @param rowOffset
+ * @param rowLength
+ */
+ public Append(final byte [] rowArray, final int rowOffset, final int rowLength) {
+ checkRow(rowArray, rowOffset, rowLength);
+ this.row = Bytes.copy(rowArray, rowOffset, rowLength);
}
/**
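
Delete, Put and Increment gain the same offset/length constructor pattern below. A usage sketch with a hypothetical buffer layout: the row slice is validated by checkRow() and duplicated by Bytes.copy(), so later writes to the source buffer cannot corrupt the operation.

import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.util.Bytes;

public class AppendSliceSketch {
  public static void main(String[] args) {
    byte[] buffer = Bytes.toBytes("header|row-0001|trailer"); // reused buffer
    int off = "header|".length();
    int len = "row-0001".length();
    Append append = new Append(buffer, off, len); // copies just the row slice
    append.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("-x"));
  }
}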
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java Fri Mar 15 17:23:02 2013
@@ -19,6 +19,11 @@
package org.apache.hadoop.hbase.client;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
@@ -26,11 +31,6 @@ import org.apache.hadoop.hbase.HConstant
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
/**
* Used to perform Delete operations on a single row.
* <p>
@@ -91,8 +91,27 @@ public class Delete extends Mutation imp
* @param timestamp maximum version timestamp (only for delete row)
*/
public Delete(byte [] row, long timestamp) {
- this.row = row;
- this.ts = timestamp;
+ this(row, 0, row.length, timestamp);
+ }
+
+ /**
+ * Create a Delete operation for the specified row and timestamp.<p>
+ *
+ * If no further operations are done, this will delete all columns in all
+ * families of the specified row with a timestamp less than or equal to the
+ * specified timestamp.<p>
+ *
+ * This timestamp is ONLY used for a delete row operation. If specifying
+ * families or columns, you must specify each timestamp individually.
+ * @param rowArray We make a local copy of this passed in row.
+ * @param rowOffset
+ * @param rowLength
+ * @param ts maximum version timestamp (only for delete row)
+ */
+ public Delete(final byte [] rowArray, final int rowOffset, final int rowLength, long ts) {
+ checkRow(rowArray, rowOffset, rowLength);
+ this.row = Bytes.copy(rowArray, rowOffset, rowLength);
+ this.ts = ts;
}
/**
@@ -121,10 +140,8 @@ public class Delete extends Mutation imp
}
if (Bytes.compareTo(this.row, 0, row.length, kv.getBuffer(),
kv.getRowOffset(), kv.getRowLength()) != 0) {
- throw new IOException("The row in the recently added KeyValue "
- + Bytes.toStringBinary(kv.getBuffer(), kv.getRowOffset(),
- kv.getRowLength()) + " doesn't match the original one "
- + Bytes.toStringBinary(this.row));
+ throw new WrongRowIOException("The row in " + kv.toString() +
+ " doesn't match the original one " + Bytes.toStringBinary(this.row));
}
byte [] family = kv.getFamily();
List<? extends Cell> list = familyMap.get(family);
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java Fri Mar 15 17:23:02 2013
@@ -19,14 +19,6 @@
package org.apache.hadoop.hbase.client;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.util.Bytes;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
@@ -37,6 +29,14 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.io.TimeRange;
+import org.apache.hadoop.hbase.util.Bytes;
+
/**
* Used to perform Get operations on a single row.
* <p>
@@ -83,6 +83,7 @@ public class Get extends OperationWithAt
* @param row row key
*/
public Get(byte [] row) {
+ Mutation.checkRow(row);
this.row = row;
}
@@ -388,10 +389,18 @@ public class Get extends OperationWithAt
//Row
@Override
public int compareTo(Row other) {
+ // TODO: This is wrong. Can't have two gets the same just because on same row.
return Bytes.compareTo(this.getRow(), other.getRow());
}
@Override
+ public int hashCode() {
+ // TODO: This is wrong. Can't have two gets the same just because on same row. But it
+ // matches how equals works currently and gets rid of the findbugs warning.
+ return Bytes.hashCode(this.getRow());
+ }
+
+ @Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
@@ -400,6 +409,7 @@ public class Get extends OperationWithAt
return false;
}
Row other = (Row) obj;
+ // TODO: This is wrong. Can't have two gets the same just because on same row.
return compareTo(other) == 0;
}
-}
+}
\ No newline at end of file
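
As the TODOs above say, two Gets hash and compare equal whenever they share a row, no matter which columns they ask for. A sketch of the consequence for hashed collections (hypothetical rows and families):

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.util.Bytes;

public class GetEqualsSketch {
  public static void main(String[] args) {
    Get g1 = new Get(Bytes.toBytes("row1"));
    g1.addFamily(Bytes.toBytes("cf1"));
    Get g2 = new Get(Bytes.toBytes("row1"));
    g2.addFamily(Bytes.toBytes("cf2"));   // same row, different family
    Set<Get> gets = new HashSet<Get>();
    gets.add(g1);
    gets.add(g2);
    System.out.println(gets.size());      // 1 -- g2 is swallowed as a duplicate
  }
}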
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java Fri Mar 15 17:23:02 2013
@@ -1350,7 +1350,7 @@ public class HBaseAdmin implements Abort
throws IOException, InterruptedException {
compact(tableNameOrRegionName, null, false);
}
-
+
/**
* Compact a column family within a table or region.
* Asynchronous operation.
@@ -1404,7 +1404,7 @@ public class HBaseAdmin implements Abort
throws IOException, InterruptedException {
compact(tableNameOrRegionName, null, true);
}
-
+
/**
* Major compact a column family within a table or region.
* Asynchronous operation.
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java Fri Mar 15 17:23:02 2013
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase.client;
import java.io.IOException;
-import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
@@ -28,7 +27,6 @@ import java.util.TreeMap;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.TimeRange;
@@ -52,17 +50,47 @@ public class Increment extends Mutation
private TimeRange tr = new TimeRange();
/**
- * Create a Increment operation for the specified row, using an existing row
- * lock.
+ * Create a Increment operation for the specified row.
* <p>
* At least one column must be incremented.
- * @param row row key
+ * @param row row key (we will make a copy of this).
*/
public Increment(byte [] row) {
- if (row == null || row.length > HConstants.MAX_ROW_LENGTH) {
- throw new IllegalArgumentException("Row key is invalid");
+ this(row, 0, row.length);
+ }
+
+ /**
+ * Create a Increment operation for the specified row.
+ * <p>
+ * At least one column must be incremented.
+ * @param row row key (we will make a copy of this).
+ */
+ public Increment(final byte [] row, final int offset, final int length) {
+ checkRow(row, offset, length);
+ this.row = Bytes.copy(row, offset, length);
+ }
+
+ /**
+ * Add the specified KeyValue to this operation.
+ * @param cell individual Cell
+ * @return this
+ * @throws java.io.IOException e
+ */
+ @SuppressWarnings("unchecked")
+ public Increment add(Cell cell) throws IOException{
+ KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
+ byte [] family = kv.getFamily();
+ List<? extends Cell> list = getCellList(family);
+ //Checking that the row of the kv is the same as the put
+ int res = Bytes.compareTo(this.row, 0, row.length,
+ kv.getBuffer(), kv.getRowOffset(), kv.getRowLength());
+ if (res != 0) {
+ throw new WrongRowIOException("The row in " + kv.toString() +
+ " doesn't match the original one " + Bytes.toStringBinary(this.row));
}
- this.row = Arrays.copyOf(row, row.length);
+ ((List<KeyValue>)list).add(kv);
+ familyMap.put(family, list);
+ return this;
}
/**
@@ -204,11 +232,20 @@ public class Increment extends Mutation
@Override
public int compareTo(Row i) {
+ // TODO: This is wrong. Can't have two the same just because on same row.
return Bytes.compareTo(this.getRow(), i.getRow());
}
@Override
+ public int hashCode() {
+ // TODO: This is wrong. Can't have two gets the same just because on same row. But it
+ // matches how equals works currently and gets rid of the findbugs warning.
+ return Bytes.hashCode(this.getRow());
+ }
+
+ @Override
public boolean equals(Object obj) {
+ // TODO: This is wrong. Can't have two the same just because on same row.
if (this == obj) {
return true;
}
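
The new Increment.add(Cell) mirrors Put.add(): the cell's row must match the Increment's row or the new WrongRowIOException (added further down) is thrown. A sketch with a deliberately mismatched, hypothetical row:

import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.WrongRowIOException;
import org.apache.hadoop.hbase.util.Bytes;

public class IncrementRowMatchSketch {
  public static void main(String[] args) throws IOException {
    Increment inc = new Increment(Bytes.toBytes("row1"));
    KeyValue kv = new KeyValue(Bytes.toBytes("row2"),   // wrong row on purpose
        Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes(1L));
    try {
      inc.add(kv);
    } catch (WrongRowIOException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}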
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java Fri Mar 15 17:23:02 2013
@@ -250,6 +250,7 @@ public abstract class Mutation extends O
}
/**
+ * Number of KeyValues carried by this Mutation.
* @return the total number of KeyValues
*/
public int size() {
@@ -299,4 +300,36 @@ public abstract class Mutation extends O
heapsize += getAttributeSize();
return heapsize;
}
-}
+
+ /**
+ * @param row Row to check
+ * @throws IllegalArgumentException Thrown if <code>row</code> is empty or null or
+ * > {@link HConstants#MAX_ROW_LENGTH}
+ * @return <code>row</code>
+ */
+ static byte [] checkRow(final byte [] row) {
+ return checkRow(row, 0, row == null? 0: row.length);
+ }
+
+ /**
+ * @param row Row to check
+ * @param offset
+ * @param length
+ * @throws IllegalArgumentException Thrown if <code>row</code> is empty or null or
+ * > {@link HConstants#MAX_ROW_LENGTH}
+ * @return <code>row</code>
+ */
+ static byte [] checkRow(final byte [] row, final int offset, final int length) {
+ if (row == null) {
+ throw new IllegalArgumentException("Row buffer is null");
+ }
+ if (length == 0) {
+ throw new IllegalArgumentException("Row length is 0");
+ }
+ if (length > HConstants.MAX_ROW_LENGTH) {
+ throw new IllegalArgumentException("Row length " + length + " is > " +
+ HConstants.MAX_ROW_LENGTH);
+ }
+ return row;
+ }
+}
\ No newline at end of file
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java Fri Mar 15 17:23:02 2013
@@ -57,14 +57,23 @@ public class Put extends Mutation implem
/**
* Create a Put operation for the specified row, using a given timestamp.
*
- * @param row row key
+ * @param row row key; we make a copy of what we are passed to keep local.
* @param ts timestamp
*/
public Put(byte[] row, long ts) {
- if(row == null || row.length > HConstants.MAX_ROW_LENGTH) {
- throw new IllegalArgumentException("Row key is invalid");
- }
- this.row = Arrays.copyOf(row, row.length);
+ this(row, 0, row.length, ts);
+ }
+
+ /**
+ * We make a copy of the passed in row key to keep local.
+ * @param rowArray
+ * @param rowOffset
+ * @param rowLength
+ * @param ts
+ */
+ public Put(byte [] rowArray, int rowOffset, int rowLength, long ts) {
+ checkRow(rowArray, rowOffset, rowLength);
+ this.row = Bytes.copy(rowArray, rowOffset, rowLength);
this.ts = ts;
}
@@ -125,11 +134,9 @@ public class Put extends Mutation implem
//Checking that the row of the kv is the same as the put
int res = Bytes.compareTo(this.row, 0, row.length,
kv.getBuffer(), kv.getRowOffset(), kv.getRowLength());
- if(res != 0) {
- throw new IOException("The row in the recently added KeyValue " +
- Bytes.toStringBinary(kv.getBuffer(), kv.getRowOffset(),
- kv.getRowLength()) + " doesn't match the original one " +
- Bytes.toStringBinary(this.row));
+ if (res != 0) {
+ throw new WrongRowIOException("The row in " + kv.toString() +
+ " doesn't match the original one " + Bytes.toStringBinary(this.row));
}
((List<KeyValue>)list).add(kv);
familyMap.put(family, list);
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java Fri Mar 15 17:23:02 2013
@@ -17,23 +17,26 @@
*/
package org.apache.hadoop.hbase.client;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.util.Bytes;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Bytes;
+
/**
* Performs multiple mutations atomically on a single row.
* Currently {@link Put} and {@link Delete} are supported.
*
* The mutations are performed in the order in which they
* were added.
+ *
+ * <p>We compare and equate mutations based off their row so be careful putting RowMutations
+ * into Sets or using them as keys in Maps.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@@ -89,6 +92,16 @@ public class RowMutations implements Row
}
@Override
+ public boolean equals(Object obj) {
+ if (obj == this) return true;
+ if (obj instanceof RowMutations) {
+ RowMutations other = (RowMutations)obj;
+ return compareTo(other) == 0;
+ }
+ return false;
+ }
+
+ @Override
public byte[] getRow() {
return row;
}
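
Illustrating the new javadoc caveat: equality is decided by row alone, so two RowMutations carrying entirely different edits still compare equal (hypothetical row):

import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.util.Bytes;

public class RowMutationsEqualsSketch {
  public static void main(String[] args) {
    RowMutations a = new RowMutations(Bytes.toBytes("row1"));
    RowMutations b = new RowMutations(Bytes.toBytes("row1"));
    // Equal by row only, whatever Puts/Deletes each carries -- hence the
    // warning above about Sets and Map keys.
    System.out.println(a.equals(b));   // true
  }
}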
Added: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java?rev=1457027&view=auto
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java (added)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java Fri Mar 15 17:23:02 2013
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.hbase.exceptions.HBaseIOException;
+
+public class WrongRowIOException extends HBaseIOException {
+ private static final long serialVersionUID = -5849522209440123059L;
+
+ public WrongRowIOException(final String msg) {
+ super(msg);
+ }
+}
\ No newline at end of file
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/AccessDeniedException.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/AccessDeniedException.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/AccessDeniedException.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/AccessDeniedException.java Fri Mar 15 17:23:02 2013
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hbase.exceptions;
-import org.apache.hadoop.hbase.exceptions.DoNotRetryIOException;
/**
* Exception thrown by access-related methods.
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CoprocessorException.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CoprocessorException.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CoprocessorException.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CoprocessorException.java Fri Mar 15 17:23:02 2013
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.exceptio
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.exceptions.DoNotRetryIOException;
/**
* Thrown if a coprocessor encounters any exception.
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CorruptHFileException.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CorruptHFileException.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CorruptHFileException.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/CorruptHFileException.java Fri Mar 15 17:23:02 2013
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.exceptions.DoNotRetryIOException;
/**
* This exception is thrown when attempts to read an HFile fail due to corruption or truncation
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/DoNotRetryIOException.java Fri Mar 15 17:23:02 2013
@@ -56,4 +56,4 @@ public class DoNotRetryIOException exten
public DoNotRetryIOException(Throwable cause) {
super(cause);
}
-}
+}
\ No newline at end of file
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LeaseException.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LeaseException.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LeaseException.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/LeaseException.java Fri Mar 15 17:23:02 2013
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.exceptions.DoNotRetryIOException;
/**
* Reports a problem with a lease
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java Fri Mar 15 17:23:02 2013
@@ -23,6 +23,7 @@ import com.google.protobuf.InvalidProtoc
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
/**
@@ -48,6 +49,11 @@ public class NullComparator extends Byte
}
@Override
+ public int hashCode() {
+ return 0;
+ }
+
+ @Override
public int compareTo(byte[] value, int offset, int length) {
throw new UnsupportedOperationException();
}
@@ -69,9 +75,9 @@ public class NullComparator extends Byte
*/
public static NullComparator parseFrom(final byte [] pbBytes)
throws DeserializationException {
- ComparatorProtos.NullComparator proto;
try {
- proto = ComparatorProtos.NullComparator.parseFrom(pbBytes);
+ @SuppressWarnings("unused")
+ ComparatorProtos.NullComparator proto = ComparatorProtos.NullComparator.parseFrom(pbBytes);
} catch (InvalidProtocolBufferException e) {
throw new DeserializationException(e);
}
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AuthMethod.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AuthMethod.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AuthMethod.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AuthMethod.java Fri Mar 15 17:23:02 2013
@@ -46,7 +46,7 @@ public enum AuthMethod {
private static final int FIRST_CODE = values()[0].code;
/** Return the object represented by the code. */
- private static AuthMethod valueOf(byte code) {
+ public static AuthMethod valueOf(byte code) {
final int i = (code & 0xff) - FIRST_CODE;
return i < 0 || i >= values().length ? null : values()[i];
}
Modified: hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java (original)
+++ hbase/branches/0.95/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java Fri Mar 15 17:23:02 2013
@@ -1587,7 +1587,7 @@ public class ZKUtil {
try {
getReplicationZnodesDump(zkw, sb);
} catch (KeeperException ke) {
- LOG.warn("Couldn't get the replication znode dump." + ke.getStackTrace());
+ LOG.warn("Couldn't get the replication znode dump", ke);
}
sb.append("\nQuorum Server Statistics:");
String[] servers = zkw.getQuorum().split(",");
Modified: hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAttributes.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAttributes.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAttributes.java (original)
+++ hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAttributes.java Fri Mar 15 17:23:02 2013
@@ -29,9 +29,10 @@ import org.junit.experimental.categories
@Category(SmallTests.class)
public class TestAttributes {
+ private static final byte [] ROW = new byte [] {'r'};
@Test
public void testPutAttributes() {
- Put put = new Put(new byte [] {});
+ Put put = new Put(ROW);
Assert.assertTrue(put.getAttributesMap().isEmpty());
Assert.assertNull(put.getAttribute("absent"));
@@ -79,7 +80,7 @@ public class TestAttributes {
@Test
public void testDeleteAttributes() {
- Delete del = new Delete(new byte [] {});
+ Delete del = new Delete(new byte [] {'r'});
Assert.assertTrue(del.getAttributesMap().isEmpty());
Assert.assertNull(del.getAttribute("absent"));
@@ -126,7 +127,7 @@ public class TestAttributes {
@Test
public void testGetId() {
- Get get = new Get(null);
+ Get get = new Get(ROW);
Assert.assertNull("Make sure id is null if unset", get.toMap().get("id"));
get.setId("myId");
Assert.assertEquals("myId", get.toMap().get("id"));
@@ -134,7 +135,7 @@ public class TestAttributes {
@Test
public void testAppendId() {
- Append append = new Append(Bytes.toBytes("testRow"));
+ Append append = new Append(ROW);
Assert.assertNull("Make sure id is null if unset", append.toMap().get("id"));
append.setId("myId");
Assert.assertEquals("myId", append.toMap().get("id"));
@@ -142,7 +143,7 @@ public class TestAttributes {
@Test
public void testDeleteId() {
- Delete delete = new Delete(new byte [] {});
+ Delete delete = new Delete(ROW);
Assert.assertNull("Make sure id is null if unset", delete.toMap().get("id"));
delete.setId("myId");
Assert.assertEquals("myId", delete.toMap().get("id"));
@@ -150,7 +151,7 @@ public class TestAttributes {
@Test
public void testPutId() {
- Put put = new Put(new byte [] {});
+ Put put = new Put(ROW);
Assert.assertNull("Make sure id is null if unset", put.toMap().get("id"));
put.setId("myId");
Assert.assertEquals("myId", put.toMap().get("id"));
@@ -163,6 +164,4 @@ public class TestAttributes {
scan.setId("myId");
Assert.assertEquals("myId", scan.toMap().get("id"));
}
-
-}
-
+}
\ No newline at end of file
Modified: hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java (original)
+++ hbase/branches/0.95/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java Fri Mar 15 17:23:02 2013
@@ -34,6 +34,7 @@ import org.junit.experimental.categories
// TODO: cover more test cases
@Category(SmallTests.class)
public class TestGet {
+ private static final byte [] ROW = new byte [] {'r'};
@Test
public void testAttributesSerialization() throws IOException {
Get get = new Get(Bytes.toBytes("row"));
@@ -53,7 +54,7 @@ public class TestGet {
@Test
public void testGetAttributes() {
- Get get = new Get(null);
+ Get get = new Get(ROW);
Assert.assertTrue(get.getAttributesMap().isEmpty());
Assert.assertNull(get.getAttribute("absent"));
@@ -100,11 +101,10 @@ public class TestGet {
@Test
public void testNullQualifier() {
- Get get = new Get(null);
+ Get get = new Get(ROW);
byte[] family = Bytes.toBytes("family");
get.addColumn(family, null);
Set<byte[]> qualifiers = get.getFamilyMap().get(family);
Assert.assertEquals(1, qualifiers.size());
}
-}
-
+}
\ No newline at end of file
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java Fri Mar 15 17:23:02 2013
@@ -34,6 +34,9 @@ import com.google.common.primitives.Long
* regionname, from row. See KeyValue for how it has a special comparator to do .META. cells
* and yet another for -ROOT-.
*/
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+ value="UNKNOWN",
+ justification="Findbugs doesn't like the way we are negating the result of a compare in below")
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class CellComparator implements Comparator<Cell>, Serializable{
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java Fri Mar 15 17:23:02 2013
@@ -872,9 +872,11 @@ public class KeyValue implements Cell, H
/**
* Clones a KeyValue. This creates a copy, re-allocating the buffer.
* @return Fully copied clone of this KeyValue
+ * @throws CloneNotSupportedException
*/
@Override
- public KeyValue clone() {
+ public KeyValue clone() throws CloneNotSupportedException {
+ super.clone();
byte [] b = new byte[this.length];
System.arraycopy(this.bytes, this.offset, b, 0, this.length);
KeyValue ret = new KeyValue(b, 0, b.length);
@@ -886,15 +888,6 @@ public class KeyValue implements Cell, H
}
/**
- * Creates a deep copy of this KeyValue, re-allocating the buffer.
- * Same function as {@link #clone()}. Added for clarity vs shallowCopy()
- * @return Deep copy of this KeyValue
- */
- public KeyValue deepCopy() {
- return clone();
- }
-
- /**
* Creates a shallow copy of this KeyValue, reusing the data byte buffer.
* http://en.wikipedia.org/wiki/Object_copy
* @return Shallow copy of this KeyValue
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseDecoder.java Fri Mar 15 17:23:02 2013
@@ -22,12 +22,12 @@ import java.io.InputStream;
import org.apache.hadoop.hbase.Cell;
-abstract class BaseDecoder implements Codec.Decoder {
- final InputStream in;
+public abstract class BaseDecoder implements Codec.Decoder {
+ protected final InputStream in;
private boolean hasNext = true;
private Cell current = null;
- BaseDecoder(final InputStream in) {
+ public BaseDecoder(final InputStream in) {
this.in = in;
}
@@ -50,7 +50,7 @@ abstract class BaseDecoder implements Co
* @return extract a Cell
* @throws IOException
*/
- abstract Cell parseCell() throws IOException;
+ protected abstract Cell parseCell() throws IOException;
@Override
public Cell current() {
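
Making BaseDecoder public with a protected parseCell() lets codecs outside this package subclass it. A hypothetical custom decoder, assuming only what the diff shows (KeyValue.iscreate() is the same call KeyValueCodec delegates to below):

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.codec.BaseDecoder;

public class MyDecoder extends BaseDecoder {
  public MyDecoder(InputStream in) {
    super(in);   // 'in' is now protected, so subclasses may read it directly
  }

  @Override
  protected Cell parseCell() throws IOException {
    // A real codec would parse its own wire format here; this sketch just
    // delegates to the stock KeyValue stream format.
    return KeyValue.iscreate(in);
  }
}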
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/BaseEncoder.java Fri Mar 15 17:23:02 2013
@@ -22,7 +22,7 @@ import java.io.OutputStream;
import org.apache.hadoop.hbase.Cell;
-abstract class BaseEncoder implements Codec.Encoder {
+public abstract class BaseEncoder implements Codec.Encoder {
protected final OutputStream out;
// This encoder is 'done' once flush has been called.
protected boolean flushed = false;
@@ -34,7 +34,7 @@ abstract class BaseEncoder implements Co
@Override
public abstract void write(Cell cell) throws IOException;
- void checkFlushed() throws CodecException {
+ protected void checkFlushed() throws CodecException {
if (this.flushed) throw new CodecException("Flushed; done");
}
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java Fri Mar 15 17:23:02 2013
@@ -77,7 +77,7 @@ public class CellCodec implements Codec
super(in);
}
- Cell parseCell() throws IOException {
+ protected Cell parseCell() throws IOException {
byte [] row = readByteArray(this.in);
byte [] family = readByteArray(in);
byte [] qualifier = readByteArray(in);
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java Fri Mar 15 17:23:02 2013
@@ -66,7 +66,7 @@ public class KeyValueCodec implements Co
super(in);
}
- Cell parseCell() throws IOException {
+ protected Cell parseCell() throws IOException {
return KeyValue.iscreate(in);
}
}
Added: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java?rev=1457027&view=auto
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java (added)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java Fri Mar 15 17:23:02 2013
@@ -0,0 +1,137 @@
+/*
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
+import java.nio.channels.WritableByteChannel;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * Not thread safe!
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class ByteBufferOutputStream extends OutputStream {
+
+ protected ByteBuffer buf;
+
+ public ByteBufferOutputStream(int capacity) {
+ this(capacity, false);
+ }
+
+ public ByteBufferOutputStream(int capacity, boolean useDirectByteBuffer) {
+ if (useDirectByteBuffer) {
+ buf = ByteBuffer.allocateDirect(capacity);
+ } else {
+ buf = ByteBuffer.allocate(capacity);
+ }
+ }
+
+ public int size() {
+ return buf.position();
+ }
+
+ /**
+ * This flips the underlying BB so be sure to use it _last_!
+ * @return ByteBuffer
+ */
+ public ByteBuffer getByteBuffer() {
+ buf.flip();
+ return buf;
+ }
+
+ private void checkSizeAndGrow(int extra) {
+ if ( (buf.position() + extra) > buf.limit()) {
+ // size calculation is complex, because we could overflow negative,
+ // and/or not allocate enough space. this fixes that.
+ int newSize = (int)Math.min((((long)buf.capacity()) * 2),
+ (long)(Integer.MAX_VALUE));
+ newSize = Math.max(newSize, buf.position() + extra);
+
+ ByteBuffer newBuf = ByteBuffer.allocate(newSize);
+ buf.flip();
+ newBuf.put(buf);
+ buf = newBuf;
+ }
+ }
+
+ // OutputStream
+ @Override
+ public void write(int b) throws IOException {
+ checkSizeAndGrow(Bytes.SIZEOF_BYTE);
+
+ buf.put((byte)b);
+ }
+
+ /**
+ * Writes the complete contents of this byte buffer output stream to
+ * the specified output stream argument.
+ *
+ * @param out the output stream to which to write the data.
+ * @exception IOException if an I/O error occurs.
+ */
+ public synchronized void writeTo(OutputStream out) throws IOException {
+ WritableByteChannel channel = Channels.newChannel(out);
+ ByteBuffer bb = buf.duplicate();
+ bb.flip();
+ channel.write(bb);
+ }
+
+ @Override
+ public void write(byte[] b) throws IOException {
+ checkSizeAndGrow(b.length);
+
+ buf.put(b);
+ }
+
+ @Override
+ public void write(byte[] b, int off, int len) throws IOException {
+ checkSizeAndGrow(len);
+
+ buf.put(b, off, len);
+ }
+
+ @Override
+ public void flush() throws IOException {
+ // noop
+ }
+
+ @Override
+ public void close() throws IOException {
+ // noop again. heh
+ }
+
+ public byte[] toByteArray(int offset, int length) {
+ ByteBuffer bb = buf.duplicate();
+ bb.flip();
+
+ byte[] chunk = new byte[length];
+
+ bb.position(offset);
+ bb.get(chunk, 0, length);
+ return chunk;
+ }
+}
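
A usage sketch for the relocated class (hypothetical payload). The buffer doubles as needed, and getByteBuffer() flips it, so that call must come last:

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.io.ByteBufferOutputStream;
import org.apache.hadoop.hbase.util.Bytes;

public class ByteBufferOutputStreamSketch {
  public static void main(String[] args) throws IOException {
    ByteBufferOutputStream out = new ByteBufferOutputStream(4); // grows on demand
    out.write(Bytes.toBytes("hello "));
    out.write(Bytes.toBytes("world"));
    System.out.println(out.size());        // 11 bytes buffered
    ByteBuffer bb = out.getByteBuffer();   // flips the buffer -- use it last
    System.out.println(bb.remaining());    // 11
  }
}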
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java Fri Mar 15 17:23:02 2013
@@ -209,8 +209,9 @@ public class DiffKeyDeltaEncoder extends
state.familyNameWithSize =
new byte[(state.familyLength & 0xff) + KeyValue.FAMILY_LENGTH_SIZE];
state.familyNameWithSize[0] = state.familyLength;
- source.read(state.familyNameWithSize, KeyValue.FAMILY_LENGTH_SIZE,
+ int read = source.read(state.familyNameWithSize, KeyValue.FAMILY_LENGTH_SIZE,
state.familyLength);
+ assert read == state.familyLength;
}
// read flag
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java Fri Mar 15 17:23:02 2013
@@ -16,14 +16,10 @@
*/
package org.apache.hadoop.hbase.io.encoding;
-import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
-import java.io.FilterOutputStream;
import java.io.IOException;
-import java.lang.reflect.Field;
import java.nio.ByteBuffer;
-import java.util.Arrays;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.KeyValue;
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java Fri Mar 15 17:23:02 2013
@@ -1673,6 +1673,21 @@ public class Bytes {
}
/**
+ * Copy the byte array given in parameter and return an instance
+ * of a new byte array with the same length and the same content.
+ * @param bytes the byte array to copy from
+ * @return a copy of the given designated byte array
+ * @param offset
+ * @param length
+ */
+ public static byte [] copy(byte [] bytes, final int offset, final int length) {
+ if (bytes == null) return null;
+ byte [] result = new byte[length];
+ System.arraycopy(bytes, offset, result, 0, length);
+ return result;
+ }
+
+ /**
* Search sorted array "a" for byte "key". I can't remember if I wrote this or copied it from
* somewhere. (mcorgan)
* @param a Array to search. Entries must be sorted and unique.
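
A quick sketch of the new Bytes.copy() helper added above, with hypothetical content; the returned array is detached from the source:

import org.apache.hadoop.hbase.util.Bytes;

public class BytesCopySketch {
  public static void main(String[] args) {
    byte[] src = Bytes.toBytes("prefix-payload-suffix");
    byte[] payload = Bytes.copy(src, "prefix-".length(), "payload".length());
    System.out.println(Bytes.toString(payload)); // payload
    src[7] = 'X';                                // mutate the source...
    System.out.println(Bytes.toString(payload)); // ...the copy still says payload
  }
}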
Modified: hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java (original)
+++ hbase/branches/0.95/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java Fri Mar 15 17:23:02 2013
@@ -34,6 +34,9 @@ import com.google.common.primitives.Byte
* Generate a list of key values which are very useful for testing data block encoding
* and compression.
*/
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+ value="RV_ABSOLUTE_VALUE_OF_RANDOM_INT",
+ justification="Should probably fix")
public class RedundantKVGenerator {
// row settings
static byte[] DEFAULT_COMMON_PREFIX = new byte[0];
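The suppressed RV_ABSOLUTE_VALUE_OF_RANDOM_INT warning concerns the Math.abs(random.nextInt()) idiom: Math.abs(Integer.MIN_VALUE) is still Integer.MIN_VALUE, so the result can be negative. A minimal sketch of the pitfall and the usual fix:

import java.util.Random;

final class RandomSketch {
  static int pick(Random rnd) {
    // Buggy: when nextInt() returns Integer.MIN_VALUE (about 1 in 2^32
    // draws), Math.abs keeps it negative and so is the modulo result.
    int bad = Math.abs(rnd.nextInt()) % 100;
    // Safe: bound the draw directly; nextInt(n) is always in [0, n).
    int good = rnd.nextInt(100);
    return good;
  }
}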
Modified: hbase/branches/0.95/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java (original)
+++ hbase/branches/0.95/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/DemoClient.java Fri Mar 15 17:23:02 2013
@@ -183,7 +183,6 @@ public class DemoClient {
client.mutateRow(ByteBuffer.wrap(t), ByteBuffer.wrap(valid), mutations, dummyAttributes);
// non-utf8 is now allowed in row names because HBase stores values as binary
- ByteBuffer bf = ByteBuffer.wrap(invalid);
mutations = new ArrayList<Mutation>();
mutations.add(new Mutation(false, ByteBuffer.wrap(bytes("entry:foo")), ByteBuffer.wrap(invalid), writeToWal));
Modified: hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java (original)
+++ hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java Fri Mar 15 17:23:02 2013
@@ -137,6 +137,13 @@ public class MetricsRegionSourceImpl imp
.compareTo(impl.regionWrapper.getRegionName());
}
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) return true;
+ if (!(obj instanceof MetricsRegionSourceImpl)) return false;
+ return compareTo((MetricsRegionSourceImpl)obj) == 0;
+ }
+
void snapshot(MetricsRecordBuilder mrb, boolean ignored) {
if (closed) return;
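The equals added above keeps equals consistent with compareTo, which FindBugs asks of Comparable classes (EQ_COMPARETO_USE_OBJECT_EQUALS). The Object contract would also suggest a matching hashCode; the commit does not add one, but a sketch consistent with the identity compareTo uses (field and accessor names taken from the hunk) might be:

@Override
public int hashCode() {
  // Same identity as compareTo/equals: the wrapped region's name.
  return regionWrapper.getRegionName().hashCode();
}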
Modified: hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java (original)
+++ hbase/branches/0.95/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java Fri Mar 15 17:23:02 2013
@@ -36,6 +36,9 @@ import java.util.concurrent.TimeUnit;
* This class needs to be in the o.a.h.metrics2.impl namespace as many of the variables/calls used
* are package private.
*/
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+ value="LI_LAZY_INIT_STATIC",
+ justification="Yeah, its weird but its what we want")
public class JmxCacheBuster {
private static final Log LOG = LogFactory.getLog(JmxCacheBuster.class);
private static Object lock = new Object();
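LI_LAZY_INIT_STATIC flags lazy initialization of a static field without synchronization; the justification says the racy form is intentional here. A minimal sketch of what the detector objects to, and the conventional fix (class names hypothetical):

final class LazySketch {
  private static Helper helper;

  // What LI_LAZY_INIT_STATIC flags: an unsynchronized check-then-assign on
  // a static field. Two threads may both observe null and create two
  // instances, and a half-constructed Helper could be published.
  static Helper racyGet() {
    if (helper == null) {
      helper = new Helper();
    }
    return helper;
  }

  // Conventional fix: synchronize the lazy path (or use a holder idiom).
  static synchronized Helper safeGet() {
    if (helper == null) {
      helper = new Helper();
    }
    return helper;
  }

  static final class Helper {}
}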
Modified: hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java (original)
+++ hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java Fri Mar 15 17:23:02 2013
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionse
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.regionserver.MetricsRegionSourceImpl;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.impl.JmxCacheBuster;
import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
@@ -138,6 +139,13 @@ public class MetricsRegionSourceImpl imp
.compareTo(impl.regionWrapper.getRegionName());
}
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) return true;
+ if (!(obj instanceof MetricsRegionSourceImpl)) return false;
+ return compareTo((MetricsRegionSourceImpl)obj) == 0;
+ }
+
void snapshot(MetricsRecordBuilder mrb, boolean ignored) {
if (closed) return;
Modified: hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java (original)
+++ hbase/branches/0.95/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java Fri Mar 15 17:23:02 2013
@@ -35,6 +35,9 @@ import java.util.concurrent.TimeUnit;
* This class needs to be in the o.a.h.metrics2.impl namespace as many of the variables/calls used
* are package private.
*/
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(
+ value="LI_LAZY_INIT_STATIC",
+ justification="Yeah, its weird but its what we want")
public class JmxCacheBuster {
private static final Log LOG = LogFactory.getLog(JmxCacheBuster.class);
private static Object lock = new Object();
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java Fri Mar 15 17:23:02 2013
@@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.HConstant
import org.apache.hadoop.hbase.IpcProtocol;
import org.apache.hadoop.hbase.exceptions.CallerDisconnectedException;
import org.apache.hadoop.hbase.exceptions.ServerNotRunningYetException;
+import org.apache.hadoop.hbase.io.ByteBufferOutputStream;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader;
@@ -86,7 +87,6 @@ import org.apache.hadoop.hbase.security.
import org.apache.hadoop.hbase.security.SaslStatus;
import org.apache.hadoop.hbase.security.SaslUtil;
import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.util.ByteBufferOutputStream;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java Fri Mar 15 17:23:02 2013
@@ -100,9 +100,9 @@ public class ImportTsv extends Configure
private int timestampKeyColumnIndex = DEFAULT_TIMESTAMP_COLUMN_INDEX;
- public static String ROWKEY_COLUMN_SPEC = "HBASE_ROW_KEY";
+ public static final String ROWKEY_COLUMN_SPEC = "HBASE_ROW_KEY";
- public static String TIMESTAMPKEY_COLUMN_SPEC = "HBASE_TS_KEY";
+ public static final String TIMESTAMPKEY_COLUMN_SPEC = "HBASE_TS_KEY";
/**
* @param columnsSpecification the list of columns to parse out, comma separated.
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java Fri Mar 15 17:23:02 2013
@@ -41,7 +41,11 @@ public class KeyValueSortReducer extends
throws java.io.IOException, InterruptedException {
TreeSet<KeyValue> map = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
for (KeyValue kv: kvs) {
- map.add(kv.clone());
+ try {
+ map.add(kv.clone());
+ } catch (CloneNotSupportedException e) {
+ throw new java.io.IOException(e);
+ }
}
context.setStatus("Read " + map.getClass());
int index = 0;
@@ -50,4 +54,4 @@ public class KeyValueSortReducer extends
if (index > 0 && index % 100 == 0) context.setStatus("Wrote " + index);
}
}
-}
+}
\ No newline at end of file
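At this revision KeyValue.clone() evidently declares CloneNotSupportedException, which Reducer.reduce cannot let escape, so the hunk wraps it in an IOException. The same idiom in a generic form, as a sketch (class and method names hypothetical):

import java.io.IOException;
import java.util.concurrent.Callable;

final class WrapSketch {
  // Rethrow a checked exception that the enclosing signature cannot
  // declare as an IOException, preserving the original as the cause.
  static <T> T asIOException(Callable<T> body) throws IOException {
    try {
      return body.call();
    } catch (IOException e) {
      throw e;                   // already compatible; rethrow unchanged
    } catch (Exception e) {
      throw new IOException(e);  // wrap anything else, keep the cause
    }
  }
}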
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/DeleteTableHandler.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/DeleteTableHandler.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/DeleteTableHandler.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/DeleteTableHandler.java Fri Mar 15 17:23:02 2013
@@ -105,15 +105,16 @@ public class DeleteTableHandler extends
LOG.error("Couldn't delete " + tempTableDir);
}
- LOG.debug("Table '" + tableName + "' archived!");
+ LOG.debug("Table '" + Bytes.toString(tableName) + "' archived!");
} finally {
+ String tableNameStr = Bytes.toString(tableName);
// 6. Update table descriptor cache
- LOG.debug("Removing '" + tableName + "' descriptor.");
+ LOG.debug("Removing '" + tableNameStr + "' descriptor.");
this.masterServices.getTableDescriptors().remove(Bytes.toString(tableName));
// 7. If entry for this table in zk, and up in AssignmentManager, remove it.
- LOG.debug("Marking '" + tableName + "' as deleted.");
- am.getZKTable().setDeletedTable(Bytes.toString(tableName));
+ LOG.debug("Marking '" + tableNameStr + "' as deleted.");
+ am.getZKTable().setDeletedTable(tableNameStr);
}
if (cpHost != null) {
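The motivation for the hunk above: tableName is a byte [], and concatenating a byte [] into a String invokes Object.toString(), logging something like "[B@6d06d69c" instead of the table name. A minimal before/after sketch (values illustrative):

byte [] tableName = Bytes.toBytes("usertable");
// Implicit toString() on a byte [] prints an identity hash, e.g. "[B@6d06d69c".
String bad  = "Removing '" + tableName + "' descriptor.";
// Decoding first yields the readable form: "Removing 'usertable' descriptor."
String good = "Removing '" + Bytes.toString(tableName) + "' descriptor.";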
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java Fri Mar 15 17:23:02 2013
@@ -19,8 +19,9 @@
package org.apache.hadoop.hbase.regionserver;
import java.io.EOFException;
-import java.io.FileNotFoundException;
import java.io.IOException;
+import java.io.FileNotFoundException;
+import org.apache.hadoop.fs.permission.FsPermission;
import java.io.InterruptedIOException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
@@ -69,24 +70,17 @@ import org.apache.hadoop.fs.FSDataOutput
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hbase.CompoundConfiguration;
-import org.apache.hadoop.hbase.exceptions.DroppedSnapshotException;
-import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.exceptions.NoSuchColumnFamilyException;
-import org.apache.hadoop.hbase.exceptions.NotServingRegionException;
-import org.apache.hadoop.hbase.exceptions.RegionTooBusyException;
-import org.apache.hadoop.hbase.exceptions.UnknownScannerException;
+import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.backup.HFileArchiver;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
@@ -100,6 +94,13 @@ import org.apache.hadoop.hbase.client.Ro
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
+import org.apache.hadoop.hbase.exceptions.DroppedSnapshotException;
+import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
+import org.apache.hadoop.hbase.exceptions.NoSuchColumnFamilyException;
+import org.apache.hadoop.hbase.exceptions.NotServingRegionException;
+import org.apache.hadoop.hbase.exceptions.RegionTooBusyException;
+import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
+import org.apache.hadoop.hbase.exceptions.UnknownScannerException;
import org.apache.hadoop.hbase.exceptions.WrongRegionException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -112,7 +113,6 @@ import org.apache.hadoop.hbase.io.hfile.
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.ipc.HBaseServer;
import org.apache.hadoop.hbase.ipc.RpcCallContext;
-import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
@@ -1826,6 +1826,10 @@ public class HRegion implements HeapSize
}
/**
+ * Row needed by the method below.
+ */
+ private static final byte [] FOR_UNIT_TESTS_ONLY = Bytes.toBytes("ForUnitTestsOnly");
+ /**
* This is used only by unit tests. Not required to be a public API.
* @param familyMap map of family to edits for the given family.
* @param writeToWAL
@@ -1833,7 +1837,7 @@ public class HRegion implements HeapSize
*/
void delete(NavigableMap<byte[], List<? extends Cell>> familyMap, UUID clusterId,
boolean writeToWAL) throws IOException {
- Delete delete = new Delete(HConstants.EMPTY_BYTE_ARRAY);
+ Delete delete = new Delete(FOR_UNIT_TESTS_ONLY);
delete.setFamilyMap(familyMap);
delete.setClusterId(clusterId);
delete.setWriteToWAL(writeToWAL);
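Per the commit log, passing a null row to Put/Delete is now illegal, and the test changes below show that an empty row is rejected as well, so the test-only delete above can no longer be built from HConstants.EMPTY_BYTE_ARRAY. A sketch of the failure the placeholder row avoids (exception type inferred from the tests):

try {
  // With the stricter row checks, an empty row fails at construction time.
  Delete d = new Delete(HConstants.EMPTY_BYTE_ARRAY);
} catch (IllegalArgumentException expected) {
  // row length must be greater than zero
}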
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java Fri Mar 15 17:23:02 2013
@@ -51,7 +51,6 @@ import org.apache.thrift.TException;
* thrift server dies or is shut down before everything in the queue is drained.
*
*/
-
public class IncrementCoalescer implements IncrementCoalescerMBean {
/**
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java Fri Mar 15 17:23:02 2013
@@ -261,8 +261,9 @@ public class TestZooKeeper {
HTable ipMeta = new HTable(otherConf, HConstants.META_TABLE_NAME);
// dummy, just to open the connection
- localMeta.exists(new Get(HConstants.LAST_ROW));
- ipMeta.exists(new Get(HConstants.LAST_ROW));
+ final byte [] row = new byte [] {'r'};
+ localMeta.exists(new Get(row));
+ ipMeta.exists(new Get(row));
// make sure they aren't the same
ZooKeeperWatcher z1 =
@@ -359,8 +360,26 @@ public class TestZooKeeper {
"testMasterAddressManagerFromZK", null);
// Save the previous ACL
- Stat s = new Stat();
- List<ACL> oldACL = zk.getACL("/", s);
+ Stat s = null;
+ List<ACL> oldACL = null;
+ while (true) {
+ try {
+ s = new Stat();
+ oldACL = zk.getACL("/", s);
+ break;
+ } catch (KeeperException e) {
+ switch (e.code()) {
+ case CONNECTIONLOSS:
+ case SESSIONEXPIRED:
+ case OPERATIONTIMEOUT:
+ LOG.warn("Possibly transient ZooKeeper exception", e);
+ Threads.sleep(100);
+ break;
+ default:
+ throw e;
+ }
+ }
+ }
// I set this acl after the attempted creation of the cluster home node.
// Add retries in case of retryable zk exceptions.
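The loop above treats CONNECTIONLOSS, SESSIONEXPIRED and OPERATIONTIMEOUT as transient and retries after a short sleep, rethrowing anything else. The same pattern factored into a reusable helper, as a sketch (interface and names hypothetical):

import org.apache.zookeeper.KeeperException;

final class ZKRetrySketch {
  interface ZKCall<T> {
    T call() throws KeeperException, InterruptedException;
  }

  // Retry op while it fails with a transient ZooKeeper code, sleeping
  // briefly between attempts; rethrow anything else immediately.
  static <T> T retryTransient(ZKCall<T> op)
      throws KeeperException, InterruptedException {
    while (true) {
      try {
        return op.call();
      } catch (KeeperException e) {
        switch (e.code()) {
          case CONNECTIONLOSS:
          case SESSIONEXPIRED:
          case OPERATIONTIMEOUT:
            Thread.sleep(100);  // possibly transient; back off and retry
            break;
          default:
            throw e;            // a real failure
        }
      }
    }
  }
}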
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java Fri Mar 15 17:23:02 2013
@@ -33,6 +33,9 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.ScannerCallable;
+import org.apache.hadoop.hbase.ipc.HBaseClient;
+import org.apache.hadoop.hbase.ipc.HBaseServer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
@@ -40,6 +43,8 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.log4j.Level;
/**
* Test {@link MetaReader}, {@link MetaEditor}.
@@ -63,10 +68,12 @@ public class TestMetaReaderEditor {
public boolean isAborted() {
return abort.get();
}
-
};
@BeforeClass public static void beforeClass() throws Exception {
+ ((Log4JLogger)HBaseServer.LOG).getLogger().setLevel(Level.ALL);
+ ((Log4JLogger)HBaseClient.LOG).getLogger().setLevel(Level.ALL);
+ ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
UTIL.startMiniCluster(3);
Configuration c = new Configuration(UTIL.getConfiguration());
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java Fri Mar 15 17:23:02 2013
@@ -4306,6 +4306,8 @@ public class TestFromClientSide {
fail("Should have thrown IllegalArgumentException");
} catch (IllegalArgumentException iax) {
// success
+ } catch (NullPointerException npe) {
+ // success
}
// try null family
try {
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java Fri Mar 15 17:23:02 2013
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.client;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.Arrays;
@@ -388,20 +389,19 @@ public class TestFromClientSide3 {
table.put(put);
table.flushCommits();
- //Try getting the row with an empty row key and make sure the other base cases work as well
- Result res = table.get(new Get(new byte[0]));
- assertTrue(res.isEmpty() == true);
+ //Try getting the row with an empty row key
+ Result res = null;
+ try {
+ res = table.get(new Get(new byte[0]));
+ fail();
+ } catch (IllegalArgumentException e) {
+ // Expected.
+ }
+ assertTrue(res == null);
res = table.get(new Get(Bytes.toBytes("r1-not-exist")));
assertTrue(res.isEmpty() == true);
res = table.get(new Get(ROW_BYTES));
assertTrue(Arrays.equals(res.getValue(FAMILY, COL_QUAL), VAL_BYTES));
-
- //Now actually put in a row with an empty row key
- put = new Put(new byte[0]);
- put.add(FAMILY, COL_QUAL, VAL_BYTES);
- table.put(put);
- table.flushCommits();
- res = table.get(new Get(new byte[0]));
- assertTrue(Arrays.equals(res.getValue(FAMILY, COL_QUAL), VAL_BYTES));
+ table.close();
}
}
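The reworked test pins down the new client contract: an empty row key now fails fast with IllegalArgumentException instead of silently matching a row. A sketch of the kind of row validation the commit implies (helper name and messages assumed):

// Hypothetical validation helper: reject null and empty rows up front.
static byte [] checkRow(final byte [] row) {
  if (row == null) {
    throw new IllegalArgumentException("Row buffer is null");
  }
  if (row.length == 0) {
    throw new IllegalArgumentException("Row length is 0");
  }
  return row;
}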
Modified: hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java?rev=1457027&r1=1457026&r2=1457027&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java (original)
+++ hbase/branches/0.95/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java Fri Mar 15 17:23:02 2013
@@ -19,11 +19,12 @@
*/
package org.apache.hadoop.hbase.client;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
import java.util.ArrayList;
import java.util.List;
-import junit.framework.Assert;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -83,10 +84,12 @@ public class TestHTableMultiplexer {
// SinglePut case
for (int i = 0; i < NUM_REGIONS; i++) {
- Put put = new Put(startRows[i]);
+ byte [] row = startRows[i];
+ if (row == null || row.length <= 0) continue;
+ Put put = new Put(row);
put.add(FAMILY, QUALIFIER, VALUE1);
success = multiplexer.put(TABLE, put);
- Assert.assertTrue(success);
+ assertTrue(success);
// ensure the buffer has been flushed
verifyAllBufferedPutsHaveFlushed(status);
@@ -99,32 +102,35 @@ public class TestHTableMultiplexer {
do {
r = ht.get(get);
} while (r == null || r.getValue(FAMILY, QUALIFIER) == null);
- Assert.assertEquals(0, Bytes.compareTo(VALUE1, r.getValue(FAMILY, QUALIFIER)));
+ assertEquals(0, Bytes.compareTo(VALUE1, r.getValue(FAMILY, QUALIFIER)));
}
// MultiPut case
List<Put> multiput = new ArrayList<Put>();
for (int i = 0; i < NUM_REGIONS; i++) {
- Put put = new Put(endRows[i]);
+ byte [] row = endRows[i];
+ if (row == null || row.length <= 0) continue;
+ Put put = new Put(row);
put.add(FAMILY, QUALIFIER, VALUE2);
multiput.add(put);
}
failedPuts = multiplexer.put(TABLE, multiput);
- Assert.assertTrue(failedPuts == null);
+ assertTrue(failedPuts == null);
// ensure the buffer has been flushed
verifyAllBufferedPutsHaveFlushed(status);
// verify that the Get returns the correct result
for (int i = 0; i < NUM_REGIONS; i++) {
- Get get = new Get(endRows[i]);
+ byte [] row = endRows[i];
+ if (row == null || row.length <= 0) continue;
+ Get get = new Get(row);
get.addColumn(FAMILY, QUALIFIER);
Result r;
do {
r = ht.get(get);
} while (r == null || r.getValue(FAMILY, QUALIFIER) == null);
- Assert.assertEquals(0,
- Bytes.compareTo(VALUE2, r.getValue(FAMILY, QUALIFIER)));
+ assertEquals(0, Bytes.compareTo(VALUE2, r.getValue(FAMILY, QUALIFIER)));
}
}
@@ -141,7 +147,7 @@ public class TestHTableMultiplexer {
}
} while (status.getTotalBufferedCounter() != 0 && tries != retries);
- Assert.assertEquals("There are still some buffered puts left in the queue",
+ assertEquals("There are still some buffered puts left in the queue",
0, status.getTotalBufferedCounter());
}
}