You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by jm...@apache.org on 2015/10/29 19:16:18 UTC
[6/8] hbase git commit: HBASE-14675 Exorcise deprecated Put#add(...)
and replace with Put#addColumn(...)
HBASE-14675 Exorcise deprecated Put#add(...) and replace with Put#addColumn(...)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/de9555ce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/de9555ce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/de9555ce
Branch: refs/heads/master
Commit: de9555cec4c10f86226f2dc43ef1ffc69a4e65cb
Parents: 452e38f
Author: Jonathan M Hsieh <jm...@apache.org>
Authored: Sun Oct 25 14:56:12 2015 -0700
Committer: Jonathan M Hsieh <jm...@apache.org>
Committed: Thu Oct 29 11:15:31 2015 -0700
----------------------------------------------------------------------
.../apache/hadoop/hbase/MetaTableAccessor.java | 2 +-
.../org/apache/hadoop/hbase/client/Put.java | 53 +--
.../hadoop/hbase/client/TestAsyncProcess.java | 2 +-
.../hbase/client/TestClientNoCluster.java | 2 +-
.../hadoop/hbase/client/TestOperation.java | 2 +-
.../hadoop/hbase/client/TestPutDotHas.java | 2 +-
.../hadoop/hbase/mapreduce/IndexBuilder.java | 2 +-
.../hadoop/hbase/mapreduce/SampleUploader.java | 3 +-
.../example/TestBulkDeleteProtocol.java | 66 +--
.../example/TestRowCountEndpoint.java | 2 +-
.../TestZooKeeperScanPolicyObserver.java | 4 +-
.../hadoop/hbase/mttr/IntegrationTestMTTR.java | 2 +-
...egrationTestBigLinkedListWithVisibility.java | 7 +-
.../test/IntegrationTestLoadAndVerify.java | 4 +-
...tionTestWithCellVisibilityLoadAndVerify.java | 2 +-
.../trace/IntegrationTestSendTraceRequests.java | 2 +-
.../hbase/rest/PerformanceEvaluation.java | 4 +-
.../hadoop/hbase/rest/TestScannerResource.java | 2 +-
.../hbase/rest/TestScannersWithFilters.java | 8 +-
.../hbase/rest/TestScannersWithLabels.java | 2 +-
.../hadoop/hbase/rest/TestTableResource.java | 2 +-
.../rest/client/TestRemoteHTableRetries.java | 4 +-
.../hbase/rest/client/TestRemoteTable.java | 38 +-
.../apache/hadoop/hbase/quotas/QuotaUtil.java | 2 +-
.../RegionMergeTransactionImpl.java | 18 +-
.../regionserver/SplitTransactionImpl.java | 10 +-
.../org/apache/hadoop/hbase/HBaseTestCase.java | 5 +-
.../hadoop/hbase/HBaseTestingUtility.java | 9 +-
.../hadoop/hbase/PerformanceEvaluation.java | 4 +-
.../apache/hadoop/hbase/TestAcidGuarantees.java | 2 +-
.../hadoop/hbase/TestHBaseTestingUtility.java | 2 +-
.../apache/hadoop/hbase/TestMultiVersions.java | 3 +-
.../org/apache/hadoop/hbase/TestNamespace.java | 2 +-
.../org/apache/hadoop/hbase/TestZooKeeper.java | 6 +-
.../TestZooKeeperTableArchiveClient.java | 2 +-
.../apache/hadoop/hbase/client/TestAdmin1.java | 10 +-
.../apache/hadoop/hbase/client/TestAdmin2.java | 4 +-
.../client/TestBlockEvictionFromClient.java | 54 +--
.../hadoop/hbase/client/TestCheckAndMutate.java | 9 +-
.../client/TestClientOperationInterrupt.java | 2 +-
.../client/TestClientScannerRPCTimeout.java | 2 +-
.../hadoop/hbase/client/TestFastFail.java | 2 +-
.../hadoop/hbase/client/TestFromClientSide.java | 465 ++++++++++---------
.../hbase/client/TestFromClientSide3.java | 16 +-
.../hbase/client/TestFromClientSideNoCodec.java | 4 +-
.../org/apache/hadoop/hbase/client/TestHCM.java | 168 +++----
.../hbase/client/TestHTableMultiplexer.java | 6 +-
.../client/TestHTableMultiplexerFlushCache.java | 2 +-
.../hbase/client/TestMetaWithReplicas.java | 2 +-
.../hadoop/hbase/client/TestMultiParallel.java | 18 +-
.../hbase/client/TestMultipleTimestamps.java | 6 +-
.../client/TestPutDeleteEtcCellIteration.java | 6 +-
.../hadoop/hbase/client/TestPutWithDelete.java | 14 +-
.../hbase/client/TestReplicaWithCluster.java | 4 +-
.../hadoop/hbase/client/TestReplicasClient.java | 14 +-
.../hbase/client/TestRpcControllerFactory.java | 4 +-
.../hadoop/hbase/client/TestScannerTimeout.java | 4 +-
.../hbase/client/TestTimestampsFilter.java | 20 +-
.../hadoop/hbase/constraint/TestConstraint.java | 17 +-
.../coprocessor/TestAggregateProtocol.java | 6 +-
.../TestBatchCoprocessorEndpoint.java | 2 +-
.../TestDoubleColumnInterpreter.java | 5 +-
.../hbase/coprocessor/TestHTableWrapper.java | 18 +-
.../coprocessor/TestOpenTableInCoprocessor.java | 4 +-
.../coprocessor/TestRegionObserverBypass.java | 32 +-
.../TestRegionObserverInterface.java | 36 +-
.../TestRegionObserverScannerOpenHook.java | 8 +-
.../coprocessor/TestRegionObserverStacking.java | 2 +-
...gionServerCoprocessorExceptionWithAbort.java | 2 +-
.../coprocessor/TestRowProcessorEndpoint.java | 10 +-
.../hbase/coprocessor/TestWALObserver.java | 2 +-
.../hbase/filter/TestDependentColumnFilter.java | 18 +-
.../apache/hadoop/hbase/filter/TestFilter.java | 32 +-
.../hbase/filter/TestFilterWithScanLimits.java | 3 +-
.../hadoop/hbase/filter/TestFilterWrapper.java | 4 +-
.../TestFuzzyRowAndColumnRangeFilter.java | 2 +-
.../filter/TestFuzzyRowFilterEndToEnd.java | 4 +-
.../filter/TestInvocationRecordFilter.java | 4 +-
.../hadoop/hbase/filter/TestScanRowPrefix.java | 2 +-
.../hadoop/hbase/fs/TestBlockReorder.java | 2 +-
.../hbase/io/encoding/TestChangingEncoding.java | 3 +-
.../hbase/io/encoding/TestEncodedSeekers.java | 2 +-
.../io/hfile/TestForceCacheImportantBlocks.java | 4 +-
.../TestScannerSelectionUsingKeyRange.java | 4 +-
.../io/hfile/TestScannerSelectionUsingTTL.java | 4 +-
.../hbase/mapred/TestTableInputFormat.java | 4 +-
.../hbase/mapred/TestTableMapReduceUtil.java | 10 +-
.../hadoop/hbase/mapreduce/TestCellCounter.java | 163 +++----
.../hadoop/hbase/mapreduce/TestCopyTable.java | 20 +-
.../hbase/mapreduce/TestHFileOutputFormat.java | 2 +-
.../hbase/mapreduce/TestHFileOutputFormat2.java | 2 +-
.../hbase/mapreduce/TestImportExport.java | 63 ++-
.../mapreduce/TestMultithreadedTableMapper.java | 3 +-
.../hadoop/hbase/mapreduce/TestRowCounter.java | 14 +-
.../hbase/mapreduce/TestTableInputFormat.java | 5 +-
.../hbase/mapreduce/TestTableMapReduce.java | 2 +-
.../hbase/mapreduce/TestTableMapReduceBase.java | 2 +-
.../hbase/mapreduce/TestTimeRangeMapRed.java | 4 +-
.../hadoop/hbase/mapreduce/TestWALPlayer.java | 4 +-
.../hbase/master/TestAssignmentListener.java | 2 +-
.../master/TestGetLastFlushedSequenceId.java | 3 +-
.../hbase/master/TestMasterTransitions.java | 3 +-
.../hadoop/hbase/master/TestWarmupRegion.java | 2 +-
.../TestSimpleRegionNormalizerOnCluster.java | 2 +-
.../MasterProcedureTestingUtility.java | 2 +-
.../hadoop/hbase/quotas/TestQuotaThrottle.java | 2 +-
.../hbase/regionserver/TestAtomicOperation.java | 14 +-
.../hbase/regionserver/TestBlocksRead.java | 2 +-
.../hbase/regionserver/TestCompactionState.java | 2 +-
.../TestCorruptedRegionStoreFile.java | 2 +-
.../regionserver/TestEncryptionKeyRotation.java | 2 +-
.../TestEncryptionRandomKeying.java | 2 +-
.../hadoop/hbase/regionserver/TestHRegion.java | 233 +++++-----
.../regionserver/TestHRegionOnCluster.java | 2 +-
.../regionserver/TestHRegionReplayEvents.java | 6 +-
.../hbase/regionserver/TestJoinedScanners.java | 6 +-
.../hbase/regionserver/TestKeepDeletes.java | 112 ++---
.../hbase/regionserver/TestMinVersions.java | 56 +--
.../hbase/regionserver/TestParallelPut.java | 4 +-
.../TestRegionMergeTransaction.java | 2 +-
.../TestRegionMergeTransactionOnCluster.java | 2 +-
.../hbase/regionserver/TestRegionReplicas.java | 2 +-
.../regionserver/TestRegionServerMetrics.java | 24 +-
.../regionserver/TestRegionServerNoMaster.java | 2 +-
.../hbase/regionserver/TestRowTooBig.java | 6 +-
.../regionserver/TestSCVFWithMiniCluster.java | 22 +-
.../TestScannerRetriableFailure.java | 2 +-
.../regionserver/TestSeekOptimizations.java | 2 +-
.../TestSplitTransactionOnCluster.java | 22 +-
.../regionserver/TestSplitWalDataLoss.java | 6 +-
.../TestStoreFileRefresherChore.java | 2 +-
.../hadoop/hbase/regionserver/TestTags.java | 30 +-
.../hbase/regionserver/TestWideScanner.java | 3 +-
.../TestCompactionWithThroughputController.java | 35 +-
.../hbase/regionserver/wal/TestDurability.java | 2 +-
.../regionserver/wal/TestLogRollAbort.java | 4 +-
.../regionserver/wal/TestLogRollPeriod.java | 2 +-
.../hbase/regionserver/wal/TestLogRolling.java | 6 +-
.../hbase/regionserver/wal/TestWALReplay.java | 12 +-
.../replication/TestMasterReplication.java | 4 +-
.../replication/TestMultiSlaveReplication.java | 4 +-
.../replication/TestPerTableCFReplication.java | 2 +-
...estReplicationChangingPeerRegionservers.java | 2 +-
.../TestReplicationDisableInactivePeer.java | 2 +-
.../replication/TestReplicationEndpoint.java | 2 +-
.../replication/TestReplicationSmallTests.java | 24 +-
.../replication/TestReplicationSyncUpTool.java | 16 +-
.../replication/TestReplicationWithTags.java | 2 +-
.../access/TestAccessControlFilter.java | 4 +-
.../security/access/TestAccessController.java | 18 +-
.../security/access/TestAccessController2.java | 4 +-
.../access/TestCellACLWithMultipleVersions.java | 106 ++---
.../hbase/security/access/TestCellACLs.java | 10 +-
.../access/TestScanEarlyTermination.java | 6 +-
.../security/access/TestTablePermissions.java | 4 +-
.../access/TestWithDisabledAuthorization.java | 8 +-
.../TestDefaultScanLabelGeneratorStack.java | 6 +-
.../TestEnforcingScanLabelGenerator.java | 6 +-
.../visibility/TestVisibilityLabels.java | 50 +-
.../TestVisibilityLabelsReplication.java | 2 +-
.../visibility/TestVisibilityLabelsWithACL.java | 2 +-
.../TestVisibilityLabelsWithDeletes.java | 78 ++--
.../TestVisibilityLabelsWithSLGStack.java | 4 +-
.../TestVisibilityLablesWithGroups.java | 6 +-
.../TestVisibilityWithCheckAuths.java | 4 +-
.../hbase/snapshot/SnapshotTestingUtils.java | 2 +-
.../hadoop/hbase/trace/TestHTraceHooks.java | 3 +-
.../hadoop/hbase/util/BaseTestHBaseFsck.java | 6 +-
.../hadoop/hbase/util/MultiThreadedUpdater.java | 2 +-
.../hadoop/hbase/util/MultiThreadedWriter.java | 6 +-
.../hbase/util/TestCoprocessorScanPolicy.java | 14 +-
.../hbase/util/TestHBaseFsckEncryption.java | 4 +-
.../hadoop/hbase/util/TestHBaseFsckOneRS.java | 10 +-
.../hadoop/hbase/util/TestHBaseFsckTwoRS.java | 6 +-
.../hadoop/hbase/util/TestMergeTable.java | 2 +-
.../apache/hadoop/hbase/util/TestMergeTool.java | 2 +-
.../util/hbck/OfflineMetaRebuildTestCore.java | 4 +-
.../hadoop/hbase/wal/TestWALFiltering.java | 2 +-
.../hbase/wal/WALPerformanceEvaluation.java | 3 +-
hbase-shell/src/main/ruby/hbase/admin.rb | 4 +-
hbase-shell/src/main/ruby/hbase/table.rb | 4 +-
181 files changed, 1365 insertions(+), 1380 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index 2fbfd9f..d3d7608 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -1713,7 +1713,7 @@ public class MetaTableAccessor {
public static Put makePutFromTableState(TableState state) {
long time = EnvironmentEdgeManager.currentTime();
Put put = new Put(state.getTableName().getName(), time);
- put.add(getTableFamily(), getStateColumn(), state.convert().toByteArray());
+ put.addColumn(getTableFamily(), getStateColumn(), state.convert().toByteArray());
return put;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
index 5e0a341..a2ca975 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
@@ -44,8 +44,8 @@ import org.apache.hadoop.hbase.util.Bytes;
* Used to perform Put operations for a single row.
* <p>
* To perform a Put, instantiate a Put object with the row to insert to and
- * for each column to be inserted, execute {@link #add(byte[], byte[], byte[]) add} or
- * {@link #add(byte[], byte[], long, byte[]) add} if setting the timestamp.
+ * for each column to be inserted, execute {@link #addColumn(byte[], byte[], byte[]) add} or
+ * {@link #addColumn(byte[], byte[], long, byte[]) add} if setting the timestamp.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
@@ -137,26 +137,13 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
* @param qualifier column qualifier
* @param value column value
* @return this
- * @deprecated Since 1.0.0. Use {@link #addColumn(byte[], byte[], byte[])}
- */
- @Deprecated
- public Put add(byte [] family, byte [] qualifier, byte [] value) {
- return addColumn(family, qualifier, value);
- }
-
- /**
- * Add the specified column and value to this Put operation.
- * @param family family name
- * @param qualifier column qualifier
- * @param value column value
- * @return this
*/
public Put addColumn(byte [] family, byte [] qualifier, byte [] value) {
return addColumn(family, qualifier, this.ts, value);
}
/**
- * See {@link #add(byte[], byte[], byte[])}. This version expects
+ * See {@link #addColumn(byte[], byte[], byte[])}. This version expects
* that the underlying arrays won't change. It's intended
* for usage internal HBase to and for advanced client applications.
*/
@@ -183,21 +170,6 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
* @param ts version timestamp
* @param value column value
* @return this
- * @deprecated Since 1.0.0. Use {@link #addColumn(byte[], byte[], long, byte[])}
- */
- @Deprecated
- public Put add(byte [] family, byte [] qualifier, long ts, byte [] value) {
- return addColumn(family, qualifier, ts, value);
- }
-
- /**
- * Add the specified column and value, with the specified timestamp as
- * its version to this Put operation.
- * @param family family name
- * @param qualifier column qualifier
- * @param ts version timestamp
- * @param value column value
- * @return this
*/
public Put addColumn(byte [] family, byte [] qualifier, long ts, byte [] value) {
if (ts < 0) {
@@ -211,7 +183,7 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
}
/**
- * See {@link #add(byte[], byte[], long, byte[])}. This version expects
+ * See {@link #addColumn(byte[], byte[], long, byte[])}. This version expects
* that the underlying arrays won't change. It's intended
* for usage internal HBase to and for advanced client applications.
*/
@@ -269,21 +241,6 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
* @param ts version timestamp
* @param value column value
* @return this
- * @deprecated Since 1.0.0. Use {@link Put#addColumn(byte[], ByteBuffer, long, ByteBuffer)}
- */
- @Deprecated
- public Put add(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value) {
- return addColumn(family, qualifier, ts, value);
- }
-
- /**
- * Add the specified column and value, with the specified timestamp as
- * its version to this Put operation.
- * @param family family name
- * @param qualifier column qualifier
- * @param ts version timestamp
- * @param value column value
- * @return this
*/
public Put addColumn(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value) {
if (ts < 0) {
@@ -297,7 +254,7 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
}
/**
- * See {@link #add(byte[], ByteBuffer, long, ByteBuffer)}. This version expects
+ * See {@link #addColumn(byte[], ByteBuffer, long, ByteBuffer)}. This version expects
* that the underlying arrays won't change. It's intended
* for usage internal HBase to and for advanced client applications.
*/
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index a20ca4f..b784f7a 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -1071,7 +1071,7 @@ public class TestAsyncProcess {
throw new IllegalArgumentException("unknown " + regCnt);
}
- p.add(DUMMY_BYTES_1, DUMMY_BYTES_1, DUMMY_BYTES_1);
+ p.addColumn(DUMMY_BYTES_1, DUMMY_BYTES_1, DUMMY_BYTES_1);
return p;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
index e941440..0a5a37f 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
@@ -703,7 +703,7 @@ public class TestClientNoCluster extends Configured implements Tool {
for (int i = 0; i < namespaceSpan; i++) {
byte [] b = format(rd.nextLong());
Put p = new Put(b);
- p.add(HConstants.CATALOG_FAMILY, b, b);
+ p.addColumn(HConstants.CATALOG_FAMILY, b, b);
mutator.mutate(p);
if (i % printInterval == 0) {
LOG.info("Put " + printInterval + "/" + stopWatch.elapsedMillis());
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index ae0d52c..13a2567 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -337,7 +337,7 @@ public class TestOperation {
// produce a Put operation
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
// get its JSON representation, and parse it
json = put.toJSON();
parsedJSON = mapper.readValue(json, HashMap.class);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java
index c269e62..b90374b 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java
@@ -41,7 +41,7 @@ public class TestPutDotHas {
@Before
public void setUp() {
- put.add(FAMILY_01, QUALIFIER_01, TS, VALUE_01);
+ put.addColumn(FAMILY_01, QUALIFIER_01, TS, VALUE_01);
}
@Test
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
index 07c1755..1dab633 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
@@ -89,7 +89,7 @@ public class IndexBuilder extends Configured implements Tool {
// original: row 123 attribute:phone 555-1212
// index: row 555-1212 INDEX:ROW 123
Put put = new Put(value);
- put.add(INDEX_COLUMN, INDEX_QUALIFIER, rowKey.get());
+ put.addColumn(INDEX_COLUMN, INDEX_QUALIFIER, rowKey.get());
context.write(tableName, put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
index 011147a..18eb5a6 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
@@ -91,7 +90,7 @@ public class SampleUploader extends Configured implements Tool {
// Create Put
Put put = new Put(row);
- put.add(family, qualifier, value);
+ put.addColumn(family, qualifier, value);
// Uncomment below to disable WAL. This will improve performance but means
// you will experience data loss in the case of a RegionServer crash.
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
index 930b899..317081b 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
@@ -223,8 +223,8 @@ public class TestBulkDeleteProtocol {
List<Put> puts = new ArrayList<Put>(100);
for (int j = 0; j < 100; j++) {
Put put = new Put(Bytes.toBytes(j));
- put.add(FAMILY1, QUALIFIER1, "v1".getBytes());
- put.add(FAMILY2, QUALIFIER2, "v2".getBytes());
+ put.addColumn(FAMILY1, QUALIFIER1, "v1".getBytes());
+ put.addColumn(FAMILY2, QUALIFIER2, "v2".getBytes());
puts.add(put);
}
ht.put(puts);
@@ -251,15 +251,15 @@ public class TestBulkDeleteProtocol {
for (int j = 0; j < 100; j++) {
Put put = new Put(Bytes.toBytes(j));
byte[] value = "v1".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1234L, value);
- put.add(FAMILY1, QUALIFIER2, 1234L, value);
- put.add(FAMILY1, QUALIFIER3, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1234L, value);
// Latest version values
value = "v2".getBytes();
- put.add(FAMILY1, QUALIFIER1, value);
- put.add(FAMILY1, QUALIFIER2, value);
- put.add(FAMILY1, QUALIFIER3, value);
- put.add(FAMILY1, null, value);
+ put.addColumn(FAMILY1, QUALIFIER1, value);
+ put.addColumn(FAMILY1, QUALIFIER2, value);
+ put.addColumn(FAMILY1, QUALIFIER3, value);
+ put.addColumn(FAMILY1, null, value);
puts.add(put);
}
ht.put(puts);
@@ -300,19 +300,19 @@ public class TestBulkDeleteProtocol {
Put put = new Put(Bytes.toBytes(j));
// TS = 1000L
byte[] value = "v1".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1000L, value);
- put.add(FAMILY1, QUALIFIER2, 1000L, value);
- put.add(FAMILY1, QUALIFIER3, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1000L, value);
// TS = 1234L
value = "v2".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1234L, value);
- put.add(FAMILY1, QUALIFIER2, 1234L, value);
- put.add(FAMILY1, QUALIFIER3, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1234L, value);
// Latest version values
value = "v3".getBytes();
- put.add(FAMILY1, QUALIFIER1, value);
- put.add(FAMILY1, QUALIFIER2, value);
- put.add(FAMILY1, QUALIFIER3, value);
+ put.addColumn(FAMILY1, QUALIFIER1, value);
+ put.addColumn(FAMILY1, QUALIFIER2, value);
+ put.addColumn(FAMILY1, QUALIFIER3, value);
puts.add(put);
}
ht.put(puts);
@@ -347,24 +347,24 @@ public class TestBulkDeleteProtocol {
Put put = new Put(Bytes.toBytes(j));
// TS = 1000L
byte[] value = "v1".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1000L, value);
- put.add(FAMILY1, QUALIFIER2, 1000L, value);
- put.add(FAMILY1, QUALIFIER3, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1000L, value);
// TS = 1234L
value = "v2".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1234L, value);
- put.add(FAMILY1, QUALIFIER2, 1234L, value);
- put.add(FAMILY1, QUALIFIER3, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1234L, value);
// TS = 2000L
value = "v3".getBytes();
- put.add(FAMILY1, QUALIFIER1, 2000L, value);
- put.add(FAMILY1, QUALIFIER2, 2000L, value);
- put.add(FAMILY1, QUALIFIER3, 2000L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 2000L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 2000L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 2000L, value);
// Latest version values
value = "v4".getBytes();
- put.add(FAMILY1, QUALIFIER1, value);
- put.add(FAMILY1, QUALIFIER2, value);
- put.add(FAMILY1, QUALIFIER3, value);
+ put.addColumn(FAMILY1, QUALIFIER1, value);
+ put.addColumn(FAMILY1, QUALIFIER2, value);
+ put.addColumn(FAMILY1, QUALIFIER3, value);
puts.add(put);
}
ht.put(puts);
@@ -435,9 +435,9 @@ public class TestBulkDeleteProtocol {
private Put createPut(byte[] rowkey, String value) throws IOException {
Put put = new Put(rowkey);
- put.add(FAMILY1, QUALIFIER1, value.getBytes());
- put.add(FAMILY1, QUALIFIER2, value.getBytes());
- put.add(FAMILY1, QUALIFIER3, value.getBytes());
+ put.addColumn(FAMILY1, QUALIFIER1, value.getBytes());
+ put.addColumn(FAMILY1, QUALIFIER2, value.getBytes());
+ put.addColumn(FAMILY1, QUALIFIER3, value.getBytes());
return put;
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
index fd15234..1776ced 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
@@ -76,7 +76,7 @@ public class TestRowCountEndpoint {
for (int i=0; i<5; i++) {
byte[] iBytes = Bytes.toBytes(i);
Put p = new Put(iBytes);
- p.add(TEST_FAMILY, TEST_COLUMN, iBytes);
+ p.addColumn(TEST_FAMILY, TEST_COLUMN, iBytes);
table.put(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
index 0430a41..e97d528 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
@@ -93,10 +93,10 @@ public class TestZooKeeperScanPolicyObserver {
long ts = now - 2000;
Put p = new Put(R);
- p.add(F, Q, ts, Q);
+ p.addColumn(F, Q, ts, Q);
t.put(p);
p = new Put(R);
- p.add(F, Q, ts+1, Q);
+ p.addColumn(F, Q, ts + 1, Q);
t.put(p);
// these two should be expired but for the override
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index 4423650..437f200 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -496,7 +496,7 @@ public class IntegrationTestMTTR {
@Override
protected boolean doAction() throws Exception {
Put p = new Put(Bytes.toBytes(RandomStringUtils.randomAlphanumeric(5)));
- p.add(FAMILY, Bytes.toBytes("\0"), Bytes.toBytes(RandomStringUtils.randomAscii(5)));
+ p.addColumn(FAMILY, Bytes.toBytes("\0"), Bytes.toBytes(RandomStringUtils.randomAscii(5)));
table.put(p);
return true;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
index adc0eb7..c908474 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
@@ -211,13 +211,14 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
for (int i = 0; i < current.length; i++) {
for (int j = 0; j < DEFAULT_TABLES_COUNT; j++) {
Put put = new Put(current[i]);
- put.add(FAMILY_NAME, COLUMN_PREV, prev == null ? NO_KEY : prev[i]);
+ byte[] value = prev == null ? NO_KEY : prev[i];
+ put.addColumn(FAMILY_NAME, COLUMN_PREV, value);
if (count >= 0) {
- put.add(FAMILY_NAME, COLUMN_COUNT, Bytes.toBytes(count + i));
+ put.addColumn(FAMILY_NAME, COLUMN_COUNT, Bytes.toBytes(count + i));
}
if (id != null) {
- put.add(FAMILY_NAME, COLUMN_CLIENT, id);
+ put.addColumn(FAMILY_NAME, COLUMN_CLIENT, id);
}
visibilityExps = split[j * 2] + OR + split[(j * 2) + 1];
put.setCellVisibility(new CellVisibility(visibilityExps));
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
index 623a370..e279dfb 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
@@ -242,12 +242,12 @@ public void cleanUpCluster() throws Exception {
Bytes.putLong(row, 0, byteSwapped);
Put p = new Put(row);
- p.add(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
+ p.addColumn(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
if (blockStart > 0) {
for (int j = 0; j < numBackReferencesPerRow; j++) {
long referredRow = blockStart - BLOCK_SIZE + rand.nextInt(BLOCK_SIZE);
Bytes.putLong(row, 0, swapLong(referredRow));
- p.add(TEST_FAMILY, row, HConstants.EMPTY_BYTE_ARRAY);
+ p.addColumn(TEST_FAMILY, row, HConstants.EMPTY_BYTE_ARRAY);
}
refsWritten.increment(1);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
index b797740..52a705b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
@@ -174,7 +174,7 @@ public class IntegrationTestWithCellVisibilityLoadAndVerify extends IntegrationT
String exp = VISIBILITY_EXPS[expIdx];
byte[] row = Bytes.add(Bytes.toBytes(i), Bytes.toBytes(suffix), Bytes.toBytes(exp));
Put p = new Put(row);
- p.add(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
+ p.addColumn(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
p.setCellVisibility(new CellVisibility(exp));
getCounter(expIdx).increment(1);
mutator.mutate(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
index 3fa8a9c..f325aac 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
@@ -245,7 +245,7 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
Put p = new Put(Bytes.toBytes(rk));
for (int y = 0; y < 10; y++) {
random.nextBytes(value);
- p.add(familyName, Bytes.toBytes(random.nextLong()), value);
+ p.addColumn(familyName, Bytes.toBytes(random.nextLong()), value);
}
ht.mutate(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
index e91f873..8424bf9 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
@@ -1131,7 +1131,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
value, tags);
put.add(kv);
} else {
- put.add(FAMILY_NAME, QUALIFIER_NAME, value);
+ put.addColumn(FAMILY_NAME, QUALIFIER_NAME, value);
}
put.setDurability(writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
mutator.mutate(put);
@@ -1202,7 +1202,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
value, tags);
put.add(kv);
} else {
- put.add(FAMILY_NAME, QUALIFIER_NAME, value);
+ put.addColumn(FAMILY_NAME, QUALIFIER_NAME, value);
}
put.setDurability(writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
mutator.mutate(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
index 4f4f698..5114b11 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
@@ -100,7 +100,7 @@ public class TestScannerResource {
k[2] = b3;
Put put = new Put(k);
put.setDurability(Durability.SKIP_WAL);
- put.add(famAndQf[0], famAndQf[1], k);
+ put.addColumn(famAndQf[0], famAndQf[1], k);
puts.add(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
index 22ee31d..9b68806 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
@@ -149,7 +149,7 @@ public class TestScannersWithFilters {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
- p.add(FAMILIES[0], QUALIFIER, VALUES[0]);
+ p.addColumn(FAMILIES[0], QUALIFIER, VALUES[0]);
}
table.put(p);
}
@@ -157,7 +157,7 @@ public class TestScannersWithFilters {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
- p.add(FAMILIES[1], QUALIFIER, VALUES[1]);
+ p.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
}
table.put(p);
}
@@ -167,7 +167,7 @@ public class TestScannersWithFilters {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
- p.add(FAMILIES[1], QUALIFIER, VALUES[0]);
+ p.addColumn(FAMILIES[1], QUALIFIER, VALUES[0]);
}
table.put(p);
}
@@ -175,7 +175,7 @@ public class TestScannersWithFilters {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
- p.add(FAMILIES[0], QUALIFIER, VALUES[1]);
+ p.addColumn(FAMILIES[0], QUALIFIER, VALUES[1]);
}
table.put(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
index 83c72e3..cb235aa 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
@@ -98,7 +98,7 @@ public class TestScannersWithLabels {
for (int i = 0; i < 9; i++) {
Put put = new Put(Bytes.toBytes("row" + i));
put.setDurability(Durability.SKIP_WAL);
- put.add(famAndQf[0], famAndQf[1], k);
+ put.addColumn(famAndQf[0], famAndQf[1], k);
put.setCellVisibility(new CellVisibility("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!"
+ TOPSECRET));
puts.add(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
index b0b8fef..8fe5c98 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
@@ -108,7 +108,7 @@ public class TestTableResource {
k[2] = b3;
Put put = new Put(k);
put.setDurability(Durability.SKIP_WAL);
- put.add(famAndQf[0], famAndQf[1], k);
+ put.addColumn(famAndQf[0], famAndQf[1], k);
puts.add(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
index 5b18a6a..4a595f3 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
@@ -151,7 +151,7 @@ public class TestRemoteHTableRetries {
@Override
public void run() throws Exception {
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, put );
}
});
@@ -164,7 +164,7 @@ public class TestRemoteHTableRetries {
@Override
public void run() throws Exception {
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
Delete delete= new Delete(ROW_1);
remoteTable.checkAndDelete(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, delete );
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
index 297162b..19d0587 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
@@ -100,12 +100,12 @@ public class TestRemoteTable {
admin.createTable(htd);
try (Table table = TEST_UTIL.getConnection().getTable(TABLE)) {
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, TS_2, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, TS_2, VALUE_1);
table.put(put);
put = new Put(ROW_2);
- put.add(COLUMN_1, QUALIFIER_1, TS_1, VALUE_1);
- put.add(COLUMN_1, QUALIFIER_1, TS_2, VALUE_2);
- put.add(COLUMN_2, QUALIFIER_2, TS_2, VALUE_2);
+ put.addColumn(COLUMN_1, QUALIFIER_1, TS_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, TS_2, VALUE_2);
+ put.addColumn(COLUMN_2, QUALIFIER_2, TS_2, VALUE_2);
table.put(put);
}
remoteTable = new RemoteHTable(
@@ -282,7 +282,7 @@ public class TestRemoteTable {
@Test
public void testPut() throws IOException {
Put put = new Put(ROW_3);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
remoteTable.put(put);
Get get = new Get(ROW_3);
@@ -296,13 +296,13 @@ public class TestRemoteTable {
List<Put> puts = new ArrayList<Put>();
put = new Put(ROW_3);
- put.add(COLUMN_2, QUALIFIER_2, VALUE_2);
+ put.addColumn(COLUMN_2, QUALIFIER_2, VALUE_2);
puts.add(put);
put = new Put(ROW_4);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_4);
- put.add(COLUMN_2, QUALIFIER_2, VALUE_2);
+ put.addColumn(COLUMN_2, QUALIFIER_2, VALUE_2);
puts.add(put);
remoteTable.put(puts);
@@ -327,8 +327,8 @@ public class TestRemoteTable {
@Test
public void testDelete() throws IOException {
Put put = new Put(ROW_3);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
- put.add(COLUMN_2, QUALIFIER_2, VALUE_2);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_2, QUALIFIER_2, VALUE_2);
remoteTable.put(put);
Get get = new Get(ROW_3);
@@ -390,16 +390,16 @@ public class TestRemoteTable {
public void testScanner() throws IOException {
List<Put> puts = new ArrayList<Put>();
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_2);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_3);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_4);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
remoteTable.put(puts);
@@ -465,7 +465,7 @@ public class TestRemoteTable {
assertFalse(remoteTable.exists(get));
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
remoteTable.put(put);
assertTrue(remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1,
@@ -481,16 +481,16 @@ public class TestRemoteTable {
public void testIteratorScaner() throws IOException {
List<Put> puts = new ArrayList<Put>();
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_2);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_3);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_4);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
remoteTable.put(puts);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
index bff648d..6d219e1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
@@ -144,7 +144,7 @@ public class QuotaUtil extends QuotaTableUtil {
private static void addQuotas(final Connection connection, final byte[] rowKey,
final byte[] qualifier, final Quotas data) throws IOException {
Put put = new Put(rowKey);
- put.add(QUOTA_FAMILY_INFO, qualifier, quotasToData(data));
+ put.addColumn(QUOTA_FAMILY_INFO, qualifier, quotasToData(data));
doPut(connection, put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java
index 28cb9b9..5c177d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java
@@ -410,10 +410,10 @@ public class RegionMergeTransactionImpl implements RegionMergeTransaction {
// Put for parent
Put putOfMerged = MetaTableAccessor.makePutFromRegionInfo(copyOfMerged, time);
- putOfMerged.add(HConstants.CATALOG_FAMILY, HConstants.MERGEA_QUALIFIER,
- regionA.toByteArray());
- putOfMerged.add(HConstants.CATALOG_FAMILY, HConstants.MERGEB_QUALIFIER,
- regionB.toByteArray());
+ putOfMerged.addColumn(HConstants.CATALOG_FAMILY, HConstants.MERGEA_QUALIFIER,
+ regionA.toByteArray());
+ putOfMerged.addColumn(HConstants.CATALOG_FAMILY, HConstants.MERGEB_QUALIFIER,
+ regionB.toByteArray());
mutations.add(putOfMerged);
// Deletes for merging regions
Delete deleteA = MetaTableAccessor.makeDeleteFromRegionInfo(regionA, time);
@@ -426,11 +426,11 @@ public class RegionMergeTransactionImpl implements RegionMergeTransaction {
@VisibleForTesting
Put addLocation(final Put p, final ServerName sn, long openSeqNum) {
- p.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes
- .toBytes(sn.getHostAndPort()));
- p.add(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn
- .getStartcode()));
- p.add(HConstants.CATALOG_FAMILY, HConstants.SEQNUM_QUALIFIER, Bytes.toBytes(openSeqNum));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes
+ .toBytes(sn.getHostAndPort()));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn
+ .getStartcode()));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.SEQNUM_QUALIFIER, Bytes.toBytes(openSeqNum));
return p;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java
index fbfea8e..70d040e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java
@@ -329,11 +329,11 @@ public class SplitTransactionImpl implements SplitTransaction {
@VisibleForTesting
Put addLocation(final Put p, final ServerName sn, long openSeqNum) {
- p.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes
- .toBytes(sn.getHostAndPort()));
- p.add(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn
- .getStartcode()));
- p.add(HConstants.CATALOG_FAMILY, HConstants.SEQNUM_QUALIFIER, Bytes.toBytes(openSeqNum));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes
+ .toBytes(sn.getHostAndPort()));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn
+ .getStartcode()));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.SEQNUM_QUALIFIER, Bytes.toBytes(openSeqNum));
return p;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index 1fb096d..153f36b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -328,9 +328,10 @@ public abstract class HBaseTestCase extends TestCase {
byte[][] split =
KeyValue.parseColumn(Bytes.toBytes(sb.toString()));
if(split.length == 1) {
- put.add(split[0], new byte[0], t);
+ byte[] qualifier = new byte[0];
+ put.addColumn(split[0], qualifier, t);
} else {
- put.add(split[0], split[1], t);
+ put.addColumn(split[0], split[1], t);
}
put.setDurability(Durability.SKIP_WAL);
updater.put(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index eb1494e..6f02ab0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1951,7 +1951,8 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
Put put = new Put(row);
put.setDurability(writeToWAL ? Durability.USE_DEFAULT : Durability.SKIP_WAL);
for (int i = 0; i < f.length; i++) {
- put.add(f[i], null, value != null ? value : row);
+ byte[] value1 = value != null ? value : row;
+ put.addColumn(f[i], null, value1);
}
puts.add(put);
}
@@ -2038,7 +2039,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
k[2] = b3;
Put put = new Put(k);
put.setDurability(Durability.SKIP_WAL);
- put.add(f, null, k);
+ put.addColumn(f, null, k);
if (r.getWAL() == null) {
put.setDurability(Durability.SKIP_WAL);
}
@@ -2068,7 +2069,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
for (int i = startRow; i < endRow; i++) {
byte[] data = Bytes.toBytes(String.valueOf(i));
Put put = new Put(data);
- put.add(f, null, data);
+ put.addColumn(f, null, data);
t.put(put);
}
}
@@ -3415,7 +3416,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
final byte[] value = Bytes.toBytes("value_for_row_" + iRow +
"_cf_" + Bytes.toStringBinary(cf) + "_col_" + iCol + "_ts_" +
ts + "_random_" + rand.nextLong());
- put.add(cf, qual, ts, value);
+ put.addColumn(cf, qual, ts, value);
} else if (rand.nextDouble() < 0.8) {
del.deleteColumn(cf, qual, ts);
} else {
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 569ef71..1c1c56c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -1413,7 +1413,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
put.add(kv);
updateValueSize(kv.getValueLength());
} else {
- put.add(FAMILY_NAME, qualifier, value);
+ put.addColumn(FAMILY_NAME, qualifier, value);
updateValueSize(value.length);
}
}
@@ -1501,7 +1501,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
put.add(kv);
updateValueSize(kv.getValueLength());
} else {
- put.add(FAMILY_NAME, qualifier, value);
+ put.addColumn(FAMILY_NAME, qualifier, value);
updateValueSize(value.length);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
index dbb6156..27b480d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
@@ -135,7 +135,7 @@ public class TestAcidGuarantees implements Tool {
for (byte[] family : targetFamilies) {
for (int i = 0; i < NUM_COLS_TO_CHECK; i++) {
byte qualifier[] = Bytes.toBytes("col" + i);
- p.add(family, qualifier, data);
+ p.addColumn(family, qualifier, data);
}
}
table.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
index 3a9c565..dd09c37 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
@@ -98,7 +98,7 @@ public class TestHBaseTestingUtility {
Table table2 = htu2.createTable(TABLE_NAME, FAM_NAME);
Put put = new Put(ROW);
- put.add(FAM_NAME, QUAL_NAME, VALUE);
+ put.addColumn(FAM_NAME, QUAL_NAME, VALUE);
table1.put(put);
Get get = new Get(ROW);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
index 439e722..9bd2fe9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
@@ -218,8 +218,7 @@ public class TestMultiVersions {
for (int i = 0; i < startKeys.length; i++) {
for (int j = 0; j < timestamp.length; j++) {
Put put = new Put(rows[i], timestamp[j]);
- put.addColumn(HConstants.CATALOG_FAMILY, null, timestamp[j],
- Bytes.toBytes(timestamp[j]));
+ put.addColumn(HConstants.CATALOG_FAMILY, null, timestamp[j], Bytes.toBytes(timestamp[j]));
puts.add(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
index 5208ec4..c24d8a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
@@ -248,7 +248,7 @@ public class TestNamespace {
//sanity check try to write and read from table
Table table = TEST_UTIL.getConnection().getTable(desc.getTableName());
Put p = new Put(Bytes.toBytes("row1"));
- p.add(Bytes.toBytes("my_cf"),Bytes.toBytes("my_col"),Bytes.toBytes("value1"));
+ p.addColumn(Bytes.toBytes("my_cf"), Bytes.toBytes("my_col"), Bytes.toBytes("value1"));
table.put(p);
//flush and read from disk to make sure directory changes are working
admin.flush(desc.getTableName());
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
index ec3521c..6d4cab3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
@@ -261,8 +261,7 @@ public class TestZooKeeper {
Table table = TEST_UTIL.getConnection().getTable(desc.getTableName());
Put put = new Put(Bytes.toBytes("testrow"));
- put.add(Bytes.toBytes("fam"),
- Bytes.toBytes("col"), Bytes.toBytes("testdata"));
+ put.addColumn(Bytes.toBytes("fam"), Bytes.toBytes("col"), Bytes.toBytes("testdata"));
LOG.info("Putting table " + tableName);
table.put(put);
table.close();
@@ -611,7 +610,8 @@ public class TestZooKeeper {
int numberOfPuts;
for (numberOfPuts = 0; numberOfPuts < 6; numberOfPuts++) {
p = new Put(Bytes.toBytes(numberOfPuts));
- p.add(Bytes.toBytes("col"), Bytes.toBytes("ql"), Bytes.toBytes("value" + numberOfPuts));
+ p.addColumn(Bytes.toBytes("col"), Bytes.toBytes("ql"),
+ Bytes.toBytes("value" + numberOfPuts));
table.put(p);
}
m.getZooKeeper().close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
index eba3c0b..a28112d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
@@ -403,7 +403,7 @@ public class TestZooKeeperTableArchiveClient {
private void createHFileInRegion(Region region, byte[] columnFamily) throws IOException {
// put one row in the region
Put p = new Put(Bytes.toBytes("row"));
- p.add(columnFamily, Bytes.toBytes("Qual"), Bytes.toBytes("v1"));
+ p.addColumn(columnFamily, Bytes.toBytes("Qual"), Bytes.toBytes("v1"));
region.put(p);
// flush the region to make a store file
region.flush(true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
index 33c151d..0ba51b0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
@@ -241,7 +241,7 @@ public class TestAdmin1 {
final TableName table = TableName.valueOf("testDisableAndEnableTable");
Table ht = TEST_UTIL.createTable(table, HConstants.CATALOG_FAMILY);
Put put = new Put(row);
- put.add(HConstants.CATALOG_FAMILY, qualifier, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, qualifier, value);
ht.put(put);
Get get = new Get(row);
get.addColumn(HConstants.CATALOG_FAMILY, qualifier);
@@ -308,7 +308,7 @@ public class TestAdmin1 {
Table ht1 = TEST_UTIL.createTable(table1, HConstants.CATALOG_FAMILY);
Table ht2 = TEST_UTIL.createTable(table2, HConstants.CATALOG_FAMILY);
Put put = new Put(row);
- put.add(HConstants.CATALOG_FAMILY, qualifier, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, qualifier, value);
ht1.put(put);
ht2.put(put);
Get get = new Get(row);
@@ -1187,13 +1187,13 @@ public class TestAdmin1 {
List<Put> puts = new ArrayList<Put>();
byte[] qualifier = "c".getBytes();
Put put = new Put(new byte[]{(byte)'1'});
- put.add(cf, qualifier, "100".getBytes());
+ put.addColumn(cf, qualifier, "100".getBytes());
puts.add(put);
put = new Put(new byte[]{(byte)'6'});
- put.add(cf, qualifier, "100".getBytes());
+ put.addColumn(cf, qualifier, "100".getBytes());
puts.add(put);
put = new Put(new byte[]{(byte)'8'});
- put.add(cf, qualifier, "100".getBytes());
+ put.addColumn(cf, qualifier, "100".getBytes());
puts.add(put);
ht.put(puts);
ht.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index 33203fc..fe311a6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -218,7 +218,7 @@ public class TestAdmin2 {
byte[] value = Bytes.toBytes("somedata");
// This used to use an empty row... That must have been a bug
Put put = new Put(value);
- put.add(HConstants.CATALOG_FAMILY, HConstants.CATALOG_FAMILY, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, HConstants.CATALOG_FAMILY, value);
table.put(put);
table.close();
}
@@ -614,7 +614,7 @@ public class TestAdmin2 {
HRegionServer regionServer = TEST_UTIL.getRSForFirstRegionInTable(tableName);
for (int i = 1; i <= 256; i++) { // 256 writes should cause 8 log rolls
Put put = new Put(Bytes.toBytes("row" + String.format("%1$04d", i)));
- put.add(HConstants.CATALOG_FAMILY, null, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, null, value);
table.put(put);
if (i % 32 == 0) {
// After every 32 writes sleep to let the log roller run
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
index d175744..6dedee2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
@@ -183,10 +183,10 @@ public class TestBlockEvictionFromClient {
// insert data. 2 Rows are added
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
assertTrue(Bytes.equals(table.get(new Get(ROW)).value(), data));
// data was in memstore so don't expect any changes
@@ -214,7 +214,7 @@ public class TestBlockEvictionFromClient {
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
byte[] data2 = Bytes.add(data, data);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
Result r = table.get(new Get(ROW));
assertTrue(Bytes.equals(r.getValue(FAMILY, QUALIFIER), data));
@@ -332,16 +332,16 @@ public class TestBlockEvictionFromClient {
BlockCache cache = cacheConf.getBlockCache();
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -389,16 +389,16 @@ public class TestBlockEvictionFromClient {
regionName);
BlockCache cache = setCacheProperties(region);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
for (int i = 1; i < 10; i++) {
put = new Put(ROW);
- put.add(FAMILY, Bytes.toBytes("testQualifier" + i), data2);
+ put.addColumn(FAMILY, Bytes.toBytes("testQualifier" + i), data2);
table.put(put);
if (i % 2 == 0) {
region.flush(true);
@@ -406,7 +406,7 @@ public class TestBlockEvictionFromClient {
}
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -483,16 +483,16 @@ public class TestBlockEvictionFromClient {
BlockCache cache = setCacheProperties(region);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
for (int i = 1; i < 10; i++) {
put = new Put(ROW);
- put.add(Bytes.toBytes("testFamily" + i), Bytes.toBytes("testQualifier" + i), data2);
+ put.addColumn(Bytes.toBytes("testFamily" + i), Bytes.toBytes("testQualifier" + i), data2);
table.put(put);
if (i % 2 == 0) {
region.flush(true);
@@ -501,7 +501,7 @@ public class TestBlockEvictionFromClient {
region.flush(true);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -577,16 +577,16 @@ public class TestBlockEvictionFromClient {
BlockCache cache = cacheConf.getBlockCache();
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -656,16 +656,16 @@ public class TestBlockEvictionFromClient {
BlockCache cache = setCacheProperties(region);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
for (int i = 1; i < 10; i++) {
put = new Put(ROW);
- put.add(Bytes.toBytes("testFamily" + i), Bytes.toBytes("testQualifier" + i), data2);
+ put.addColumn(Bytes.toBytes("testFamily" + i), Bytes.toBytes("testQualifier" + i), data2);
table.put(put);
if (i % 2 == 0) {
region.flush(true);
@@ -674,7 +674,7 @@ public class TestBlockEvictionFromClient {
region.flush(true);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -827,10 +827,10 @@ public class TestBlockEvictionFromClient {
// insert data. 2 Rows are added
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
assertTrue(Bytes.equals(table.get(new Get(ROW)).value(), data));
// Should create one Hfile with 2 blocks
@@ -842,7 +842,7 @@ public class TestBlockEvictionFromClient {
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
byte[] data2 = Bytes.add(data, data);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
// flush, one new block
System.out.println("Flushing cache");
@@ -1042,14 +1042,14 @@ public class TestBlockEvictionFromClient {
private void insertData(HTable table) throws IOException {
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
index b68381f..082de09 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
@@ -62,9 +62,9 @@ public class TestCheckAndMutate {
try {
// put one row
Put put = new Put(rowKey);
- put.add(family, Bytes.toBytes("A"), Bytes.toBytes("a"));
- put.add(family, Bytes.toBytes("B"), Bytes.toBytes("b"));
- put.add(family, Bytes.toBytes("C"), Bytes.toBytes("c"));
+ put.addColumn(family, Bytes.toBytes("A"), Bytes.toBytes("a"));
+ put.addColumn(family, Bytes.toBytes("B"), Bytes.toBytes("b"));
+ put.addColumn(family, Bytes.toBytes("C"), Bytes.toBytes("c"));
table.put(put);
// get row back and assert the values
Get get = new Get(rowKey);
@@ -102,7 +102,8 @@ public class TestCheckAndMutate {
//Test that we get a region level exception
try {
Put p = new Put(rowKey);
- p.add(new byte[]{'b', 'o', 'g', 'u', 's'}, new byte[]{'A'}, new byte[0]);
+ byte[] value = new byte[0];
+ p.addColumn(new byte[]{'b', 'o', 'g', 'u', 's'}, new byte[]{'A'}, value);
rm = new RowMutations(rowKey);
rm.add(p);
table.checkAndMutate(rowKey, family, Bytes.toBytes("A"), CompareFilter.CompareOp.EQUAL,
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
index 072098e..a4603b7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
@@ -85,7 +85,7 @@ public class TestClientOperationInterrupt {
Table ht = util.createTable(tableName, new byte[][]{dummy, test});
Put p = new Put(row1);
- p.add(dummy, dummy, dummy);
+ p.addColumn(dummy, dummy, dummy);
ht.put(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
index 8af1d9f..515e763 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
@@ -126,7 +126,7 @@ public class TestClientScannerRPCTimeout {
private void putToTable(Table ht, byte[] rowkey) throws IOException {
Put put = new Put(rowkey);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
index 4be2ef0..5ceef01 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
@@ -125,7 +125,7 @@ public class TestFastFail {
byte[] rowKey = longToByteArrayKey(i);
Put put = new Put(rowKey);
byte[] value = rowKey; // value is the same as the row key
- put.add(FAMILY, QUALIFIER, value);
+ put.addColumn(FAMILY, QUALIFIER, value);
puts.add(put);
}
try (Table table = connection.getTable(TableName.valueOf(tableName))) {