You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by jm...@apache.org on 2015/10/29 19:16:13 UTC
[1/8] hbase git commit: HBASE-14675 Exorcise deprecated Put#add(...)
and replace with Put#addColumn(...)
Repository: hbase
Updated Branches:
refs/heads/master 452e38ff8 -> 094d65e6f
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
index c61bd78..a467071 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
@@ -208,7 +208,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase {
switch (mt) {
case PUT:
Put put = new Put(rowKey);
- put.add(cf, column, hashCodeBytes);
+ put.addColumn(cf, column, hashCodeBytes);
mutate(table, put, rowKeyBase, rowKey, cf, column, checkedValue);
buf.append(MutationType.PUT.getNumber());
break;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
index 4d1c286..83e207a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
@@ -105,7 +105,7 @@ public class MultiThreadedWriter extends MultiThreadedWriterBase {
byte[][] columns = dataGenerator.generateColumnsForCf(rowKey, cf);
for (byte[] column : columns) {
byte[] value = dataGenerator.generateValue(rowKey, cf, column);
- put.add(cf, column, value);
+ put.addColumn(cf, column, value);
++columnCount;
if (!isMultiPut) {
insert(table, put, rowKeyBase);
@@ -114,8 +114,8 @@ public class MultiThreadedWriter extends MultiThreadedWriterBase {
}
}
long rowKeyHash = Arrays.hashCode(rowKey);
- put.add(cf, MUTATE_INFO, HConstants.EMPTY_BYTE_ARRAY);
- put.add(cf, INCREMENT, Bytes.toBytes(rowKeyHash));
+ put.addColumn(cf, MUTATE_INFO, HConstants.EMPTY_BYTE_ARRAY);
+ put.addColumn(cf, INCREMENT, Bytes.toBytes(rowKeyHash));
if (!isMultiPut) {
insert(table, put, rowKeyBase);
numCols.addAndGet(1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
index 751130f..c988761 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
@@ -110,17 +110,17 @@ public class TestCoprocessorScanPolicy {
// set the version override to 2
Put p = new Put(R);
p.setAttribute("versions", new byte[]{});
- p.add(F, tableName.getName(), Bytes.toBytes(2));
+ p.addColumn(F, tableName.getName(), Bytes.toBytes(2));
t.put(p);
long now = EnvironmentEdgeManager.currentTime();
// insert 2 versions
p = new Put(R);
- p.add(F, Q, now, Q);
+ p.addColumn(F, Q, now, Q);
t.put(p);
p = new Put(R);
- p.add(F, Q, now+1, Q);
+ p.addColumn(F, Q, now + 1, Q);
t.put(p);
Get g = new Get(R);
g.setMaxVersions(10);
@@ -138,7 +138,7 @@ public class TestCoprocessorScanPolicy {
// insert a 3rd version
p = new Put(R);
- p.add(F, Q, now+2, Q);
+ p.addColumn(F, Q, now + 2, Q);
t.put(p);
g = new Get(R);
g.setMaxVersions(10);
@@ -172,14 +172,14 @@ public class TestCoprocessorScanPolicy {
// Set the TTL override to 3s
Put p = new Put(R);
p.setAttribute("ttl", new byte[]{});
- p.add(F, tableName.getName(), Bytes.toBytes(3000L));
+ p.addColumn(F, tableName.getName(), Bytes.toBytes(3000L));
t.put(p);
p = new Put(R);
- p.add(F, Q, ts, Q);
+ p.addColumn(F, Q, ts, Q);
t.put(p);
p = new Put(R);
- p.add(F, Q, ts+1, Q);
+ p.addColumn(F, Q, ts + 1, Q);
t.put(p);
// these two should be expired but for the override
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
index 5c08f1f..0ed8711 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
@@ -109,8 +109,8 @@ public class TestHBaseFsckEncryption {
for (int i = 0; i < values.length; i++) {
for (int j = 0; j < values.length; j++) {
Put put = new Put(new byte[] { values[i], values[j] });
- put.add(Bytes.toBytes("cf"), new byte[] {}, new byte[] { values[i],
- values[j] });
+ put.addColumn(Bytes.toBytes("cf"), new byte[]{}, new byte[]{values[i],
+ values[j]});
table.put(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
index df3c69c..c1c49e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
@@ -1363,10 +1363,12 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
meta.delete(deletes);
// Mess it up by creating a fake hbase:meta entry with no associated RegionInfo
- meta.put(new Put(Bytes.toBytes(table + ",,1361911384013.810e28f59a57da91c66")).add(
- HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes.toBytes("node1:60020")));
- meta.put(new Put(Bytes.toBytes(table + ",,1361911384013.810e28f59a57da91c66")).add(
- HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(1362150791183L)));
+ meta.put(new Put(Bytes.toBytes(table + ",,1361911384013.810e28f59a57da91c66"))
+ .addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
+ Bytes.toBytes("node1:60020")));
+ meta.put(new Put(Bytes.toBytes(table + ",,1361911384013.810e28f59a57da91c66"))
+ .addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,
+ Bytes.toBytes(1362150791183L)));
meta.close();
HBaseFsck hbck = doFsck(conf, false);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
index 4eb1dd8..b3bd355 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
@@ -363,7 +363,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
assertNotNull(serverName);
try (Table meta = connection.getTable(TableName.META_TABLE_NAME, tableExecutorService)) {
Put put = new Put(regionName);
- put.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
+ put.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
Bytes.toBytes(serverName.getHostAndPort()));
meta.put(put);
}
@@ -432,9 +432,9 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
startCode != sn.getStartcode()) {
Put put = new Put(res.getRow());
put.setDurability(Durability.SKIP_WAL);
- put.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
+ put.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
Bytes.toBytes(sn.getHostAndPort()));
- put.add(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,
+ put.addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,
Bytes.toBytes(sn.getStartcode()));
meta.put(put);
hri = MetaTableAccessor.getHRegionInfo(res);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
index 480ae91..661af14 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
@@ -146,7 +146,7 @@ public class TestMergeTable {
for(int i = firstRow; i < firstRow + nrows; i++) {
Put put = new Put(Bytes.toBytes("row_" + String.format("%1$05d", i)));
put.setDurability(Durability.SKIP_WAL);
- put.add(COLUMN_NAME, null, VALUE);
+ put.addColumn(COLUMN_NAME, null, VALUE);
region.put(put);
if (i % 10000 == 0) {
LOG.info("Flushing write #" + i);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
index a1b2aa1..cb51fb2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -164,7 +164,7 @@ public class TestMergeTool extends HBaseTestCase {
for (int j = 0; j < rows[i].length; j++) {
byte [] row = rows[i][j];
Put put = new Put(row);
- put.add(FAMILY, QUALIFIER, row);
+ put.addColumn(FAMILY, QUALIFIER, row);
regions[i].put(put);
}
HRegion.addRegionToMETA(meta, regions[i]);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
index 973965c..ecda77f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
@@ -150,8 +150,8 @@ public class OfflineMetaRebuildTestCore {
for (int i = 0; i < values.length; i++) {
for (int j = 0; j < values.length; j++) {
Put put = new Put(new byte[] { values[i], values[j] });
- put.add(Bytes.toBytes("fam"), new byte[] {}, new byte[] { values[i],
- values[j] });
+ put.addColumn(Bytes.toBytes("fam"), new byte[]{}, new byte[]{values[i],
+ values[j]});
puts.add(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
index 8833eda..bde3e49 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
@@ -89,7 +89,7 @@ public class TestWALFiltering {
final byte[] value = Bytes.toBytes("value_for_row_" + iRow +
"_cf_" + Bytes.toStringBinary(cf) + "_col_" + iCol + "_ts_" +
ts + "_random_" + rand.nextLong());
- put.add(cf, qual, ts, value);
+ put.addColumn(cf, qual, ts, value);
} else if (rand.nextDouble() < 0.8) {
del.addColumn(cf, qual, ts);
} else {
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
index 3af853b..7996c17 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
@@ -545,7 +545,8 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
for (int cf = 0; cf < numFamilies; ++cf) {
for (int q = 0; q < numQualifiers; ++q) {
rand.nextBytes(value);
- put.add(Bytes.toBytes(FAMILY_PREFIX + cf), Bytes.toBytes(QUALIFIER_PREFIX + q), value);
+ put.addColumn(Bytes.toBytes(FAMILY_PREFIX + cf),
+ Bytes.toBytes(QUALIFIER_PREFIX + q), value);
}
}
return put;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-shell/src/main/ruby/hbase/admin.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb
index 2f91ba7..e680e9c 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -853,7 +853,9 @@ module Hbase
# Write it back
put = org.apache.hadoop.hbase.client.Put.new(region_bytes)
- put.add(org.apache.hadoop.hbase.HConstants::CATALOG_FAMILY, org.apache.hadoop.hbase.HConstants::REGIONINFO_QUALIFIER, org.apache.hadoop.hbase.util.Writables.getBytes(hri))
+ put.addColumn(org.apache.hadoop.hbase.HConstants::CATALOG_FAMILY,
+ org.apache.hadoop.hbase.HConstants::REGIONINFO_QUALIFIER,
+ org.apache.hadoop.hbase.util.Writables.getBytes(hri))
meta.put(put)
end
# Apply user metadata to table/column descriptor
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-shell/src/main/ruby/hbase/table.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb b/hbase-shell/src/main/ruby/hbase/table.rb
index 9f8b673..153f07e 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -152,9 +152,9 @@ EOF
timestamp = nil
end
if timestamp
- p.add(family, qualifier, timestamp, value.to_s.to_java_bytes)
+ p.addColumn(family, qualifier, timestamp, value.to_s.to_java_bytes)
else
- p.add(family, qualifier, value.to_s.to_java_bytes)
+ p.addColumn(family, qualifier, value.to_s.to_java_bytes)
end
@table.put(p)
end
[7/8] hbase git commit: HBASE-14673 Exorcise deprecated
Delete#delete* api
Posted by jm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 342c7d1..cc8cd29 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -1136,9 +1136,9 @@ public class ThriftServerRunner implements Runnable {
addAttributes(delete, attributes);
byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
if (famAndQf.length == 1) {
- delete.deleteFamily(famAndQf[0], timestamp);
+ delete.addFamily(famAndQf[0], timestamp);
} else {
- delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
+ delete.addColumns(famAndQf[0], famAndQf[1], timestamp);
}
table.delete(delete);
@@ -1250,9 +1250,9 @@ public class ThriftServerRunner implements Runnable {
byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
if (m.isDelete) {
if (famAndQf.length == 1) {
- delete.deleteFamily(famAndQf[0], timestamp);
+ delete.addFamily(famAndQf[0], timestamp);
} else {
- delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
+ delete.addColumns(famAndQf[0], famAndQf[1], timestamp);
}
delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
: Durability.SKIP_WAL);
@@ -1310,9 +1310,9 @@ public class ThriftServerRunner implements Runnable {
if (m.isDelete) {
// no qualifier, family only.
if (famAndQf.length == 1) {
- delete.deleteFamily(famAndQf[0], timestamp);
+ delete.addFamily(famAndQf[0], timestamp);
} else {
- delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
+ delete.addColumns(famAndQf[0], famAndQf[1], timestamp);
}
delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
: Durability.SKIP_WAL);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
index bef35f6..8811e6d 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
@@ -267,22 +267,22 @@ public class ThriftUtilities {
if (column.isSetTimestamp()) {
if (in.isSetDeleteType() &&
in.getDeleteType().equals(TDeleteType.DELETE_COLUMNS))
- out.deleteColumns(column.getFamily(), column.getQualifier(), column.getTimestamp());
+ out.addColumns(column.getFamily(), column.getQualifier(), column.getTimestamp());
else
- out.deleteColumn(column.getFamily(), column.getQualifier(), column.getTimestamp());
+ out.addColumn(column.getFamily(), column.getQualifier(), column.getTimestamp());
} else {
if (in.isSetDeleteType() &&
in.getDeleteType().equals(TDeleteType.DELETE_COLUMNS))
- out.deleteColumns(column.getFamily(), column.getQualifier());
+ out.addColumns(column.getFamily(), column.getQualifier());
else
- out.deleteColumn(column.getFamily(), column.getQualifier());
+ out.addColumn(column.getFamily(), column.getQualifier());
}
} else {
if (column.isSetTimestamp()) {
- out.deleteFamily(column.getFamily(), column.getTimestamp());
+ out.addFamily(column.getFamily(), column.getTimestamp());
} else {
- out.deleteFamily(column.getFamily());
+ out.addFamily(column.getFamily());
}
}
}
[3/8] hbase git commit: HBASE-14675 Exorcise deprecated Put#add(...)
and replace with Put#addColumn(...)
Posted by jm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index 49f36d6..7b48783 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -386,7 +386,7 @@ public class TestAtomicOperation {
RowMutations rm = new RowMutations(row);
if (op) {
Put p = new Put(row, ts);
- p.add(fam1, qual1, value1);
+ p.addColumn(fam1, qual1, value1);
p.setDurability(Durability.ASYNC_WAL);
rm.add(p);
Delete d = new Delete(row);
@@ -399,7 +399,7 @@ public class TestAtomicOperation {
d.setDurability(Durability.ASYNC_WAL);
rm.add(d);
Put p = new Put(row, ts);
- p.add(fam1, qual2, value2);
+ p.addColumn(fam1, qual2, value2);
p.setDurability(Durability.ASYNC_WAL);
rm.add(p);
}
@@ -479,7 +479,7 @@ public class TestAtomicOperation {
List<Mutation> mrm = new ArrayList<Mutation>();
if (op) {
Put p = new Put(row2, ts);
- p.add(fam1, qual1, value1);
+ p.addColumn(fam1, qual1, value1);
p.setDurability(Durability.ASYNC_WAL);
mrm.add(p);
Delete d = new Delete(row);
@@ -493,7 +493,7 @@ public class TestAtomicOperation {
mrm.add(d);
Put p = new Put(row, ts);
p.setDurability(Durability.ASYNC_WAL);
- p.add(fam1, qual1, value2);
+ p.addColumn(fam1, qual1, value2);
mrm.add(p);
}
region.mutateRowsWithLocks(mrm, rowsToLock, HConstants.NO_NONCE, HConstants.NO_NONCE);
@@ -581,7 +581,7 @@ public class TestAtomicOperation {
Put[] puts = new Put[1];
Put put = new Put(Bytes.toBytes("r1"));
- put.add(Bytes.toBytes(family), Bytes.toBytes("q1"), Bytes.toBytes("10"));
+ put.addColumn(Bytes.toBytes(family), Bytes.toBytes("q1"), Bytes.toBytes("10"));
puts[0] = put;
region.batchMutate(puts, HConstants.NO_NONCE, HConstants.NO_NONCE);
@@ -615,7 +615,7 @@ public class TestAtomicOperation {
public void doWork() throws Exception {
Put[] puts = new Put[1];
Put put = new Put(Bytes.toBytes("r1"));
- put.add(Bytes.toBytes(family), Bytes.toBytes("q1"), Bytes.toBytes("50"));
+ put.addColumn(Bytes.toBytes(family), Bytes.toBytes("q1"), Bytes.toBytes("50"));
puts[0] = put;
testStep = TestStep.PUT_STARTED;
region.batchMutate(puts, HConstants.NO_NONCE, HConstants.NO_NONCE);
@@ -632,7 +632,7 @@ public class TestAtomicOperation {
public void doWork() throws Exception {
Put[] puts = new Put[1];
Put put = new Put(Bytes.toBytes("r1"));
- put.add(Bytes.toBytes(family), Bytes.toBytes("q1"), Bytes.toBytes("11"));
+ put.addColumn(Bytes.toBytes(family), Bytes.toBytes("q1"), Bytes.toBytes("11"));
puts[0] = put;
while (testStep != TestStep.PUT_COMPLETED) {
Thread.sleep(100);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
index 900d4ff..b0a43b7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
@@ -127,7 +127,7 @@ public class TestBlocksRead {
put.setDurability(Durability.SKIP_WAL);
for (long version = versionStart; version <= versionEnd; version++) {
- put.add(cf, columnBytes, version, genValue(row, col, version));
+ put.addColumn(cf, columnBytes, version, genValue(row, col, version));
}
region.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
index f809a97..22b2163 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
@@ -222,7 +222,7 @@ public class TestCompactionState {
byte[] row = Bytes.toBytes(random.nextLong());
Put p = new Put(row);
for (int j = 0; j < families.length; ++j) {
- p.add(families[ j ], qualifier, row);
+ p.addColumn(families[j], qualifier, row);
}
puts.add(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
index 7d1a39c..54dbe9b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
@@ -88,7 +88,7 @@ public class TestCorruptedRegionStoreFile {
while (rowCount < NUM_ROWS) {
Put put = new Put(Bytes.toBytes(String.format("%010d", rowCount)));
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY_NAME, q, value);
+ put.addColumn(FAMILY_NAME, q, value);
table.put(put);
if ((rowCount++ % ROW_PER_FILE) == 0) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
index 45a95c4..6c66c6d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
@@ -216,7 +216,7 @@ public class TestEncryptionKeyRotation {
Table table = TEST_UTIL.getConnection().getTable(htd.getTableName());
try {
table.put(new Put(Bytes.toBytes("testrow"))
- .add(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
+ .addColumn(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
} finally {
table.close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
index 0a6b2b5..ad7cf2f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
@@ -107,7 +107,7 @@ public class TestEncryptionRandomKeying {
Table table = TEST_UTIL.getConnection().getTable(htd.getTableName());
try {
table.put(new Put(Bytes.toBytes("testrow"))
- .add(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
+ .addColumn(hcd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
} finally {
table.close();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 06517d7..9b82cc5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -287,13 +287,13 @@ public class TestHRegion {
byte [] value = Bytes.toBytes(name.getMethodName());
// Make a random put against our cf.
Put put = new Put(value);
- put.add(COLUMN_FAMILY_BYTES, null, value);
+ put.addColumn(COLUMN_FAMILY_BYTES, null, value);
// First put something in current memstore, which will be in snapshot after flusher.prepare()
region.put(put);
StoreFlushContext storeFlushCtx = store.createFlushContext(12345);
storeFlushCtx.prepare();
// Second put something in current memstore
- put.add(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
+ put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
region.put(put);
// Close with something in memstore and something in the snapshot. Make sure all is cleared.
region.close();
@@ -339,7 +339,7 @@ public class TestHRegion {
faultyLog.setStoreFlushCtx(store.createFlushContext(12345));
Put put = new Put(value);
- put.add(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
+ put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
faultyLog.setFailureType(FaultyFSLog.FailureType.SYNC);
boolean threwIOE = false;
@@ -388,7 +388,7 @@ public class TestHRegion {
// Put some value and make sure flush could be completed normally
byte [] value = Bytes.toBytes(name.getMethodName());
Put put = new Put(value);
- put.add(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
+ put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
region.put(put);
long onePutSize = region.getMemstoreSize();
assertTrue(onePutSize > 0);
@@ -457,7 +457,7 @@ public class TestHRegion {
Assert.assertEquals(0, size);
// Put one item into memstore. Measure the size of one item in memstore.
Put p1 = new Put(row);
- p1.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual1, 1, (byte[])null));
+ p1.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual1, 1, (byte[]) null));
region.put(p1);
final long sizeOfOnePut = region.getMemstoreSize();
// Fail a flush which means the current memstore will hang out as memstore 'snapshot'.
@@ -556,7 +556,7 @@ public class TestHRegion {
this.region = initHRegion(tableName, method, CONF, family);
Put put = new Put(Bytes.toBytes("r1"));
- put.add(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
+ put.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
region.put(put);
region.flush(true);
@@ -604,10 +604,10 @@ public class TestHRegion {
this.region = initHRegion(tableName, method, CONF, family);
Put put = new Put(Bytes.toBytes("r1"));
- put.add(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
+ put.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
region.put(put);
put = new Put(Bytes.toBytes("r2"));
- put.add(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
+ put.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
region.put(put);
region.flush(true);
@@ -853,7 +853,7 @@ public class TestHRegion {
for (long i = minSeqId; i < maxSeqId; i++) {
Put put = new Put(Bytes.toBytes(i));
- put.add(family, Bytes.toBytes(i), Bytes.toBytes(i));
+ put.addColumn(family, Bytes.toBytes(i), Bytes.toBytes(i));
region.put(put);
region.flush(true);
}
@@ -955,7 +955,7 @@ public class TestHRegion {
for (long i = minSeqId; i < maxSeqId; i++) {
Put put = new Put(Bytes.toBytes(i));
- put.add(family, Bytes.toBytes(i), Bytes.toBytes(i));
+ put.addColumn(family, Bytes.toBytes(i), Bytes.toBytes(i));
region.put(put);
region.flush(true);
}
@@ -1152,7 +1152,7 @@ public class TestHRegion {
int i = 0;
Put put = new Put(Bytes.toBytes(i));
put.setDurability(Durability.SKIP_WAL); // have to skip mocked wal
- put.add(family, Bytes.toBytes(i), Bytes.toBytes(i));
+ put.addColumn(family, Bytes.toBytes(i), Bytes.toBytes(i));
region.put(put);
// 1. Test case where START_FLUSH throws exception
@@ -1440,11 +1440,11 @@ public class TestHRegion {
System.out.println(String.format("Saving row: %s, with value %s", row, value));
Put put = new Put(Bytes.toBytes(row));
put.setDurability(Durability.SKIP_WAL);
- put.add(Bytes.toBytes("trans-blob"), null, Bytes.toBytes("value for blob"));
- put.add(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
- put.add(Bytes.toBytes("trans-date"), null, Bytes.toBytes("20090921010101999"));
- put.add(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), Bytes.toBytes(value));
- put.add(Bytes.toBytes("trans-group"), null, Bytes.toBytes("adhocTransactionGroupId"));
+ put.addColumn(Bytes.toBytes("trans-blob"), null, Bytes.toBytes("value for blob"));
+ put.addColumn(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
+ put.addColumn(Bytes.toBytes("trans-date"), null, Bytes.toBytes("20090921010101999"));
+ put.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), Bytes.toBytes(value));
+ put.addColumn(Bytes.toBytes("trans-group"), null, Bytes.toBytes("adhocTransactionGroupId"));
r.put(put);
}
}
@@ -1457,7 +1457,7 @@ public class TestHRegion {
try {
Put p = new Put(b.toBytes());
byte[] cfwithcolon = Bytes.toBytes(COLUMN_FAMILY + ":");
- p.add(cfwithcolon, cfwithcolon, cfwithcolon);
+ p.addColumn(cfwithcolon, cfwithcolon, cfwithcolon);
boolean exception = false;
try {
this.region.put(p);
@@ -1486,7 +1486,7 @@ public class TestHRegion {
final Put[] puts = new Put[10];
for (int i = 0; i < 10; i++) {
puts[i] = new Put(Bytes.toBytes("row_" + i));
- puts[i].add(cf, qual, val);
+ puts[i].addColumn(cf, qual, val);
}
OperationStatus[] codes = this.region.batchMutate(puts);
@@ -1497,7 +1497,7 @@ public class TestHRegion {
metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 1, source);
LOG.info("Next a batch put with one invalid family");
- puts[5].add(Bytes.toBytes("BAD_CF"), qual, val);
+ puts[5].addColumn(Bytes.toBytes("BAD_CF"), qual, val);
codes = this.region.batchMutate(puts);
assertEquals(10, codes.length);
for (int i = 0; i < 10; i++) {
@@ -1526,9 +1526,9 @@ public class TestHRegion {
final Put[] puts = new Put[10];
for (int i = 0; i < 10; i++) {
puts[i] = new Put(Bytes.toBytes("row_" + i));
- puts[i].add(cf, qual, val);
+ puts[i].addColumn(cf, qual, val);
}
- puts[5].add(Bytes.toBytes("BAD_CF"), qual, val);
+ puts[5].addColumn(Bytes.toBytes("BAD_CF"), qual, val);
LOG.info("batchPut will have to break into four batches to avoid row locks");
RowLock rowLock1 = region.getRowLock(Bytes.toBytes("row_2"));
@@ -1633,7 +1633,7 @@ public class TestHRegion {
final Put[] puts = new Put[10];
for (int i = 0; i < 10; i++) {
puts[i] = new Put(Bytes.toBytes("row_" + i), Long.MAX_VALUE - 100);
- puts[i].add(cf, qual, val);
+ puts[i].addColumn(cf, qual, val);
}
OperationStatus[] codes = this.region.batchMutate(puts);
@@ -1668,7 +1668,7 @@ public class TestHRegion {
try {
// Putting empty data in key
Put put = new Put(row1);
- put.add(fam1, qf1, emptyVal);
+ put.addColumn(fam1, qf1, emptyVal);
// checkAndPut with empty value
boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(
@@ -1677,7 +1677,7 @@ public class TestHRegion {
// Putting data in key
put = new Put(row1);
- put.add(fam1, qf1, val1);
+ put.addColumn(fam1, qf1, val1);
// checkAndPut with correct value
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal),
@@ -1696,7 +1696,7 @@ public class TestHRegion {
assertFalse(res);
put = new Put(row1);
- put.add(fam1, qf1, val2);
+ put.addColumn(fam1, qf1, val2);
// checkAndPut with correct value
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val1),
put, true);
@@ -1717,7 +1717,7 @@ public class TestHRegion {
// checkAndPut looking for a null value
put = new Put(row1);
- put.add(fam1, qf1, val1);
+ put.addColumn(fam1, qf1, val1);
res = region
.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new NullComparator(), put, true);
@@ -1742,7 +1742,7 @@ public class TestHRegion {
try {
// Putting data in key
Put put = new Put(row1);
- put.add(fam1, qf1, val1);
+ put.addColumn(fam1, qf1, val1);
region.put(put);
// checkAndPut with wrong value
@@ -1775,7 +1775,7 @@ public class TestHRegion {
try {
// Putting data in key
Put put = new Put(row1);
- put.add(fam1, qf1, val1);
+ put.addColumn(fam1, qf1, val1);
region.put(put);
// checkAndPut with correct value
@@ -1811,7 +1811,7 @@ public class TestHRegion {
try {
// Putting val3 in key
Put put = new Put(row1);
- put.add(fam1, qf1, val3);
+ put.addColumn(fam1, qf1, val3);
region.put(put);
// Test CompareOp.LESS: original = val3, compare with val3, fail
@@ -1827,7 +1827,7 @@ public class TestHRegion {
// Test CompareOp.LESS: original = val3, compare with val2,
// succeed (now value = val2)
put = new Put(row1);
- put.add(fam1, qf1, val2);
+ put.addColumn(fam1, qf1, val2);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.LESS,
new BinaryComparator(val2), put, true);
assertEquals(true, res);
@@ -1846,7 +1846,7 @@ public class TestHRegion {
// Test CompareOp.LESS_OR_EQUAL: original = val2, compare with val1,
// succeed (now value = val3)
put = new Put(row1);
- put.add(fam1, qf1, val3);
+ put.addColumn(fam1, qf1, val3);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.LESS_OR_EQUAL,
new BinaryComparator(val1), put, true);
assertEquals(true, res);
@@ -1864,7 +1864,7 @@ public class TestHRegion {
// Test CompareOp.GREATER: original = val3, compare with val4,
// succeed (now value = val2)
put = new Put(row1);
- put.add(fam1, qf1, val2);
+ put.addColumn(fam1, qf1, val2);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.GREATER,
new BinaryComparator(val4), put, true);
assertEquals(true, res);
@@ -1907,7 +1907,7 @@ public class TestHRegion {
try {
// Putting data in the key to check
Put put = new Put(row1);
- put.add(fam1, qf1, val1);
+ put.addColumn(fam1, qf1, val1);
region.put(put);
// Creating put to add
@@ -1943,7 +1943,7 @@ public class TestHRegion {
this.region = initHRegion(tableName, this.getName(), CONF, COLUMNS);
try {
Put put = new Put(row2);
- put.add(fam1, qual1, value1);
+ put.addColumn(fam1, qual1, value1);
try {
region.checkAndMutate(row, fam1, qual1, CompareOp.EQUAL,
new BinaryComparator(value2), put, false);
@@ -1978,16 +1978,16 @@ public class TestHRegion {
try {
// Put content
Put put = new Put(row1);
- put.add(fam1, qf1, val1);
+ put.addColumn(fam1, qf1, val1);
region.put(put);
Threads.sleep(2);
put = new Put(row1);
- put.add(fam1, qf1, val2);
- put.add(fam2, qf1, val3);
- put.add(fam2, qf2, val2);
- put.add(fam2, qf3, val1);
- put.add(fam1, qf3, val1);
+ put.addColumn(fam1, qf1, val2);
+ put.addColumn(fam2, qf1, val3);
+ put.addColumn(fam2, qf2, val2);
+ put.addColumn(fam2, qf3, val1);
+ put.addColumn(fam1, qf3, val1);
region.put(put);
// Multi-column delete
@@ -2045,8 +2045,8 @@ public class TestHRegion {
byte[] value = Bytes.toBytes("value");
Put put = new Put(row1);
- put.add(fam1, qual, 1, value);
- put.add(fam1, qual, 2, value);
+ put.addColumn(fam1, qual, (long) 1, value);
+ put.addColumn(fam1, qual, (long) 2, value);
String method = this.getName();
this.region = initHRegion(tableName, method, CONF, fam1);
@@ -2130,15 +2130,15 @@ public class TestHRegion {
// add some data:
Put put = new Put(row);
- put.add(fam, splitA, Bytes.toBytes("reference_A"));
+ put.addColumn(fam, splitA, Bytes.toBytes("reference_A"));
region.put(put);
put = new Put(row);
- put.add(fam, splitB, Bytes.toBytes("reference_B"));
+ put.addColumn(fam, splitB, Bytes.toBytes("reference_B"));
region.put(put);
put = new Put(row);
- put.add(fam, serverinfo, Bytes.toBytes("ip_address"));
+ put.addColumn(fam, serverinfo, Bytes.toBytes("ip_address"));
region.put(put);
// ok now delete a split:
@@ -2161,7 +2161,7 @@ public class TestHRegion {
// Assert that after a delete, I can put.
put = new Put(row);
- put.add(fam, splitA, Bytes.toBytes("reference_A"));
+ put.addColumn(fam, splitA, Bytes.toBytes("reference_A"));
region.put(put);
get = new Get(row);
result = region.get(get);
@@ -2172,7 +2172,7 @@ public class TestHRegion {
region.delete(delete);
assertEquals(0, region.get(get).size());
- region.put(new Put(row).add(fam, splitA, Bytes.toBytes("reference_A")));
+ region.put(new Put(row).addColumn(fam, splitA, Bytes.toBytes("reference_A")));
result = region.get(get);
assertEquals(1, result.size());
} finally {
@@ -2194,7 +2194,7 @@ public class TestHRegion {
// add data in the far future
Put put = new Put(row);
- put.add(fam, serverinfo, HConstants.LATEST_TIMESTAMP - 5, Bytes.toBytes("value"));
+ put.addColumn(fam, serverinfo, HConstants.LATEST_TIMESTAMP - 5, Bytes.toBytes("value"));
region.put(put);
// now delete something in the present
@@ -2237,7 +2237,7 @@ public class TestHRegion {
// add data with LATEST_TIMESTAMP, put without WAL
Put put = new Put(row);
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
region.put(put);
// Make sure it shows up with an actual timestamp
@@ -2253,7 +2253,7 @@ public class TestHRegion {
// code paths, so check both)
row = Bytes.toBytes("row2");
put = new Put(row);
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
region.put(put);
// Make sure it shows up with an actual timestamp
@@ -2289,10 +2289,10 @@ public class TestHRegion {
try {
try {
// no TS specified == use latest. should not error
- region.put(new Put(row).add(fam, Bytes.toBytes("qual"), Bytes.toBytes("value")));
+ region.put(new Put(row).addColumn(fam, Bytes.toBytes("qual"), Bytes.toBytes("value")));
// TS out of range. should error
- region.put(new Put(row).add(fam, Bytes.toBytes("qual"), System.currentTimeMillis() + 2000,
- Bytes.toBytes("value")));
+ region.put(new Put(row).addColumn(fam, Bytes.toBytes("qual"),
+ System.currentTimeMillis() + 2000, Bytes.toBytes("value")));
fail("Expected IOE for TS out of configured timerange");
} catch (FailedSanityCheckException ioe) {
LOG.debug("Received expected exception", ioe);
@@ -2323,12 +2323,12 @@ public class TestHRegion {
// now create data.
Put put = new Put(rowA);
- put.add(fam2, null, value);
+ put.addColumn(fam2, null, value);
region.put(put);
put = new Put(rowB);
- put.add(fam1, null, value);
- put.add(fam2, null, value);
+ put.addColumn(fam1, null, value);
+ put.addColumn(fam2, null, value);
region.put(put);
Scan scan = new Scan();
@@ -2367,7 +2367,7 @@ public class TestHRegion {
try {
EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
Put put = new Put(row);
- put.add(fam1, qual1, value1);
+ put.addColumn(fam1, qual1, value1);
region.put(put);
// now delete the value:
@@ -2375,7 +2375,7 @@ public class TestHRegion {
// ok put data:
put = new Put(row);
- put.add(fam1, qual1, value2);
+ put.addColumn(fam1, qual1, value2);
region.put(put);
// ok get:
@@ -2493,11 +2493,11 @@ public class TestHRegion {
try {
// Add to memstore
Put put = new Put(row1);
- put.add(fam1, col1, null);
- put.add(fam1, col2, null);
- put.add(fam1, col3, null);
- put.add(fam1, col4, null);
- put.add(fam1, col5, null);
+ put.addColumn(fam1, col1, null);
+ put.addColumn(fam1, col2, null);
+ put.addColumn(fam1, col3, null);
+ put.addColumn(fam1, col4, null);
+ put.addColumn(fam1, col5, null);
region.put(put);
Get get = new Get(row1);
@@ -2708,10 +2708,10 @@ public class TestHRegion {
// Putting data in Region
Put put = new Put(row1);
- put.add(fam1, null, null);
- put.add(fam2, null, null);
- put.add(fam3, null, null);
- put.add(fam4, null, null);
+ put.addColumn(fam1, null, null);
+ put.addColumn(fam2, null, null);
+ put.addColumn(fam3, null, null);
+ put.addColumn(fam4, null, null);
region.put(put);
Scan scan = null;
@@ -2791,17 +2791,17 @@ public class TestHRegion {
// Putting data in Region
Put put = null;
put = new Put(row1);
- put.add(fam1, (byte[]) null, ts, null);
- put.add(fam2, (byte[]) null, ts, null);
- put.add(fam3, (byte[]) null, ts, null);
- put.add(fam4, (byte[]) null, ts, null);
+ put.addColumn(fam1, (byte[]) null, ts, null);
+ put.addColumn(fam2, (byte[]) null, ts, null);
+ put.addColumn(fam3, (byte[]) null, ts, null);
+ put.addColumn(fam4, (byte[]) null, ts, null);
region.put(put);
put = new Put(row2);
- put.add(fam1, (byte[]) null, ts, null);
- put.add(fam2, (byte[]) null, ts, null);
- put.add(fam3, (byte[]) null, ts, null);
- put.add(fam4, (byte[]) null, ts, null);
+ put.addColumn(fam1, (byte[]) null, ts, null);
+ put.addColumn(fam2, (byte[]) null, ts, null);
+ put.addColumn(fam3, (byte[]) null, ts, null);
+ put.addColumn(fam4, (byte[]) null, ts, null);
region.put(put);
Scan scan = new Scan();
@@ -3179,23 +3179,23 @@ public class TestHRegion {
byte[] col2 = Bytes.toBytes("Pub222");
Put put = new Put(row1);
- put.add(family, col1, Bytes.toBytes(10L));
+ put.addColumn(family, col1, Bytes.toBytes(10L));
region.put(put);
put = new Put(row2);
- put.add(family, col1, Bytes.toBytes(15L));
+ put.addColumn(family, col1, Bytes.toBytes(15L));
region.put(put);
put = new Put(row3);
- put.add(family, col2, Bytes.toBytes(20L));
+ put.addColumn(family, col2, Bytes.toBytes(20L));
region.put(put);
put = new Put(row4);
- put.add(family, col2, Bytes.toBytes(30L));
+ put.addColumn(family, col2, Bytes.toBytes(30L));
region.put(put);
put = new Put(row5);
- put.add(family, col1, Bytes.toBytes(40L));
+ put.addColumn(family, col1, Bytes.toBytes(40L));
region.put(put);
Scan scan = new Scan(row3, row4);
@@ -3314,19 +3314,19 @@ public class TestHRegion {
byte[] filtered_val = Bytes.toBytes(3);
Put put = new Put(row1);
- put.add(cf_essential, col_normal, Bytes.toBytes(1));
- put.add(cf_joined, col_alpha, Bytes.toBytes(1));
+ put.addColumn(cf_essential, col_normal, Bytes.toBytes(1));
+ put.addColumn(cf_joined, col_alpha, Bytes.toBytes(1));
region.put(put);
put = new Put(row2);
- put.add(cf_essential, col_alpha, Bytes.toBytes(2));
- put.add(cf_joined, col_normal, Bytes.toBytes(2));
- put.add(cf_alpha, col_alpha, Bytes.toBytes(2));
+ put.addColumn(cf_essential, col_alpha, Bytes.toBytes(2));
+ put.addColumn(cf_joined, col_normal, Bytes.toBytes(2));
+ put.addColumn(cf_alpha, col_alpha, Bytes.toBytes(2));
region.put(put);
put = new Put(row3);
- put.add(cf_essential, col_normal, filtered_val);
- put.add(cf_joined, col_normal, filtered_val);
+ put.addColumn(cf_essential, col_normal, filtered_val);
+ put.addColumn(cf_joined, col_normal, filtered_val);
region.put(put);
// Check two things:
@@ -3379,11 +3379,11 @@ public class TestHRegion {
for (int i = 0; i < 10; i++) {
put = new Put(Bytes.toBytes("r" + Integer.toString(i)));
- put.add(cf_first, col_a, Bytes.toBytes(i));
+ put.addColumn(cf_first, col_a, Bytes.toBytes(i));
if (i < 5) {
- put.add(cf_first, col_b, Bytes.toBytes(i));
- put.add(cf_second, col_a, Bytes.toBytes(i));
- put.add(cf_second, col_b, Bytes.toBytes(i));
+ put.addColumn(cf_first, col_b, Bytes.toBytes(i));
+ put.addColumn(cf_second, col_a, Bytes.toBytes(i));
+ put.addColumn(cf_second, col_b, Bytes.toBytes(i));
}
region.put(put);
}
@@ -3717,7 +3717,7 @@ public class TestHRegion {
for (long i = 0; i < numRows; i++) {
Put put = new Put(Bytes.toBytes(i));
put.setDurability(Durability.SKIP_WAL);
- put.add(family, qual1, Bytes.toBytes(i % 10));
+ put.addColumn(family, qual1, Bytes.toBytes(i % 10));
region.put(put);
if (i != 0 && i % compactInterval == 0) {
@@ -3951,7 +3951,7 @@ public class TestHRegion {
byte[] value = Bytes.toBytes(String.valueOf(numPutsFinished));
for (byte[] family : families) {
for (byte[] qualifier : qualifiers) {
- put.add(family, qualifier, (long) numPutsFinished, value);
+ put.addColumn(family, qualifier, (long) numPutsFinished, value);
}
}
region.put(put);
@@ -4128,7 +4128,7 @@ public class TestHRegion {
this.region = initHRegion(tableName, method, CONF, family);
try {
Put put = new Put(Bytes.toBytes(1L));
- put.add(family, qual1, 1L, Bytes.toBytes(1L));
+ put.addColumn(family, qual1, 1L, Bytes.toBytes(1L));
region.put(put);
region.flush(true);
@@ -4137,7 +4137,7 @@ public class TestHRegion {
region.delete(delete);
put = new Put(Bytes.toBytes(2L));
- put.add(family, qual1, 2L, Bytes.toBytes(2L));
+ put.addColumn(family, qual1, 2L, Bytes.toBytes(2L));
region.put(put);
Scan idxScan = new Scan();
@@ -4185,7 +4185,8 @@ public class TestHRegion {
for (int j = 0; j < num_unique_rows; j++) {
Put put = new Put(Bytes.toBytes("row" + j));
put.setDurability(Durability.SKIP_WAL);
- put.add(fam1, qf1, version++, val1);
+ long ts = version++;
+ put.addColumn(fam1, qf1, ts, val1);
region.put(put);
}
}
@@ -4238,7 +4239,7 @@ public class TestHRegion {
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
for (long idx = 1; idx <= 4; idx++) {
- put.add(FAMILY, column, idx, Bytes.toBytes("value-version-" + idx));
+ put.addColumn(FAMILY, column, idx, Bytes.toBytes("value-version-" + idx));
}
region.put(put);
@@ -4285,7 +4286,7 @@ public class TestHRegion {
byte col[] = Bytes.toBytes("col1");
Put put = new Put(row);
- put.add(familyName, col, 1, Bytes.toBytes("SomeRandomValue"));
+ put.addColumn(familyName, col, (long) 1, Bytes.toBytes("SomeRandomValue"));
region.put(put);
region.flush(true);
@@ -4332,8 +4333,8 @@ public class TestHRegion {
byte col[] = Bytes.toBytes("col1");
Put put = new Put(row);
- put.add(fam1, col, 1, Bytes.toBytes("test1"));
- put.add(fam2, col, 1, Bytes.toBytes("test2"));
+ put.addColumn(fam1, col, (long) 1, Bytes.toBytes("test1"));
+ put.addColumn(fam2, col, (long) 1, Bytes.toBytes("test2"));
ht.put(put);
HRegion firstRegion = htu.getHBaseCluster().getRegions(TableName.valueOf(this.getName()))
@@ -4662,7 +4663,7 @@ public class TestHRegion {
put = new Put(row);
value = Bytes.toBytes("value0");
- put.add(family, qualifier, 1234567l, value);
+ put.addColumn(family, qualifier, 1234567l, value);
region.put(put);
get = new Get(row);
get.addColumn(family, qualifier);
@@ -4683,7 +4684,7 @@ public class TestHRegion {
put = new Put(row);
value = Bytes.toBytes("value1");
- put.add(family, qualifier, 1234567l, value);
+ put.addColumn(family, qualifier, 1234567l, value);
region.put(put);
get = new Get(row);
get.addColumn(family, qualifier);
@@ -4771,7 +4772,7 @@ public class TestHRegion {
new byte[][] { family });
Put put = new Put(Bytes.toBytes("r1"));
- put.add(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
+ put.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
put.setDurability(mutationDurability);
region.put(put);
@@ -4987,7 +4988,7 @@ public class TestHRegion {
Put put = new Put(Bytes.toBytes("" + i));
put.setDurability(durability);
for (byte[] family : families) {
- put.add(family, qf, null);
+ put.addColumn(family, qf, null);
}
region.put(put);
}
@@ -5081,7 +5082,7 @@ public class TestHRegion {
// Flush enough files to get up to the threshold, doesn't need compactions
for (int i = 0; i < 2; i++) {
- Put put = new Put(tableName.toBytes()).add(family, family, tableName.toBytes());
+ Put put = new Put(tableName.toBytes()).addColumn(family, family, tableName.toBytes());
region.put(put);
fr = region.flush(true);
assertTrue(fr.isFlushSucceeded());
@@ -5090,7 +5091,7 @@ public class TestHRegion {
// Two flushes after the threshold, compactions are needed
for (int i = 0; i < 2; i++) {
- Put put = new Put(tableName.toBytes()).add(family, family, tableName.toBytes());
+ Put put = new Put(tableName.toBytes()).addColumn(family, family, tableName.toBytes());
region.put(put);
fr = region.flush(true);
assertTrue(fr.isFlushSucceeded());
@@ -5793,11 +5794,11 @@ public class TestHRegion {
try {
// setup with one storefile and one memstore, to create scanner and get an earlier readPt
Put put = new Put(Bytes.toBytes("19998"));
- put.add(cf1, col, Bytes.toBytes("val"));
+ put.addColumn(cf1, col, Bytes.toBytes("val"));
region.put(put);
region.flushcache(true, true);
Put put2 = new Put(Bytes.toBytes("19997"));
- put2.add(cf1, col, Bytes.toBytes("val"));
+ put2.addColumn(cf1, col, Bytes.toBytes("val"));
region.put(put2);
Scan scan = new Scan(Bytes.toBytes("19998"));
@@ -5808,7 +5809,7 @@ public class TestHRegion {
// to check StoreFileScanner.seekToPreviousRow
for (int i = 10000; i < 20000; i++) {
Put p = new Put(Bytes.toBytes(""+i));
- p.add(cf1, col, Bytes.toBytes(""+i));
+ p.addColumn(cf1, col, Bytes.toBytes("" + i));
region.put(p);
}
region.flushcache(true, true);
@@ -5817,7 +5818,7 @@ public class TestHRegion {
// to check MemStoreScanner.seekToPreviousRow
for (int i = 10000; i < 20000; i++) {
Put p = new Put(Bytes.toBytes(""+i));
- p.add(cf1, col, Bytes.toBytes(""+i));
+ p.addColumn(cf1, col, Bytes.toBytes("" + i));
region.put(p);
}
@@ -5944,7 +5945,7 @@ public class TestHRegion {
Assert.assertEquals(0L, region.getWriteRequestsCount());
Put put = new Put(row);
- put.add(fam, fam, fam);
+ put.addColumn(fam, fam, fam);
Assert.assertEquals(0L, region.getWriteRequestsCount());
region.put(put);
@@ -5981,7 +5982,7 @@ public class TestHRegion {
assertNotNull(region);
// create a file in fam1 for the region before opening in OpenRegionHandler
- region.put(new Put(Bytes.toBytes("a")).add(fam1, fam1, fam1));
+ region.put(new Put(Bytes.toBytes("a")).addColumn(fam1, fam1, fam1));
region.flush(true);
HBaseTestingUtility.closeRegionAndWAL(region);
@@ -6086,7 +6087,7 @@ public class TestHRegion {
assertNotNull(region);
// create a file in fam1 for the region before opening in OpenRegionHandler
- region.put(new Put(Bytes.toBytes("a")).add(fam1, fam1, fam1));
+ region.put(new Put(Bytes.toBytes("a")).addColumn(fam1, fam1, fam1));
region.flush(true);
HBaseTestingUtility.closeRegionAndWAL(region);
@@ -6319,14 +6320,14 @@ public class TestHRegion {
// TTL tags specify ts in milliseconds
new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) } )));
// Add a cell that will expire after 10 seconds via family setting
- region.put(new Put(row).add(fam1, q2, now, HConstants.EMPTY_BYTE_ARRAY));
+ region.put(new Put(row).addColumn(fam1, q2, now, HConstants.EMPTY_BYTE_ARRAY));
// Add a cell that will expire in 15 seconds via cell TTL
region.put(new Put(row).add(new KeyValue(row, fam1, q3, now + 10000 - 1,
HConstants.EMPTY_BYTE_ARRAY, new Tag[] {
// TTL tags specify ts in milliseconds
new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) } )));
// Add a cell that will expire in 20 seconds via family setting
- region.put(new Put(row).add(fam1, q4, now + 10000 - 1, HConstants.EMPTY_BYTE_ARRAY));
+ region.put(new Put(row).addColumn(fam1, q4, now + 10000 - 1, HConstants.EMPTY_BYTE_ARRAY));
// Flush so we are sure store scanning gets this right
region.flush(true);
@@ -6377,7 +6378,7 @@ public class TestHRegion {
// Fun with disappearing increments
// Start at 1
- region.put(new Put(row).add(fam1, q1, Bytes.toBytes(1L)));
+ region.put(new Put(row).addColumn(fam1, q1, Bytes.toBytes(1L)));
r = region.get(new Get(row));
byte[] val = r.getValue(fam1, q1);
assertNotNull(val);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java
index aa45ab9..98d98aa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java
@@ -145,7 +145,7 @@ public class TestHRegionOnCluster {
String value, int verifyNum) throws IOException {
System.out.println("=========Putting data :" + row);
Put put = new Put(Bytes.toBytes(row));
- put.add(family, Bytes.toBytes("q1"), Bytes.toBytes(value));
+ put.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes(value));
table.put(put);
ResultScanner resultScanner = table.getScanner(new Scan());
List<Result> results = new ArrayList<Result>();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index 04e9b56..0c2e01c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -1078,7 +1078,7 @@ public class TestHRegionReplayEvents {
long readPoint = region.getMVCC().getReadPoint();
long origSeqId = readPoint + 100;
- Put put = new Put(row).add(family, row, row);
+ Put put = new Put(row).addColumn(family, row, row);
put.setDurability(Durability.SKIP_WAL); // we replay with skip wal
replay(region, put, origSeqId);
@@ -1091,7 +1091,7 @@ public class TestHRegionReplayEvents {
// replay an entry that is smaller than current read point
// caution: adding an entry below current read point might cause partial dirty reads. Normal
// replay does not allow reads while replay is going on.
- put = new Put(row2).add(family, row2, row2);
+ put = new Put(row2).addColumn(family, row2, row2);
put.setDurability(Durability.SKIP_WAL);
replay(region, put, origSeqId - 50);
@@ -1628,7 +1628,7 @@ public class TestHRegionReplayEvents {
Put put = new Put(Bytes.toBytes("" + i));
put.setDurability(Durability.SKIP_WAL);
for (byte[] family : families) {
- put.add(family, qf, EnvironmentEdgeManager.currentTime(), null);
+ put.addColumn(family, qf, EnvironmentEdgeManager.currentTime(), null);
}
replay(region, put, i+1);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
index 896cd5d..67c5f51 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java
@@ -113,11 +113,11 @@ public class TestJoinedScanners {
for (long i = 0; i < rows_to_insert; i++) {
Put put = new Put(Bytes.toBytes(Long.toString (i)));
if (rand.nextInt(100) <= selectionRatio) {
- put.add(cf_essential, col_name, flag_yes);
+ put.addColumn(cf_essential, col_name, flag_yes);
} else {
- put.add(cf_essential, col_name, flag_no);
+ put.addColumn(cf_essential, col_name, flag_no);
}
- put.add(cf_joined, col_name, val_large);
+ put.addColumn(cf_joined, col_name, val_large);
puts.add(put);
if (puts.size() >= insert_batch) {
ht.put(puts);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index d19d709..bb72b1d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -103,16 +103,16 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
p = new Put(T1, ts+1);
- p.add(c0, c0, T2);
+ p.addColumn(c0, c0, T2);
region.put(p);
p = new Put(T1, ts+2);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
p = new Put(T1, ts+4);
- p.add(c0, c0, T4);
+ p.addColumn(c0, c0, T4);
region.put(p);
// now place a delete marker at ts+2
@@ -156,16 +156,16 @@ public class TestKeepDeletes {
// two more puts, this will expire the older puts.
p = new Put(T1, ts+5);
- p.add(c0, c0, T5);
+ p.addColumn(c0, c0, T5);
region.put(p);
p = new Put(T1, ts+6);
- p.add(c0, c0, T6);
+ p.addColumn(c0, c0, T6);
region.put(p);
// also add an old put again
// (which is past the max versions)
p = new Put(T1, ts);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
r = region.get(g);
assertTrue(r.isEmpty());
@@ -200,7 +200,7 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
Delete d = new Delete(T1, ts);
@@ -245,7 +245,7 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
Get gOne = new Get(T1);
@@ -322,13 +322,13 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
p = new Put(T1, ts+2);
- p.add(c0, c0, T2);
+ p.addColumn(c0, c0, T2);
region.put(p);
p = new Put(T1, ts+4);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
Delete d = new Delete(T1, ts+1);
@@ -456,12 +456,12 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
// a put into another store (CF) should have no effect
p = new Put(T1, ts-10);
- p.add(c1, c0, T1);
+ p.addColumn(c1, c0, T1);
region.put(p);
// all the following deletes affect the put
@@ -491,7 +491,7 @@ public class TestKeepDeletes {
// another put will push out the earlier put...
p = new Put(T1, ts+3);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
region.flush(true);
@@ -519,12 +519,12 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
// a put another (older) row in the same store
p = new Put(T2, ts-10);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
// all the following deletes affect the put
@@ -554,7 +554,7 @@ public class TestKeepDeletes {
// another put will push out the earlier put...
p = new Put(T1, ts+3);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
region.flush(true);
@@ -569,7 +569,7 @@ public class TestKeepDeletes {
// another put will push out the earlier put...
p = new Put(T1, ts+4);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
// this pushed out the column and version marker
@@ -596,31 +596,31 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
- p.add(c0, c1, T1);
- p.add(c1, c0, T1);
- p.add(c1, c1, T1);
+ p.addColumn(c0, c0, T1);
+ p.addColumn(c0, c1, T1);
+ p.addColumn(c1, c0, T1);
+ p.addColumn(c1, c1, T1);
region.put(p);
p = new Put(T2, ts);
- p.add(c0, c0, T1);
- p.add(c0, c1, T1);
- p.add(c1, c0, T1);
- p.add(c1, c1, T1);
+ p.addColumn(c0, c0, T1);
+ p.addColumn(c0, c1, T1);
+ p.addColumn(c1, c0, T1);
+ p.addColumn(c1, c1, T1);
region.put(p);
p = new Put(T1, ts+1);
- p.add(c0, c0, T2);
- p.add(c0, c1, T2);
- p.add(c1, c0, T2);
- p.add(c1, c1, T2);
+ p.addColumn(c0, c0, T2);
+ p.addColumn(c0, c1, T2);
+ p.addColumn(c1, c0, T2);
+ p.addColumn(c1, c1, T2);
region.put(p);
p = new Put(T2, ts+1);
- p.add(c0, c0, T2);
- p.add(c0, c1, T2);
- p.add(c1, c0, T2);
- p.add(c1, c1, T2);
+ p.addColumn(c0, c0, T2);
+ p.addColumn(c0, c1, T2);
+ p.addColumn(c1, c0, T2);
+ p.addColumn(c1, c1, T2);
region.put(p);
Delete d = new Delete(T1, ts+2);
@@ -678,13 +678,13 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
// this prevents marker collection based on earliestPut
// (cannot keep earliest put per column in the store file)
p = new Put(T1, ts-10);
- p.add(c0, c1, T1);
+ p.addColumn(c0, c1, T1);
region.put(p);
Delete d = new Delete(T1, ts);
@@ -709,14 +709,14 @@ public class TestKeepDeletes {
// the 2nd put (and all delete markers following)
// will be removed.
p = new Put(T1, ts+2);
- p.add(c0, c0, T2);
+ p.addColumn(c0, c0, T2);
region.put(p);
// delete, put, delete, delete, put
assertEquals(3, countDeleteMarkers(region));
p = new Put(T1, ts+3);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
// This is potentially questionable behavior.
@@ -746,7 +746,7 @@ public class TestKeepDeletes {
// add one more put
p = new Put(T1, ts+4);
- p.add(c0, c0, T4);
+ p.addColumn(c0, c0, T4);
region.put(p);
region.flush(true);
@@ -771,17 +771,17 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
- p.add(c0, c1, T1);
- p.add(c1, c0, T1);
- p.add(c1, c1, T1);
+ p.addColumn(c0, c0, T1);
+ p.addColumn(c0, c1, T1);
+ p.addColumn(c1, c0, T1);
+ p.addColumn(c1, c1, T1);
region.put(p);
p = new Put(T2, ts+1);
- p.add(c0, c0, T2);
- p.add(c0, c1, T2);
- p.add(c1, c0, T2);
- p.add(c1, c1, T2);
+ p.addColumn(c0, c0, T2);
+ p.addColumn(c0, c1, T2);
+ p.addColumn(c1, c0, T2);
+ p.addColumn(c1, c1, T2);
region.put(p);
// family markers are each family
@@ -823,16 +823,16 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime() - 2000; // 2s in the past
Put p = new Put(T1, ts);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
p = new Put(T1, ts-1);
- p.add(c0, c0, T2);
+ p.addColumn(c0, c0, T2);
region.put(p);
p = new Put(T1, ts-3);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
p = new Put(T1, ts-4);
- p.add(c0, c0, T0);
+ p.addColumn(c0, c0, T0);
region.put(p);
// all puts now are just retained because of min versions = 3
@@ -861,7 +861,7 @@ public class TestKeepDeletes {
r = region.get(g);
checkResult(r, c0, c0, T1);
p = new Put(T1, ts+1);
- p.add(c0, c0, T4);
+ p.addColumn(c0, c0, T4);
region.put(p);
region.flush(true);
@@ -873,7 +873,7 @@ public class TestKeepDeletes {
// this will push out the last put before
// family delete marker
p = new Put(T1, ts+2);
- p.add(c0, c0, T5);
+ p.addColumn(c0, c0, T5);
region.put(p);
region.flush(true);
@@ -902,12 +902,12 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime() - 2000; // 2s in the past
Put p = new Put(T1, ts);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
// place an old row, to make the family marker expires anyway
p = new Put(T2, ts-10);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
checkGet(region, T1, c0, c0, ts+1, T3);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
index cd84eac..661583e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
@@ -75,15 +75,15 @@ public class TestMinVersions {
long ts = EnvironmentEdgeManager.currentTime() - 2000;
Put p = new Put(T1, ts);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
p = new Put(T1, ts+1);
- p.add(c0, c0, T4);
+ p.addColumn(c0, c0, T4);
region.put(p);
p = new Put(T3, ts);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
// now make sure that getClosestBefore(...) get can
@@ -124,11 +124,11 @@ public class TestMinVersions {
try {
Put p = new Put(T1, ts-1);
- p.add(c0, c0, T2);
+ p.addColumn(c0, c0, T2);
region.put(p);
p = new Put(T1, ts-3);
- p.add(c0, c0, T0);
+ p.addColumn(c0, c0, T0);
region.put(p);
// now flush/compact
@@ -136,15 +136,15 @@ public class TestMinVersions {
region.compact(true);
p = new Put(T1, ts);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
p = new Put(T1, ts-2);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
p = new Put(T1, ts-3);
- p.add(c0, c0, T0);
+ p.addColumn(c0, c0, T0);
region.put(p);
// newest version in the memstore
@@ -180,15 +180,15 @@ public class TestMinVersions {
try {
Put p = new Put(T1, ts-2);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
p = new Put(T1, ts-1);
- p.add(c0, c0, T2);
+ p.addColumn(c0, c0, T2);
region.put(p);
p = new Put(T1, ts);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
Delete d = new Delete(T1, ts-1);
@@ -240,17 +240,17 @@ public class TestMinVersions {
try {
// 2nd version
Put p = new Put(T1, ts-2);
- p.add(c0, c0, T2);
+ p.addColumn(c0, c0, T2);
region.put(p);
// 3rd version
p = new Put(T1, ts-1);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
// 4th version
p = new Put(T1, ts);
- p.add(c0, c0, T4);
+ p.addColumn(c0, c0, T4);
region.put(p);
// now flush/compact
@@ -259,7 +259,7 @@ public class TestMinVersions {
// now put the first version (backdated)
p = new Put(T1, ts-3);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
// now the latest change is in the memstore,
@@ -280,7 +280,7 @@ public class TestMinVersions {
checkResult(r, c0, T4,T3);
p = new Put(T1, ts+1);
- p.add(c0, c0, T5);
+ p.addColumn(c0, c0, T5);
region.put(p);
// now the latest version is in the memstore
@@ -316,22 +316,22 @@ public class TestMinVersions {
// 1st version
Put p = new Put(T1, ts-3);
- p.add(c0, c0, T1);
+ p.addColumn(c0, c0, T1);
region.put(p);
// 2nd version
p = new Put(T1, ts-2);
- p.add(c0, c0, T2);
+ p.addColumn(c0, c0, T2);
region.put(p);
// 3rd version
p = new Put(T1, ts-1);
- p.add(c0, c0, T3);
+ p.addColumn(c0, c0, T3);
region.put(p);
// 4th version
p = new Put(T1, ts);
- p.add(c0, c0, T4);
+ p.addColumn(c0, c0, T4);
region.put(p);
Result r = region.get(new Get(T1));
@@ -408,23 +408,23 @@ public class TestMinVersions {
try {
Put p = new Put(T1, ts-3);
- p.add(c0, c0, T0);
- p.add(c1, c1, T0);
+ p.addColumn(c0, c0, T0);
+ p.addColumn(c1, c1, T0);
region.put(p);
p = new Put(T1, ts-2);
- p.add(c0, c0, T1);
- p.add(c1, c1, T1);
+ p.addColumn(c0, c0, T1);
+ p.addColumn(c1, c1, T1);
region.put(p);
p = new Put(T1, ts-1);
- p.add(c0, c0, T2);
- p.add(c1, c1, T2);
+ p.addColumn(c0, c0, T2);
+ p.addColumn(c1, c1, T2);
region.put(p);
p = new Put(T1, ts);
- p.add(c0, c0, T3);
- p.add(c1, c1, T3);
+ p.addColumn(c0, c0, T3);
+ p.addColumn(c1, c1, T3);
region.put(p);
List<Long> tss = new ArrayList<Long>();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
index d9453b0..4dc233f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
@@ -115,7 +115,7 @@ public class TestParallelPut {
long value = 1L;
Put put = new Put(row);
- put.add(fam1, qual1, Bytes.toBytes(value));
+ put.addColumn(fam1, qual1, Bytes.toBytes(value));
region.put(put);
assertGet(this.region, row, fam1, qual1, Bytes.toBytes(value));
@@ -215,7 +215,7 @@ public class TestParallelPut {
// put the randombytes and verify that we can read it. This is one
// way of ensuring that rwcc manipulation in HRegion.put() is fine.
Put put = new Put(rowkey);
- put.add(fam1, qual1, value);
+ put.addColumn(fam1, qual1, value);
in[0] = put;
try {
OperationStatus[] ret = region.batchMutate(in);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
index f238770..bcb8733 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransaction.java
@@ -460,7 +460,7 @@ public class TestRegionMergeTransaction {
continue;
}
Put put = new Put(k);
- put.add(f, null, k);
+ put.addColumn(f, null, k);
if (r.getWAL() == null)
put.setDurability(Durability.SKIP_WAL);
r.put(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
index 2a949a1..ae3ad1a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
@@ -456,7 +456,7 @@ public class TestRegionMergeTransactionOnCluster {
private void loadData(Table table) throws IOException {
for (int i = 0; i < ROWSIZE; i++) {
Put put = new Put(ROWS[i]);
- put.add(FAMILYNAME, QUALIFIER, Bytes.toBytes(i));
+ put.addColumn(FAMILYNAME, QUALIFIER, Bytes.toBytes(i));
table.put(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
index 96b6122..870e963 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
@@ -336,7 +336,7 @@ public class TestRegionReplicas {
while (running.get()) {
byte[] data = Bytes.toBytes(String.valueOf(key));
Put put = new Put(data);
- put.add(f, null, data);
+ put.addColumn(f, null, data);
table.put(put);
key++;
if (key == endKey) key = startKey;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index 7a9e61f..67fedcd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -119,7 +119,7 @@ public class TestRegionServerMetrics {
// Do a first put to be sure that the connection is established, meta is there and so on.
Table table = connection.getTable(tName);
Put p = new Put(row);
- p.add(cfName, qualifier, initValue);
+ p.addColumn(cfName, qualifier, initValue);
table.put(p);
metricsRegionServer.getRegionServerWrapper().forceRecompute();
@@ -199,7 +199,7 @@ public class TestRegionServerMetrics {
Table t = TEST_UTIL.createTable(tableName, cf);
Put p = new Put(row);
- p.add(cf, qualifier, val);
+ p.addColumn(cf, qualifier, val);
p.setDurability(Durability.SKIP_WAL);
t.put(p);
@@ -227,7 +227,7 @@ public class TestRegionServerMetrics {
//Force a hfile.
Table t = TEST_UTIL.createTable(tableName, cf);
Put p = new Put(row);
- p.add(cf, qualifier, val);
+ p.addColumn(cf, qualifier, val);
t.put(p);
TEST_UTIL.getHBaseAdmin().flush(tableName);
@@ -251,15 +251,15 @@ public class TestRegionServerMetrics {
Table t = TEST_UTIL.createTable(tableName, cf);
Put p = new Put(row);
- p.add(cf, qualifier, valOne);
+ p.addColumn(cf, qualifier, valOne);
t.put(p);
Put pTwo = new Put(row);
- pTwo.add(cf, qualifier, valTwo);
+ pTwo.addColumn(cf, qualifier, valTwo);
t.checkAndPut(row, cf, qualifier, valOne, pTwo);
Put pThree = new Put(row);
- pThree.add(cf, qualifier, valThree);
+ pThree.addColumn(cf, qualifier, valThree);
t.checkAndPut(row, cf, qualifier, valOne, pThree);
metricsRegionServer.getRegionServerWrapper().forceRecompute();
@@ -281,7 +281,7 @@ public class TestRegionServerMetrics {
Table t = TEST_UTIL.createTable(tableName, cf);
Put p = new Put(row);
- p.add(cf, qualifier, val);
+ p.addColumn(cf, qualifier, val);
t.put(p);
for(int count = 0; count< 13; count++) {
@@ -308,7 +308,7 @@ public class TestRegionServerMetrics {
Table t = TEST_UTIL.createTable(tableName, cf);
Put p = new Put(row);
- p.add(cf, qualifier, val);
+ p.addColumn(cf, qualifier, val);
t.put(p);
for(int count = 0; count< 73; count++) {
@@ -334,7 +334,7 @@ public class TestRegionServerMetrics {
List<Put> puts = new ArrayList<>();
for (int insertCount =0; insertCount < 100; insertCount++) {
Put p = new Put(Bytes.toBytes("" + insertCount + "row"));
- p.add(cf, qualifier, val);
+ p.addColumn(cf, qualifier, val);
puts.add(p);
}
try (Table t = TEST_UTIL.createTable(tableName, cf)) {
@@ -384,7 +384,7 @@ public class TestRegionServerMetrics {
List<Put> puts = new ArrayList<>();
for (int insertCount =0; insertCount < 100; insertCount++) {
Put p = new Put(Bytes.toBytes("" + insertCount + "row"));
- p.add(cf, qualifier, val);
+ p.addColumn(cf, qualifier, val);
puts.add(p);
}
try (Table t = TEST_UTIL.createTable(tableName, cf)) {
@@ -442,7 +442,7 @@ public class TestRegionServerMetrics {
t.setAutoFlush(true, true);
for (int insertCount = 0; insertCount < numHfiles; insertCount++) {
Put p = new Put(Bytes.toBytes(insertCount));
- p.add(cf, qualifier, val);
+ p.addColumn(cf, qualifier, val);
t.put(p);
admin.flush(tableName);
}
@@ -471,7 +471,7 @@ public class TestRegionServerMetrics {
for (int insertCount = numHfiles;
insertCount < 2 * numHfiles - 1; insertCount++) {
Put p = new Put(Bytes.toBytes(insertCount));
- p.add(cf, qualifier, val);
+ p.addColumn(cf, qualifier, val);
t.put(p);
admin.flush(tableName);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
index b2405e8..e8a2134 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
@@ -76,7 +76,7 @@ public class TestRegionServerNoMaster {
// Create table then get the single region for our new table.
table = HTU.createTable(tableName,HConstants.CATALOG_FAMILY);
Put p = new Put(row);
- p.add(HConstants.CATALOG_FAMILY, row, row);
+ p.addColumn(HConstants.CATALOG_FAMILY, row, row);
table.put(p);
try (RegionLocator locator = HTU.getConnection().getRegionLocator(tableName)) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowTooBig.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowTooBig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowTooBig.java
index ab12195..4d3a1c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowTooBig.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowTooBig.java
@@ -92,7 +92,8 @@ public class TestRowTooBig {
for (int i = 0; i < 5 ; i++) {
Put put = new Put(row1);
- put.add(fam1, Bytes.toBytes("col_" + i ), new byte[5 * 1024 * 1024]);
+ byte[] value = new byte[5 * 1024 * 1024];
+ put.addColumn(fam1, Bytes.toBytes("col_" + i), value);
region.put(put);
region.flush(true);
}
@@ -138,7 +139,8 @@ public class TestRowTooBig {
for (int i = 0; i < 10; i++) {
Put put = new Put(row1);
for (int j = 0; j < 10 * 10000; j++) {
- put.add(fam1, Bytes.toBytes("col_" + i + "_" + j), new byte[10]);
+ byte[] value = new byte[10];
+ put.addColumn(fam1, Bytes.toBytes("col_" + i + "_" + j), value);
}
region.put(put);
region.flush(true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java
index 0c11978..909ae71 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java
@@ -90,27 +90,27 @@ public class TestSCVFWithMiniCluster {
/* Add a row with 'a:foo' = false */
Put put = new Put(Bytes.toBytes("1"));
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("false"));
- put.add(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
- put.add(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
- put.add(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
+ put.addColumn(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("false"));
+ put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
+ put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
+ put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
puts.add(put);
/* Add a row with 'a:foo' = true */
put = new Put(Bytes.toBytes("2"));
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("true"));
- put.add(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
- put.add(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
- put.add(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
+ put.addColumn(FAMILY_A, QUALIFIER_FOO, Bytes.toBytes("true"));
+ put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
+ put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
+ put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
puts.add(put);
/* Add a row with 'a:foo' qualifier not set */
put = new Put(Bytes.toBytes("3"));
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
- put.add(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
- put.add(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
+ put.addColumn(FAMILY_A, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
+ put.addColumn(FAMILY_B, QUALIFIER_FOO, Bytes.toBytes("_flag_"));
+ put.addColumn(FAMILY_B, QUALIFIER_BAR, Bytes.toBytes("_flag_"));
puts.add(put);
htable.put(puts);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java
index 5dd10b1..e84ed59 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java
@@ -135,7 +135,7 @@ public class TestScannerRetriableFailure {
byte[] row = Bytes.toBytes(String.format("%09d", i));
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY_NAME, null, row);
+ put.addColumn(FAMILY_NAME, null, row);
table.put(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index 03cf8b8..fc3735a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -309,7 +309,7 @@ public class TestSeekOptimizations {
public void put(String qual, long ts) {
if (!putTimestamps.contains(ts)) {
- put.add(FAMILY_BYTES, Bytes.toBytes(qual), ts, createValue(ts));
+ put.addColumn(FAMILY_BYTES, Bytes.toBytes(qual), ts, createValue(ts));
putTimestamps.add(ts);
}
if (VERBOSE) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index 1cf06e5..2549a4f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -486,7 +486,7 @@ public class TestSplitTransactionOnCluster {
String row = "row" + i;
Put p = new Put(row.getBytes());
String val = "Val" + i;
- p.add("col".getBytes(), "ql".getBytes(), val.getBytes());
+ p.addColumn("col".getBytes(), "ql".getBytes(), val.getBytes());
table.put(p);
admin.flush(userTableName.getName());
Delete d = new Delete(row.getBytes());
@@ -500,13 +500,13 @@ public class TestSplitTransactionOnCluster {
.getRegionsOfTable(userTableName);
HRegionInfo hRegionInfo = regionsOfTable.get(0);
Put p = new Put("row6".getBytes());
- p.add("col".getBytes(), "ql".getBytes(), "val".getBytes());
+ p.addColumn("col".getBytes(), "ql".getBytes(), "val".getBytes());
table.put(p);
p = new Put("row7".getBytes());
- p.add("col".getBytes(), "ql".getBytes(), "val".getBytes());
+ p.addColumn("col".getBytes(), "ql".getBytes(), "val".getBytes());
table.put(p);
p = new Put("row8".getBytes());
- p.add("col".getBytes(), "ql".getBytes(), "val".getBytes());
+ p.addColumn("col".getBytes(), "ql".getBytes(), "val".getBytes());
table.put(p);
admin.flush(userTableName.getName());
admin.split(hRegionInfo.getRegionName(), "row7".getBytes());
@@ -753,16 +753,16 @@ public class TestSplitTransactionOnCluster {
private void insertData(final TableName tableName, HBaseAdmin admin, Table t) throws IOException,
InterruptedException {
Put p = new Put(Bytes.toBytes("row1"));
- p.add(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("1"));
+ p.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("1"));
t.put(p);
p = new Put(Bytes.toBytes("row2"));
- p.add(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("2"));
+ p.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("2"));
t.put(p);
p = new Put(Bytes.toBytes("row3"));
- p.add(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("3"));
+ p.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("3"));
t.put(p);
p = new Put(Bytes.toBytes("row4"));
- p.add(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("4"));
+ p.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("4"));
t.put(p);
admin.flush(tableName);
}
@@ -966,7 +966,7 @@ public class TestSplitTransactionOnCluster {
Table hTable = connection.getTable(desc.getTableName());
for(int i = 1; i < 5; i++) {
Put p1 = new Put(("r"+i).getBytes());
- p1.add(Bytes.toBytes("f"), "q1".getBytes(), "v".getBytes());
+ p1.addColumn(Bytes.toBytes("f"), "q1".getBytes(), "v".getBytes());
hTable.put(p1);
}
admin.flush(desc.getTableName());
@@ -1055,8 +1055,8 @@ public class TestSplitTransactionOnCluster {
HRegion region = regions.get(0);
for(int i = 3;i<9;i++) {
Put p = new Put(Bytes.toBytes("row"+i));
- p.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("value"+i));
- p.add(Bytes.toBytes("i_f"), Bytes.toBytes("q"), Bytes.toBytes("value"+i));
+ p.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("value" + i));
+ p.addColumn(Bytes.toBytes("i_f"), Bytes.toBytes("q"), Bytes.toBytes("value" + i));
region.put(p);
}
region.flush(true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java
index 92e0558..826d06f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitWalDataLoss.java
@@ -118,7 +118,8 @@ public class TestSplitWalDataLoss {
Connection conn = testUtil.getConnection();
try (Table table = conn.getTable(tableName)) {
- table.put(new Put(Bytes.toBytes("row0")).addColumn(family, qualifier, Bytes.toBytes("val0")));
+ table.put(new Put(Bytes.toBytes("row0"))
+ .addColumn(family, qualifier, Bytes.toBytes("val0")));
}
long oldestSeqIdOfStore = region.getOldestSeqIdOfStore(family);
Log.info("CHANGE OLDEST " + oldestSeqIdOfStore);
@@ -130,7 +131,8 @@ public class TestSplitWalDataLoss {
}
}
try (Table table = conn.getTable(tableName)) {
- table.put(new Put(Bytes.toBytes("row1")).addColumn(family, qualifier, Bytes.toBytes("val1")));
+ table.put(new Put(Bytes.toBytes("row1"))
+ .addColumn(family, qualifier, Bytes.toBytes("val1")));
}
long now = EnvironmentEdgeManager.currentTime();
rs.tryRegionServerReport(now - 500, now);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
index 18eda70..74b3df9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java
@@ -121,7 +121,7 @@ public class TestStoreFileRefresherChore {
Put put = new Put(Bytes.toBytes("" + i));
put.setDurability(Durability.SKIP_WAL);
for (byte[] family : families) {
- put.add(family, qf, null);
+ put.addColumn(family, qf, null);
}
region.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
index 32e5855..a85e479 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java
@@ -120,7 +120,7 @@ public class TestTags {
byte[] value = Bytes.toBytes("value");
table = TEST_UTIL.getConnection().getTable(tableName);
Put put = new Put(row);
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
put.setAttribute("visibility", Bytes.toBytes("myTag"));
table.put(put);
admin.flush(tableName);
@@ -133,7 +133,7 @@ public class TestTags {
Put put1 = new Put(row1);
byte[] value1 = Bytes.toBytes("1000dfsdf");
- put1.add(fam, qual, HConstants.LATEST_TIMESTAMP, value1);
+ put1.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value1);
// put1.setAttribute("visibility", Bytes.toBytes("myTag3"));
table.put(put1);
admin.flush(tableName);
@@ -141,7 +141,7 @@ public class TestTags {
Put put2 = new Put(row2);
byte[] value2 = Bytes.toBytes("1000dfsdf");
- put2.add(fam, qual, HConstants.LATEST_TIMESTAMP, value2);
+ put2.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value2);
put2.setAttribute("visibility", Bytes.toBytes("myTag3"));
table.put(put2);
admin.flush(tableName);
@@ -187,7 +187,7 @@ public class TestTags {
table = TEST_UTIL.getConnection().getTable(tableName);
Put put = new Put(row);
byte[] value = Bytes.toBytes("value");
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
table.put(put);
admin.flush(tableName);
// We are lacking an API for confirming flush request compaction.
@@ -199,14 +199,14 @@ public class TestTags {
Put put1 = new Put(row1);
byte[] value1 = Bytes.toBytes("1000dfsdf");
- put1.add(fam, qual, HConstants.LATEST_TIMESTAMP, value1);
+ put1.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value1);
table.put(put1);
admin.flush(tableName);
Thread.sleep(1000);
Put put2 = new Put(row2);
byte[] value2 = Bytes.toBytes("1000dfsdf");
- put2.add(fam, qual, HConstants.LATEST_TIMESTAMP, value2);
+ put2.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value2);
table.put(put2);
admin.flush(tableName);
Thread.sleep(1000);
@@ -277,13 +277,13 @@ public class TestTags {
table = TEST_UTIL.getConnection().getTable(tableName);
Put put = new Put(row);
byte[] value = Bytes.toBytes("value");
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
int bigTagLen = Short.MAX_VALUE - 5;
put.setAttribute("visibility", new byte[bigTagLen]);
table.put(put);
Put put1 = new Put(row1);
byte[] value1 = Bytes.toBytes("1000dfsdf");
- put1.add(fam, qual, HConstants.LATEST_TIMESTAMP, value1);
+ put1.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value1);
table.put(put1);
admin.flush(tableName);
// We are lacking an API for confirming flush request compaction.
@@ -295,18 +295,18 @@ public class TestTags {
put1 = new Put(row2);
value1 = Bytes.toBytes("1000dfsdf");
- put1.add(fam, qual, HConstants.LATEST_TIMESTAMP, value1);
+ put1.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value1);
table.put(put1);
admin.flush(tableName);
Thread.sleep(1000);
Put put2 = new Put(rowd);
byte[] value2 = Bytes.toBytes("1000dfsdf");
- put2.add(fam, qual, HConstants.LATEST_TIMESTAMP, value2);
+ put2.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value2);
table.put(put2);
put2 = new Put(rowe);
value2 = Bytes.toBytes("1000dfsddfdf");
- put2.add(fam, qual, HConstants.LATEST_TIMESTAMP, value2);
+ put2.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value2);
put.setAttribute("visibility", Bytes.toBytes("ram"));
table.put(put2);
admin.flush(tableName);
@@ -390,7 +390,7 @@ public class TestTags {
table = TEST_UTIL.getConnection().getTable(tableName);
Put put = new Put(row1);
byte[] v = Bytes.toBytes(2L);
- put.add(f, q, v);
+ put.addColumn(f, q, v);
put.setAttribute("visibility", Bytes.toBytes("tag1"));
table.put(put);
Increment increment = new Increment(row1);
@@ -430,7 +430,7 @@ public class TestTags {
put = new Put(row2);
v = Bytes.toBytes(2L);
- put.add(f, q, v);
+ put.addColumn(f, q, v);
table.put(put);
increment = new Increment(row2);
increment.add(new KeyValue(row2, f, q, 1234L, v));
@@ -452,7 +452,7 @@ public class TestTags {
// Test Append
byte[] row3 = Bytes.toBytes("r3");
put = new Put(row3);
- put.add(f, q, Bytes.toBytes("a"));
+ put.addColumn(f, q, Bytes.toBytes("a"));
put.setAttribute("visibility", Bytes.toBytes("tag1"));
table.put(put);
Append append = new Append(row3);
@@ -492,7 +492,7 @@ public class TestTags {
byte[] row4 = Bytes.toBytes("r4");
put = new Put(row4);
- put.add(f, q, Bytes.toBytes("a"));
+ put.addColumn(f, q, Bytes.toBytes("a"));
table.put(put);
append = new Append(row4);
append.add(new KeyValue(row4, f, q, 1234L, v));
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
index cfa1695..ca7b3b1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
@@ -78,7 +78,8 @@ public class TestWideScanner extends HBaseTestCase {
for (j = 0; j < 100; j++) {
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
- put.add(COLUMNS[rng.nextInt(COLUMNS.length)], b, ++ts, b);
+ long ts1 = ++ts;
+ put.addColumn(COLUMNS[rng.nextInt(COLUMNS.length)], b, ts1, b);
region.put(put);
count++;
}
[4/8] hbase git commit: HBASE-14675 Exorcise deprecated Put#add(...)
and replace with Put#addColumn(...)
Posted by jm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
index 9207f0c..0e47d39 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
@@ -86,7 +86,8 @@ public class TestConstraint {
// test that we don't fail on a valid put
Put put = new Put(row1);
byte[] value = Integer.toString(10).getBytes();
- put.add(dummy, new byte[0], value);
+ byte[] qualifier = new byte[0];
+ put.addColumn(dummy, qualifier, value);
table.put(put);
} finally {
table.close();
@@ -117,7 +118,8 @@ public class TestConstraint {
// test that we do fail on violation
Put put = new Put(row1);
- put.add(dummy, new byte[0], "fail".getBytes());
+ byte[] qualifier = new byte[0];
+ put.addColumn(dummy, qualifier, "fail".getBytes());
LOG.warn("Doing put in table");
try {
table.put(put);
@@ -160,7 +162,8 @@ public class TestConstraint {
try {
// test that we don't fail because its disabled
Put put = new Put(row1);
- put.add(dummy, new byte[0], "pass".getBytes());
+ byte[] qualifier = new byte[0];
+ put.addColumn(dummy, qualifier, "pass".getBytes());
table.put(put);
} finally {
table.close();
@@ -192,7 +195,8 @@ public class TestConstraint {
try {
// test that we do fail on violation
Put put = new Put(row1);
- put.add(dummy, new byte[0], "pass".getBytes());
+ byte[] qualifier = new byte[0];
+ put.addColumn(dummy, qualifier, "pass".getBytes());
LOG.warn("Doing put in table");
table.put(put);
} finally {
@@ -224,8 +228,9 @@ public class TestConstraint {
// test that we do fail on violation
Put put = new Put(row1);
- put.add(dummy, new byte[0], "pass".getBytes());
-
+ byte[] qualifier = new byte[0];
+ put.addColumn(dummy, qualifier, "pass".getBytes());
+
try{
table.put(put);
fail("RuntimeFailConstraint wasn't triggered - this put shouldn't work!");
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
index f1513b2..4fe0d23 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
@@ -93,12 +93,12 @@ public class TestAggregateProtocol {
Put put = new Put(ROWS[i]);
put.setDurability(Durability.SKIP_WAL);
Long l = new Long(i);
- put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(l));
+ put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(l));
table.put(put);
Put p2 = new Put(ROWS[i]);
put.setDurability(Durability.SKIP_WAL);
- p2.add(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(l)), Bytes
- .toBytes(l * 10));
+ p2.addColumn(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(l)), Bytes
+ .toBytes(l * 10));
table.put(p2);
}
table.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
index cad4205..d62e950 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
@@ -94,7 +94,7 @@ public class TestBatchCoprocessorEndpoint {
Table table = util.getConnection().getTable(TEST_TABLE);
for (int i = 0; i < ROWSIZE; i++) {
Put put = new Put(ROWS[i]);
- put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
+ put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
table.put(put);
}
table.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
index 6b54abb..d9fc881 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
@@ -90,11 +90,12 @@ public class TestDoubleColumnInterpreter {
Put put = new Put(ROWS[i]);
put.setDurability(Durability.SKIP_WAL);
Double d = new Double(i);
- put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(d));
+ put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(d));
table.put(put);
Put p2 = new Put(ROWS[i]);
put.setDurability(Durability.SKIP_WAL);
- p2.add(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(d)), Bytes.toBytes(d * 0.10));
+ p2.addColumn(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(d)),
+ Bytes.toBytes(d * 0.10));
table.put(p2);
}
table.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
index c1d21fa..bd89744 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
@@ -108,15 +108,15 @@ public class TestHTableWrapper {
table = util.createTable(TEST_TABLE, TEST_FAMILY);
Put puta = new Put(ROW_A);
- puta.add(TEST_FAMILY, qualifierCol1, bytes1);
+ puta.addColumn(TEST_FAMILY, qualifierCol1, bytes1);
table.put(puta);
Put putb = new Put(ROW_B);
- putb.add(TEST_FAMILY, qualifierCol1, bytes2);
+ putb.addColumn(TEST_FAMILY, qualifierCol1, bytes2);
table.put(putb);
Put putc = new Put(ROW_C);
- putc.add(TEST_FAMILY, qualifierCol1, bytes3);
+ putc.addColumn(TEST_FAMILY, qualifierCol1, bytes3);
table.put(putc);
}
@@ -204,7 +204,7 @@ public class TestHTableWrapper {
private void checkPutsAndDeletes() throws IOException {
// put:
- Put putD = new Put(ROW_D).add(TEST_FAMILY, qualifierCol1, bytes2);
+ Put putD = new Put(ROW_D).addColumn(TEST_FAMILY, qualifierCol1, bytes2);
hTableInterface.put(putD);
checkRowValue(ROW_D, bytes2);
@@ -214,8 +214,8 @@ public class TestHTableWrapper {
checkRowValue(ROW_D, null);
// multiple puts:
- Put[] puts = new Put[] { new Put(ROW_D).add(TEST_FAMILY, qualifierCol1, bytes2),
- new Put(ROW_E).add(TEST_FAMILY, qualifierCol1, bytes3) };
+ Put[] puts = new Put[] {new Put(ROW_D).addColumn(TEST_FAMILY, qualifierCol1, bytes2),
+ new Put(ROW_E).addColumn(TEST_FAMILY, qualifierCol1, bytes3)};
hTableInterface.put(Arrays.asList(puts));
checkRowsValues(new byte[][] { ROW_D, ROW_E }, new byte[][] { bytes2, bytes3 });
@@ -226,7 +226,7 @@ public class TestHTableWrapper {
}
private void checkCheckAndPut() throws IOException {
- Put putC = new Put(ROW_C).add(TEST_FAMILY, qualifierCol1, bytes5);
+ Put putC = new Put(ROW_C).addColumn(TEST_FAMILY, qualifierCol1, bytes5);
assertFalse(hTableInterface.checkAndPut(ROW_C, TEST_FAMILY, qualifierCol1, /* expect */bytes4,
putC/* newValue */));
assertTrue(hTableInterface.checkAndPut(ROW_C, TEST_FAMILY, qualifierCol1, /* expect */bytes3,
@@ -242,7 +242,7 @@ public class TestHTableWrapper {
}
private void checkIncrementColumnValue() throws IOException {
- hTableInterface.put(new Put(ROW_A).add(TEST_FAMILY, qualifierCol1, Bytes.toBytes(1L)));
+ hTableInterface.put(new Put(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, Bytes.toBytes(1L)));
checkRowValue(ROW_A, Bytes.toBytes(1L));
final long newVal = hTableInterface
@@ -319,7 +319,7 @@ public class TestHTableWrapper {
}
private void checkMutateRow() throws IOException {
- Put put = new Put(ROW_A).add(TEST_FAMILY, qualifierCol1, bytes1);
+ Put put = new Put(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, bytes1);
RowMutations rowMutations = new RowMutations(ROW_A);
rowMutations.add(put);
hTableInterface.mutateRow(rowMutations);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
index 3122b4c..53b34b2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
@@ -102,7 +102,7 @@ public class TestOpenTableInCoprocessor {
final WALEdit edit, final Durability durability) throws IOException {
Table table = e.getEnvironment().getTable(otherTable, getPool());
Put p = new Put(new byte[] { 'a' });
- p.add(family, null, new byte[] { 'a' });
+ p.addColumn(family, null, new byte[]{'a'});
try {
table.batch(Collections.singletonList(put), null);
} catch (InterruptedException e1) {
@@ -162,7 +162,7 @@ public class TestOpenTableInCoprocessor {
Table table = UTIL.getConnection().getTable(TableName.valueOf("primary"));
Put p = new Put(new byte[] { 'a' });
- p.add(family, null, new byte[] { 'a' });
+ p.addColumn(family, null, new byte[]{'a'});
table.put(p);
table.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
index 73d7a96..3cbbe9d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
@@ -94,7 +94,7 @@ public class TestRegionObserverBypass {
public void testSimple() throws Exception {
Table t = util.getConnection().getTable(tableName);
Put p = new Put(row1);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
// before HBASE-4331, this would throw an exception
t.put(p);
checkRowAndDelete(t,row1,0);
@@ -114,13 +114,13 @@ public class TestRegionObserverBypass {
Table t = util.getConnection().getTable(tableName);
List<Put> puts = new ArrayList<Put>();
Put p = new Put(row1);
- p.add(dummy,dummy,dummy);
+ p.addColumn(dummy, dummy, dummy);
puts.add(p);
p = new Put(row2);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
p = new Put(row3);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
// before HBASE-4331, this would throw an exception
t.put(puts);
@@ -130,13 +130,13 @@ public class TestRegionObserverBypass {
puts.clear();
p = new Put(row1);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
p = new Put(row2);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
p = new Put(row3);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
// before HBASE-4331, this would throw an exception
t.put(puts);
@@ -146,13 +146,13 @@ public class TestRegionObserverBypass {
puts.clear();
p = new Put(row1);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
p = new Put(row2);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
p = new Put(row3);
- p.add(dummy,dummy,dummy);
+ p.addColumn(dummy, dummy, dummy);
puts.add(p);
// this worked fine even before HBASE-4331
t.put(puts);
@@ -162,13 +162,13 @@ public class TestRegionObserverBypass {
puts.clear();
p = new Put(row1);
- p.add(dummy,dummy,dummy);
+ p.addColumn(dummy, dummy, dummy);
puts.add(p);
p = new Put(row2);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
p = new Put(row3);
- p.add(dummy,dummy,dummy);
+ p.addColumn(dummy, dummy, dummy);
puts.add(p);
// this worked fine even before HBASE-4331
t.put(puts);
@@ -178,13 +178,13 @@ public class TestRegionObserverBypass {
puts.clear();
p = new Put(row1);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
p = new Put(row2);
- p.add(dummy,dummy,dummy);
+ p.addColumn(dummy, dummy, dummy);
puts.add(p);
p = new Put(row3);
- p.add(test,dummy,dummy);
+ p.addColumn(test, dummy, dummy);
puts.add(p);
// before HBASE-4331, this would throw an exception
t.put(puts);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index 3ddf601..45ba04b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -130,9 +130,9 @@ public class TestRegionObserverInterface {
new Boolean[] { false, false, false, false, false, false, false, false });
Put put = new Put(ROW);
- put.add(A, A, A);
- put.add(B, B, B);
- put.add(C, C, C);
+ put.addColumn(A, A, A);
+ put.addColumn(B, B, B);
+ put.addColumn(C, C, C);
table.put(put);
verifyMethodResult(SimpleRegionObserver.class, new String[] { "hadPreGet", "hadPostGet",
@@ -189,9 +189,9 @@ public class TestRegionObserverInterface {
tableName,
new Boolean[] {false, false, false, false, false});
Put put = new Put(ROW);
- put.add(A, A, A);
- put.add(B, B, B);
- put.add(C, C, C);
+ put.addColumn(A, A, A);
+ put.addColumn(B, B, B);
+ put.addColumn(C, C, C);
Delete delete = new Delete(ROW);
delete.deleteColumn(A, A);
@@ -248,10 +248,10 @@ public class TestRegionObserverInterface {
TableName.valueOf(TEST_TABLE.getNameAsString() + ".testCheckAndPutHooks");
try (Table table = util.createTable(tableName, new byte[][] {A, B, C})) {
Put p = new Put(Bytes.toBytes(0));
- p.add(A, A, A);
+ p.addColumn(A, A, A);
table.put(p);
p = new Put(Bytes.toBytes(0));
- p.add(A, A, A);
+ p.addColumn(A, A, A);
verifyMethodResult(SimpleRegionObserver.class,
new String[] { "hadPreCheckAndPut", "hadPreCheckAndPutAfterRowLock",
"hadPostCheckAndPut" }, tableName, new Boolean[] { false, false, false });
@@ -274,7 +274,7 @@ public class TestRegionObserverInterface {
Table table = util.createTable(tableName, new byte[][] {A, B, C});
try {
Put p = new Put(Bytes.toBytes(0));
- p.add(A, A, A);
+ p.addColumn(A, A, A);
table.put(p);
Delete d = new Delete(Bytes.toBytes(0));
table.delete(d);
@@ -338,7 +338,7 @@ public class TestRegionObserverInterface {
Table table = util.getConnection().getTable(tableName);
Put put = new Put(ROW);
- put.add(A, A, A);
+ put.addColumn(A, A, A);
table.put(put);
Get get = new Get(ROW);
@@ -412,7 +412,7 @@ public class TestRegionObserverInterface {
Table table = util.getConnection().getTable(tableName);
Put put = new Put(ROW);
- put.add(A, A, A);
+ put.addColumn(A, A, A);
table.put(put);
Delete delete = new Delete(ROW);
@@ -522,7 +522,7 @@ public class TestRegionObserverInterface {
byte[] iBytes = Bytes.toBytes(i);
Put put = new Put(iBytes);
put.setDurability(Durability.SKIP_WAL);
- put.add(A, A, iBytes);
+ put.addColumn(A, A, iBytes);
table.put(put);
}
@@ -677,9 +677,9 @@ public class TestRegionObserverInterface {
}
Put put = new Put(ROW);
- put.add(A, A, A);
- put.add(B, B, B);
- put.add(C, C, C);
+ put.addColumn(A, A, A);
+ put.addColumn(B, B, B);
+ put.addColumn(C, C, C);
table.put(put);
verifyMethodResult(SimpleRegionObserver.Legacy.class,
@@ -729,9 +729,9 @@ public class TestRegionObserverInterface {
}
Put put = new Put(ROW);
- put.add(A, A, A);
- put.add(B, B, B);
- put.add(C, C, C);
+ put.addColumn(A, A, A);
+ put.addColumn(B, B, B);
+ put.addColumn(C, C, C);
table.put(put);
cluster.killRegionServer(rs1.getRegionServer().getServerName());
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
index 44e06bd..e20c4ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
@@ -178,7 +178,7 @@ public class TestRegionObserverScannerOpenHook {
h.load(EmptyRegionObsever.class, Coprocessor.PRIORITY_USER, conf);
Put put = new Put(ROW);
- put.add(A, A, A);
+ put.addColumn(A, A, A);
region.put(put);
Get get = new Get(ROW);
@@ -204,7 +204,7 @@ public class TestRegionObserverScannerOpenHook {
// put a row and flush it to disk
Put put = new Put(ROW);
- put.add(A, A, A);
+ put.addColumn(A, A, A);
region.put(put);
region.flush(true);
Get get = new Get(ROW);
@@ -278,7 +278,7 @@ public class TestRegionObserverScannerOpenHook {
// put a row and flush it to disk
Put put = new Put(ROW);
- put.add(A, A, A);
+ put.addColumn(A, A, A);
table.put(put);
HRegionServer rs = UTIL.getRSForFirstRegionInTable(desc.getTableName());
@@ -291,7 +291,7 @@ public class TestRegionObserverScannerOpenHook {
// put another row and flush that too
put = new Put(Bytes.toBytes("anotherrow"));
- put.add(A, A, A);
+ put.addColumn(A, A, A);
table.put(put);
admin.flushRegion(region.getRegionInfo().getRegionName());
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
index 0a4ca16..723edcb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
@@ -127,7 +127,7 @@ public class TestRegionObserverStacking extends TestCase {
h.load(ObserverC.class, Coprocessor.PRIORITY_LOWEST, conf);
Put put = new Put(ROW);
- put.add(A, A, A);
+ put.addColumn(A, A, A);
region.put(put);
Coprocessor c = h.findCoprocessor(ObserverA.class.getName());
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
index 537a415..d25948b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
@@ -107,7 +107,7 @@ public class TestRegionServerCoprocessorExceptionWithAbort {
try {
final byte[] ROW = Bytes.toBytes("aaa");
Put put = new Put(ROW);
- put.add(TEST_FAMILY, ROW, ROW);
+ put.addColumn(TEST_FAMILY, ROW, ROW);
table.put(put);
} catch (IOException e) {
// The region server is going to be aborted.
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
index 5f2b7bd..b3d3890 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
@@ -138,15 +138,15 @@ public class TestRowProcessorEndpoint {
table = util.createTable(TABLE, FAM);
{
Put put = new Put(ROW);
- put.add(FAM, A, Bytes.add(B, C)); // B, C are friends of A
- put.add(FAM, B, Bytes.add(D, E, F)); // D, E, F are friends of B
- put.add(FAM, C, G); // G is a friend of C
+ put.addColumn(FAM, A, Bytes.add(B, C)); // B, C are friends of A
+ put.addColumn(FAM, B, Bytes.add(D, E, F)); // D, E, F are friends of B
+ put.addColumn(FAM, C, G); // G is a friend of C
table.put(put);
rowSize = put.size();
}
Put put = new Put(ROW2);
- put.add(FAM, D, E);
- put.add(FAM, F, G);
+ put.addColumn(FAM, D, E);
+ put.addColumn(FAM, F, G);
table.put(put);
row2Size = put.size();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index 7772664..75fe7a2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -489,7 +489,7 @@ public class TestWALObserver {
private Put creatPutWith2Families(byte[] row) throws IOException {
Put p = new Put(row);
for (int i = 0; i < TEST_FAMILY.length - 1; i++) {
- p.add(TEST_FAMILY[i], TEST_QUALIFIER[i], TEST_VALUE[i]);
+ p.addColumn(TEST_FAMILY[i], TEST_QUALIFIER[i], TEST_VALUE[i]);
}
return p;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
index 6ccfc62..3a635b8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
@@ -100,26 +100,26 @@ public class TestDependentColumnFilter {
private void addData() throws IOException {
Put put = new Put(ROWS[0]);
// add in an entry for each stamp, with 2 as a "good" value
- put.add(FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0]);
- put.add(FAMILIES[0], QUALIFIER, STAMPS[1], BAD_VALS[1]);
- put.add(FAMILIES[0], QUALIFIER, STAMPS[2], MATCH_VAL);
+ put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0]);
+ put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[1], BAD_VALS[1]);
+ put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[2], MATCH_VAL);
// add in entries for stamps 0 and 2.
// without a value check both will be "accepted"
// with one 2 will be accepted(since the corresponding ts entry
// has a matching value
- put.add(FAMILIES[1], QUALIFIER, STAMPS[0], BAD_VALS[0]);
- put.add(FAMILIES[1], QUALIFIER, STAMPS[2], BAD_VALS[2]);
+ put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[0], BAD_VALS[0]);
+ put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[2], BAD_VALS[2]);
this.region.put(put);
put = new Put(ROWS[1]);
- put.add(FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0]);
+ put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0]);
// there is no corresponding timestamp for this so it should never pass
- put.add(FAMILIES[0], QUALIFIER, STAMPS[2], MATCH_VAL);
+ put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[2], MATCH_VAL);
// if we reverse the qualifiers this one should pass
- put.add(FAMILIES[1], QUALIFIER, STAMPS[0], MATCH_VAL);
+ put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[0], MATCH_VAL);
// should pass
- put.add(FAMILIES[1], QUALIFIER, STAMPS[1], BAD_VALS[2]);
+ put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[1], BAD_VALS[2]);
this.region.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index 92be81a..e0a486e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -152,7 +152,7 @@ public class TestFilter {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
- p.add(FAMILIES[0], QUALIFIER, VALUES[0]);
+ p.addColumn(FAMILIES[0], QUALIFIER, VALUES[0]);
}
this.region.put(p);
}
@@ -160,7 +160,7 @@ public class TestFilter {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
- p.add(FAMILIES[1], QUALIFIER, VALUES[1]);
+ p.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
}
this.region.put(p);
}
@@ -173,7 +173,7 @@ public class TestFilter {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
- p.add(FAMILIES[1], QUALIFIER, VALUES[0]);
+ p.addColumn(FAMILIES[1], QUALIFIER, VALUES[0]);
}
this.region.put(p);
}
@@ -181,7 +181,7 @@ public class TestFilter {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
- p.add(FAMILIES[0], QUALIFIER, VALUES[1]);
+ p.addColumn(FAMILIES[0], QUALIFIER, VALUES[1]);
}
this.region.put(p);
}
@@ -229,7 +229,7 @@ public class TestFilter {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for (byte[] QUALIFIER : QUALIFIERS_THREE) {
- p.add(FAMILIES[0], QUALIFIER, VALUES[0]);
+ p.addColumn(FAMILIES[0], QUALIFIER, VALUES[0]);
}
this.region.put(p);
@@ -238,7 +238,7 @@ public class TestFilter {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for (byte[] QUALIFIER : QUALIFIERS_FOUR) {
- p.add(FAMILIES[1], QUALIFIER, VALUES[1]);
+ p.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
}
this.region.put(p);
}
@@ -250,7 +250,7 @@ public class TestFilter {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for (byte[] QUALIFIER : QUALIFIERS_THREE) {
- p.add(FAMILIES[1], QUALIFIER, VALUES[0]);
+ p.addColumn(FAMILIES[1], QUALIFIER, VALUES[0]);
}
this.region.put(p);
}
@@ -258,7 +258,7 @@ public class TestFilter {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for (byte[] QUALIFIER : QUALIFIERS_FOUR) {
- p.add(FAMILIES[0], QUALIFIER, VALUES[1]);
+ p.addColumn(FAMILIES[0], QUALIFIER, VALUES[1]);
}
this.region.put(p);
}
@@ -1458,7 +1458,7 @@ public class TestFilter {
for(int i=0; i<5; i++) {
Put p = new Put(Bytes.toBytes((char)('a'+i) + "row"));
p.setDurability(Durability.SKIP_WAL);
- p.add(family, qualifier, Bytes.toBytes(String.valueOf(111+i)));
+ p.addColumn(family, qualifier, Bytes.toBytes(String.valueOf(111 + i)));
testRegion.put(p);
}
testRegion.flush(true);
@@ -1501,7 +1501,7 @@ public class TestFilter {
// Need to change one of the group one columns to use group two value
Put p = new Put(ROWS_ONE[2]);
- p.add(FAMILIES[0], QUALIFIERS_ONE[2], VALUES[1]);
+ p.addColumn(FAMILIES[0], QUALIFIERS_ONE[2], VALUES[1]);
this.region.put(p);
// Now let's grab rows that have Q_ONE[0](VALUES[0]) and Q_ONE[2](VALUES[1])
@@ -1816,11 +1816,11 @@ public class TestFilter {
@Test
public void testColumnPaginationFilter() throws Exception {
- // Test that the filter skips multiple column versions.
- Put p = new Put(ROWS_ONE[0]);
- p.setDurability(Durability.SKIP_WAL);
- p.add(FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]);
- this.region.put(p);
+ // Test that the filter skips multiple column versions.
+ Put p = new Put(ROWS_ONE[0]);
+ p.setDurability(Durability.SKIP_WAL);
+ p.addColumn(FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]);
+ this.region.put(p);
this.region.flush(true);
// Set of KVs (page: 1; pageSize: 1) - the first set of 1 column per row
@@ -2017,7 +2017,7 @@ public class TestFilter {
for(int i=0; i<10; i++) {
Put p = new Put(Bytes.toBytes("row" + i));
p.setDurability(Durability.SKIP_WAL);
- p.add(FAMILIES[0], columnStatus, Bytes.toBytes(i%2));
+ p.addColumn(FAMILIES[0], columnStatus, Bytes.toBytes(i % 2));
testRegion.put(p);
}
testRegion.flush(true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
index 78a4d1f..0d2940c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
@@ -105,8 +105,7 @@ public class TestFilterWithScanLimits extends FilterTestingCluster {
for (int i = 1; i < 4; i++) {
Put put = new Put(Bytes.toBytes("row" + i));
for (int j = 1; j < 6; j++) {
- put.add(Bytes.toBytes("f1"), Bytes.toBytes("c" + j),
- Bytes.toBytes(i + "_c" + j));
+ put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("c" + j), Bytes.toBytes(i + "_c" + j));
}
puts.add(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
index a53dff1..efbddf8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
@@ -129,8 +129,8 @@ public class TestFilterWrapper {
long timestamp = j;
if (i != 1)
timestamp = i;
- put.add(Bytes.toBytes("f1"), Bytes.toBytes("c" + j), timestamp,
- Bytes.toBytes(i + "_c" + j));
+ put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("c" + j), timestamp,
+ Bytes.toBytes(i + "_c" + j));
}
puts.add(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
index 680ce5c..145e42f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
@@ -116,7 +116,7 @@ public class TestFuzzyRowAndColumnRangeFilter {
Put p = new Put(rk);
p.setDurability(Durability.SKIP_WAL);
- p.add(cf.getBytes(), cq, Bytes.toBytes(c));
+ p.addColumn(cf.getBytes(), cq, Bytes.toBytes(c));
ht.put(p);
LOG.info("Inserting: rk: " + Bytes.toStringBinary(rk) + " cq: "
+ Bytes.toStringBinary(cq));
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
index 5c78dfe..ba1d2a1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
@@ -139,7 +139,7 @@ public class TestFuzzyRowFilterEndToEnd {
Put p = new Put(rk);
p.setDurability(Durability.SKIP_WAL);
- p.add(cf.getBytes(), cq, Bytes.toBytes(c));
+ p.addColumn(cf.getBytes(), cq, Bytes.toBytes(c));
ht.put(p);
}
}
@@ -277,7 +277,7 @@ public class TestFuzzyRowFilterEndToEnd {
Put p = new Put(rk);
p.setDurability(Durability.SKIP_WAL);
- p.add(cf.getBytes(), cq, Bytes.toBytes(c));
+ p.addColumn(cf.getBytes(), cq, Bytes.toBytes(c));
ht.put(p);
LOG.info("Inserting: rk: " + Bytes.toStringBinary(rk) + " cq: "
+ Bytes.toStringBinary(cq));
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
index a8651d8..8291e52 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
@@ -75,8 +75,8 @@ public class TestInvocationRecordFilter {
Put put = new Put(ROW_BYTES);
for (int i = 0; i < 10; i += 2) {
// puts 0, 2, 4, 6 and 8
- put.add(FAMILY_NAME_BYTES, Bytes.toBytes(QUALIFIER_PREFIX + i), i,
- Bytes.toBytes(VALUE_PREFIX + i));
+ put.addColumn(FAMILY_NAME_BYTES, Bytes.toBytes(QUALIFIER_PREFIX + i), (long) i,
+ Bytes.toBytes(VALUE_PREFIX + i));
}
this.region.put(put);
this.region.flush(true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
index 3be10ec..21a0df1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
@@ -75,7 +75,7 @@ public class TestScanRowPrefix extends FilterTestingCluster {
for (byte[] rowId: rowIds) {
Put p = new Put(rowId);
// Use the rowId as the column qualifier
- p.add("F".getBytes(), rowId, "Dummy value".getBytes());
+ p.addColumn("F".getBytes(), rowId, "Dummy value".getBytes());
table.put(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index 504350c..c529107 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -318,7 +318,7 @@ public class TestBlockReorder {
// insert one put to ensure a minimal size
Put p = new Put(sb);
- p.add(sb, sb, sb);
+ p.addColumn(sb, sb, sb);
h.put(p);
DirectoryListing dl = dfs.getClient().listPaths(rootDir, HdfsFileStatus.EMPTY_NAME);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
index 53deeb2..d7e555d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
@@ -133,8 +133,7 @@ public class TestChangingEncoding {
for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
Put put = new Put(getRowKey(batchId, i));
for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
- put.add(CF_BYTES, getQualifier(j),
- getValue(batchId, i, j));
+ put.addColumn(CF_BYTES, getQualifier(j), getValue(batchId, i, j));
}
put.setDurability(Durability.SKIP_WAL);
puts.add(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
index 5ccb206..ce66e82 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
@@ -152,7 +152,7 @@ public class TestEncodedSeekers {
KeyValue kv = new KeyValue(key, CF_BYTES, col, HConstants.LATEST_TIMESTAMP, value, tag);
put.add(kv);
} else {
- put.add(CF_BYTES, col, value);
+ put.addColumn(CF_BYTES, col, value);
}
if(VERBOSE){
KeyValue kvPut = new KeyValue(key, CF_BYTES, col, value);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
index cf2aca5..1635310 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
@@ -131,8 +131,8 @@ public class TestForceCacheImportantBlocks {
Put put = new Put(Bytes.toBytes("row" + i));
for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
for (long ts = 1; ts < NUM_TIMESTAMPS_PER_COL; ++ts) {
- put.add(CF_BYTES, Bytes.toBytes("col" + j), ts,
- Bytes.toBytes("value" + i + "_" + j + "_" + ts));
+ put.addColumn(CF_BYTES, Bytes.toBytes("col" + j), ts,
+ Bytes.toBytes("value" + i + "_" + j + "_" + ts));
}
}
region.put(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
index 7584cf2..9c6bb38 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
@@ -106,8 +106,8 @@ public class TestScannerSelectionUsingKeyRange {
for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
Put put = new Put(Bytes.toBytes("row" + iRow));
for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
- put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
- Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
+ put.addColumn(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
+ Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
}
region.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index d5f4bcd..08b259d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -120,8 +120,8 @@ public class TestScannerSelectionUsingTTL {
for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
Put put = new Put(Bytes.toBytes("row" + iRow));
for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
- put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
- ts + version, Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
+ put.addColumn(FAMILY_BYTES, Bytes.toBytes("col" + iCol), ts + version,
+ Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
}
region.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
index 1975c59..da322bc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
@@ -125,12 +125,12 @@ public class TestTableInputFormat {
Table table = UTIL.createTable(TableName.valueOf(tableName), families);
Put p = new Put("aaa".getBytes());
for (byte[] family : families) {
- p.add(family, null, "value aaa".getBytes());
+ p.addColumn(family, null, "value aaa".getBytes());
}
table.put(p);
p = new Put("bbb".getBytes());
for (byte[] family : families) {
- p.add(family, null, "value bbb".getBytes());
+ p.addColumn(family, null, "value bbb".getBytes());
}
table.put(p);
return table;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
index daa6e71..fd0db6a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
@@ -114,8 +114,7 @@ public class TestTableMapReduceUtil {
for (String president : presidentsRowKeys) {
if (presidentNames.hasNext()) {
Put p = new Put(Bytes.toBytes(president));
- p.add(COLUMN_FAMILY, COLUMN_QUALIFIER,
- Bytes.toBytes(presidentNames.next()));
+ p.addColumn(COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(presidentNames.next()));
table.put(p);
}
}
@@ -123,7 +122,7 @@ public class TestTableMapReduceUtil {
for (String actor : actorsRowKeys) {
if (actorNames.hasNext()) {
Put p = new Put(Bytes.toBytes(actor));
- p.add(COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(actorNames.next()));
+ p.addColumn(COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(actorNames.next()));
table.put(p);
}
}
@@ -265,8 +264,9 @@ public class TestTableMapReduceUtil {
String name = Bytes.toString(result.getValue(COLUMN_FAMILY,
COLUMN_QUALIFIER));
- outCollector.collect(outKey, new Put(Bytes.toBytes("rowKey2")).add(
- COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(name)));
+ outCollector.collect(outKey,
+ new Put(Bytes.toBytes("rowKey2"))
+ .addColumn(COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(name)));
}
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
index 54a81b7..bd2f82a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
@@ -83,14 +83,14 @@ public class TestCellCounter {
Table t = UTIL.createTable(sourceTable, families);
try{
Put p = new Put(ROW1);
- p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
- p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
- p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+ p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
t.put(p);
p = new Put(ROW2);
- p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
- p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
- p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+ p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
t.put(p);
String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1" };
runCount(args);
@@ -120,31 +120,32 @@ public class TestCellCounter {
byte[][] families = { FAMILY_A, FAMILY_B };
Table t = UTIL.createTable(sourceTable, families);
try{
- Put p = new Put(ROW1);
- p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
- p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
- p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
- t.put(p);
- p = new Put(ROW2);
- p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
- p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
- p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
- t.put(p);
- String[] args = {
- sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1", "--starttime=" + now,
- "--endtime=" + now + 2 };
- runCount(args);
- FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
- "part-r-00000");
- String data = IOUtils.toString(inputStream);
- inputStream.close();
- assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
- assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
- assertTrue(data.contains("Total ROWS" + "\t" + "1"));
- assertTrue(data.contains("b;q" + "\t" + "1"));
- assertTrue(data.contains("a;q" + "\t" + "1"));
- assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
- assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+ t.put(p);
+ p = new Put(ROW2);
+ p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+ t.put(p);
+ String[] args = {
+ sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
+ "--starttime=" + now,
+ "--endtime=" + now + 2 };
+ runCount(args);
+ FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
+ "part-r-00000");
+ String data = IOUtils.toString(inputStream);
+ inputStream.close();
+ assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
+ assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
+ assertTrue(data.contains("Total ROWS" + "\t" + "1"));
+ assertTrue(data.contains("b;q" + "\t" + "1"));
+ assertTrue(data.contains("a;q" + "\t" + "1"));
+ assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
+ assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
}finally{
t.close();
FileUtil.fullyDelete(new File(OUTPUT_DIR));
@@ -160,38 +161,38 @@ public class TestCellCounter {
byte[][] families = { FAMILY_A, FAMILY_B };
Table t = UTIL.createTable(sourceTable, families);
try{
- Put p = new Put(ROW1);
- p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
- p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
- p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
- t.put(p);
- p = new Put(ROW2);
- p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
- p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
- p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
- t.put(p);
- String[] args = {
- sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
- "--endtime=" + now + 1 };
- runCount(args);
- FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
- "part-r-00000");
- String data = IOUtils.toString(inputStream);
- inputStream.close();
- assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
- assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
- assertTrue(data.contains("Total ROWS" + "\t" + "1"));
- assertTrue(data.contains("b;q" + "\t" + "1"));
- assertTrue(data.contains("a;q" + "\t" + "1"));
- assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
- assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+ t.put(p);
+ p = new Put(ROW2);
+ p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+ t.put(p);
+ String[] args = {
+ sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
+ "--endtime=" + now + 1 };
+ runCount(args);
+ FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
+ "part-r-00000");
+ String data = IOUtils.toString(inputStream);
+ inputStream.close();
+ assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
+ assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
+ assertTrue(data.contains("Total ROWS" + "\t" + "1"));
+ assertTrue(data.contains("b;q" + "\t" + "1"));
+ assertTrue(data.contains("a;q" + "\t" + "1"));
+ assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
+ assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
}finally{
t.close();
FileUtil.fullyDelete(new File(OUTPUT_DIR));
}
}
- /**
+ /**
* Test CellCounter with time range all data should print to output
*/
@Test (timeout=300000)
@@ -200,27 +201,27 @@ public class TestCellCounter {
byte[][] families = { FAMILY_A, FAMILY_B };
Table t = UTIL.createTable(sourceTable, families);
try{
- Put p = new Put(ROW1);
- p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
- p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
- p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
- t.put(p);
- p = new Put(ROW2);
- p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
- p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
- p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
- t.put(p);
- String[] args = {
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+ t.put(p);
+ p = new Put(ROW2);
+ p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+ t.put(p);
+ String[] args = {
sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "--starttime=" + now + 1,
- "--endtime=" + now + 2 };
+ "--endtime=" + now + 2 };
- runCount(args);
- FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
- "part-r-00000");
- String data = IOUtils.toString(inputStream);
+ runCount(args);
+ FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
+ "part-r-00000");
+ String data = IOUtils.toString(inputStream);
inputStream.close();
- // nothing should hace been emitted to the reducer
- assertTrue(data.isEmpty());
+ // nothing should have been emitted to the reducer
+ assertTrue(data.isEmpty());
}finally{
t.close();
FileUtil.fullyDelete(new File(OUTPUT_DIR));
@@ -283,14 +284,14 @@ public class TestCellCounter {
Table t = UTIL.createTable(sourceTable, families);
try {
Put p = new Put(ROW1);
- p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
- p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
- p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+ p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
t.put(p);
p = new Put(ROW2);
- p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
- p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
- p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+ p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+ p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+ p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
t.put(p);
String[] args = { sourceTable.getNameAsString(), outputDir.toString(), ";" };
runCount(args);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
index 626383b..628ca08 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
@@ -80,7 +80,7 @@ public class TestCopyTable {
// put rows into the first table
for (int i = 0; i < 10; i++) {
Put p = new Put(Bytes.toBytes("row" + i));
- p.add(FAMILY, COLUMN1, COLUMN1);
+ p.addColumn(FAMILY, COLUMN1, COLUMN1);
t1.put(p);
}
@@ -143,13 +143,13 @@ public class TestCopyTable {
// put rows into the first table
Put p = new Put(ROW0);
- p.add(FAMILY, COLUMN1, COLUMN1);
+ p.addColumn(FAMILY, COLUMN1, COLUMN1);
t1.put(p);
p = new Put(ROW1);
- p.add(FAMILY, COLUMN1, COLUMN1);
+ p.addColumn(FAMILY, COLUMN1, COLUMN1);
t1.put(p);
p = new Put(ROW2);
- p.add(FAMILY, COLUMN1, COLUMN1);
+ p.addColumn(FAMILY, COLUMN1, COLUMN1);
t1.put(p);
CopyTable copy = new CopyTable();
@@ -193,14 +193,14 @@ public class TestCopyTable {
Table t = TEST_UTIL.createTable(sourceTable, families);
Table t2 = TEST_UTIL.createTable(targetTable, families);
Put p = new Put(ROW1);
- p.add(FAMILY_A, QUALIFIER, Bytes.toBytes("Data11"));
- p.add(FAMILY_B, QUALIFIER, Bytes.toBytes("Data12"));
- p.add(FAMILY_A, QUALIFIER, Bytes.toBytes("Data13"));
+ p.addColumn(FAMILY_A, QUALIFIER, Bytes.toBytes("Data11"));
+ p.addColumn(FAMILY_B, QUALIFIER, Bytes.toBytes("Data12"));
+ p.addColumn(FAMILY_A, QUALIFIER, Bytes.toBytes("Data13"));
t.put(p);
p = new Put(ROW2);
- p.add(FAMILY_B, QUALIFIER, Bytes.toBytes("Dat21"));
- p.add(FAMILY_A, QUALIFIER, Bytes.toBytes("Data22"));
- p.add(FAMILY_B, QUALIFIER, Bytes.toBytes("Data23"));
+ p.addColumn(FAMILY_B, QUALIFIER, Bytes.toBytes("Dat21"));
+ p.addColumn(FAMILY_A, QUALIFIER, Bytes.toBytes("Data22"));
+ p.addColumn(FAMILY_B, QUALIFIER, Bytes.toBytes("Data23"));
t.put(p);
long currentTime = System.currentTimeMillis();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index ecbde7a..05b2b8b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -1006,7 +1006,7 @@ public class TestHFileOutputFormat {
// put some data in it and flush to create a storefile
Put p = new Put(Bytes.toBytes("test"));
- p.add(FAMILIES[0], Bytes.toBytes("1"), Bytes.toBytes("1"));
+ p.addColumn(FAMILIES[0], Bytes.toBytes("1"), Bytes.toBytes("1"));
table.put(p);
admin.flush(TABLE_NAME);
assertEquals(1, util.countRows(table));
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 6142cc5..9ff88f0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -1047,7 +1047,7 @@ public class TestHFileOutputFormat2 {
// put some data in it and flush to create a storefile
Put p = new Put(Bytes.toBytes("test"));
- p.add(FAMILIES[0], Bytes.toBytes("1"), Bytes.toBytes("1"));
+ p.addColumn(FAMILIES[0], Bytes.toBytes("1"), Bytes.toBytes("1"));
table.put(p);
admin.flush(TABLE_NAME);
assertEquals(1, util.countRows(table));
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index b9ad6af..5cf2281 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -71,7 +71,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LauncherSecurityManager;
-import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
@@ -172,14 +171,14 @@ public class TestImportExport {
String EXPORT_TABLE = "exportSimpleCase";
Table t = UTIL.createTable(TableName.valueOf(EXPORT_TABLE), FAMILYA, 3);
Put p = new Put(ROW1);
- p.add(FAMILYA, QUAL, now, QUAL);
- p.add(FAMILYA, QUAL, now+1, QUAL);
- p.add(FAMILYA, QUAL, now+2, QUAL);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
t.put(p);
p = new Put(ROW2);
- p.add(FAMILYA, QUAL, now, QUAL);
- p.add(FAMILYA, QUAL, now+1, QUAL);
- p.add(FAMILYA, QUAL, now+2, QUAL);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
t.put(p);
String[] args = new String[] {
@@ -272,12 +271,12 @@ public class TestImportExport {
Table t = UTIL.getConnection().getTable(desc.getTableName());
Put p = new Put(ROW1);
- p.add(FAMILYA, QUAL, now, QUAL);
- p.add(FAMILYA, QUAL, now+1, QUAL);
- p.add(FAMILYA, QUAL, now+2, QUAL);
- p.add(FAMILYA, QUAL, now+3, QUAL);
- p.add(FAMILYA, QUAL, now+4, QUAL);
- t.put(p);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
+ t.put(p);
String[] args = new String[] {
"-D" + Export.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE, // added scanner batching arg.
@@ -303,11 +302,11 @@ public class TestImportExport {
Table t = UTIL.getConnection().getTable(desc.getTableName());
Put p = new Put(ROW1);
- p.add(FAMILYA, QUAL, now, QUAL);
- p.add(FAMILYA, QUAL, now+1, QUAL);
- p.add(FAMILYA, QUAL, now+2, QUAL);
- p.add(FAMILYA, QUAL, now+3, QUAL);
- p.add(FAMILYA, QUAL, now+4, QUAL);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
t.put(p);
Delete d = new Delete(ROW1, now+3);
@@ -371,7 +370,7 @@ public class TestImportExport {
//Add first version of QUAL
Put p = new Put(ROW1);
- p.add(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
exportT.put(p);
//Add Delete family marker
@@ -380,7 +379,7 @@ public class TestImportExport {
//Add second version of QUAL
p = new Put(ROW1);
- p.add(FAMILYA, QUAL, now+5, "s".getBytes());
+ p.addColumn(FAMILYA, QUAL, now + 5, "s".getBytes());
exportT.put(p);
//Add second Delete family marker
@@ -447,15 +446,15 @@ public class TestImportExport {
Table exportTable = UTIL.getConnection().getTable(desc.getTableName());
Put p1 = new Put(ROW1);
- p1.add(FAMILYA, QUAL, now, QUAL);
- p1.add(FAMILYA, QUAL, now + 1, QUAL);
- p1.add(FAMILYA, QUAL, now + 2, QUAL);
- p1.add(FAMILYA, QUAL, now + 3, QUAL);
- p1.add(FAMILYA, QUAL, now + 4, QUAL);
+ p1.addColumn(FAMILYA, QUAL, now, QUAL);
+ p1.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p1.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ p1.addColumn(FAMILYA, QUAL, now + 3, QUAL);
+ p1.addColumn(FAMILYA, QUAL, now + 4, QUAL);
// Having another row would actually test the filter.
Put p2 = new Put(ROW2);
- p2.add(FAMILYA, QUAL, now, QUAL);
+ p2.addColumn(FAMILYA, QUAL, now, QUAL);
exportTable.put(Arrays.asList(p1, p2));
@@ -639,15 +638,15 @@ public class TestImportExport {
// Insert some data
Put put = new Put(ROW1);
- put.add(FAMILYA, QUAL, now, QUAL);
- put.add(FAMILYA, QUAL, now + 1, QUAL);
- put.add(FAMILYA, QUAL, now + 2, QUAL);
+ put.addColumn(FAMILYA, QUAL, now, QUAL);
+ put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
exportTable.put(put);
put = new Put(ROW2);
- put.add(FAMILYA, QUAL, now, QUAL);
- put.add(FAMILYA, QUAL, now + 1, QUAL);
- put.add(FAMILYA, QUAL, now + 2, QUAL);
+ put.addColumn(FAMILYA, QUAL, now, QUAL);
+ put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
exportTable.put(put);
// Run the export
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
index 03052fb..5110ef7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -116,7 +115,7 @@ public class TestMultithreadedTableMapper {
newValue.reverse();
// Now set the value to be collected
Put outval = new Put(key.get());
- outval.add(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
+ outval.addColumn(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
context.write(key, outval);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
index 592feee..1bd2437 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
@@ -155,13 +155,13 @@ public class TestRowCounter {
// clean up content of TABLE_NAME
Table table = TEST_UTIL.deleteTableData(TableName.valueOf(TABLE_NAME));
ts = System.currentTimeMillis();
- put1.add(family, col1, ts, Bytes.toBytes("val1"));
+ put1.addColumn(family, col1, ts, Bytes.toBytes("val1"));
table.put(put1);
Thread.sleep(100);
ts = System.currentTimeMillis();
- put2.add(family, col1, ts, Bytes.toBytes("val2"));
- put3.add(family, col1, ts, Bytes.toBytes("val3"));
+ put2.addColumn(family, col1, ts, Bytes.toBytes("val2"));
+ put3.addColumn(family, col1, ts, Bytes.toBytes("val3"));
table.put(put2);
table.put(put3);
table.close();
@@ -227,9 +227,9 @@ public class TestRowCounter {
for (; i < TOTAL_ROWS - ROWS_WITH_ONE_COL; i++) {
byte[] row = Bytes.toBytes("row" + i);
Put put = new Put(row);
- put.add(family, col1, value);
- put.add(family, col2, value);
- put.add(family, col3, value);
+ put.addColumn(family, col1, value);
+ put.addColumn(family, col2, value);
+ put.addColumn(family, col3, value);
rowsUpdate.add(put);
}
@@ -237,7 +237,7 @@ public class TestRowCounter {
for (; i < TOTAL_ROWS; i++) {
byte[] row = Bytes.toBytes("row" + i);
Put put = new Put(row);
- put.add(family, col2, value);
+ put.addColumn(family, col2, value);
rowsUpdate.add(put);
}
table.put(rowsUpdate);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
index b0a4243..4693519 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
@@ -56,7 +56,6 @@ import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.junit.AfterClass;
import org.junit.Before;
@@ -120,12 +119,12 @@ public class TestTableInputFormat {
Table table = UTIL.createTable(TableName.valueOf(tableName), families);
Put p = new Put("aaa".getBytes());
for (byte[] family : families) {
- p.add(family, null, "value aaa".getBytes());
+ p.addColumn(family, null, "value aaa".getBytes());
}
table.put(p);
p = new Put("bbb".getBytes());
for (byte[] family : families) {
- p.add(family, null, "value bbb".getBytes());
+ p.addColumn(family, null, "value bbb".getBytes());
}
table.put(p);
return table;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
index 5ece857..a52eea6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
@@ -85,7 +85,7 @@ public class TestTableMapReduce extends TestTableMapReduceBase {
newValue.reverse();
// Now set the value to be collected
Put outval = new Put(key.get());
- outval.add(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
+ outval.addColumn(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
context.write(key, outval);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
index 729b085..398c248 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
@@ -130,7 +130,7 @@ public abstract class TestTableMapReduceBase {
// Now set the value to be collected
Put outval = new Put(key.get());
- outval.add(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
+ outval.addColumn(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
return outval;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
index 7e75830..30ae944 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
@@ -121,7 +121,7 @@ public class TestTimeRangeMapRed {
for (Long ts : tsList) {
Put put = new Put(key.get());
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY_NAME, COLUMN_NAME, ts, Bytes.toBytes(true));
+ put.addColumn(FAMILY_NAME, COLUMN_NAME, ts, Bytes.toBytes(true));
puts.add(put);
}
table.put(puts);
@@ -156,7 +156,7 @@ public class TestTimeRangeMapRed {
for (Map.Entry<Long, Boolean> entry : TIMESTAMP.entrySet()) {
Put put = new Put(KEY);
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY_NAME, COLUMN_NAME, entry.getKey(), Bytes.toBytes(false));
+ put.addColumn(FAMILY_NAME, COLUMN_NAME, entry.getKey(), Bytes.toBytes(false));
puts.add(put);
}
Table table = UTIL.getConnection().getTable(desc.getTableName());
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index 60b050f..a9841db 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -97,8 +97,8 @@ public class TestWALPlayer {
// put a row into the first table
Put p = new Put(ROW);
- p.add(FAMILY, COLUMN1, COLUMN1);
- p.add(FAMILY, COLUMN2, COLUMN2);
+ p.addColumn(FAMILY, COLUMN1, COLUMN1);
+ p.addColumn(FAMILY, COLUMN2, COLUMN2);
t1.put(p);
// delete one column
Delete d = new Delete(ROW);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
index abeab3f..5b3abea 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
@@ -208,7 +208,7 @@ public class TestAssignmentListener {
for (int i = 0; i < 10; ++i) {
byte[] key = Bytes.toBytes("row-" + i);
Put put = new Put(key);
- put.add(FAMILY, null, key);
+ put.addColumn(FAMILY, null, key);
table.put(put);
}
} finally {
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
index 579e28a..a9fa039 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
@@ -73,7 +73,8 @@ public class TestGetLastFlushedSequenceId {
testUtil.getHBaseAdmin().createNamespace(
NamespaceDescriptor.create(tableName.getNamespaceAsString()).build());
Table table = testUtil.createTable(tableName, families);
- table.put(new Put(Bytes.toBytes("k")).add(family, Bytes.toBytes("q"), Bytes.toBytes("v")));
+ table.put(new Put(Bytes.toBytes("k"))
+ .addColumn(family, Bytes.toBytes("q"), Bytes.toBytes("v")));
MiniHBaseCluster cluster = testUtil.getMiniHBaseCluster();
List<JVMClusterUtil.RegionServerThread> rsts = cluster.getRegionServerThreads();
Region region = null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
index 7cea0df..4e6460b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
@@ -504,7 +503,7 @@ public class TestMasterTransitions {
byte [] row = getStartKey(hri);
Put p = new Put(row);
p.setDurability(Durability.SKIP_WAL);
- p.add(getTestFamily(), getTestQualifier(), row);
+ p.addColumn(getTestFamily(), getTestQualifier(), row);
t.put(p);
rows++;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
index ed5d919..53ee92b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
@@ -93,7 +93,7 @@ public class TestWarmupRegion {
for (int i = 0; i < numRows; i++) {
long ts = System.currentTimeMillis() * 2;
Put put = new Put(ROW, ts);
- put.add(FAMILY, COLUMN, VALUE);
+ put.addColumn(FAMILY, COLUMN, VALUE);
table.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
index 2cf26c0..d24023d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
@@ -210,7 +210,7 @@ public class TestSimpleRegionNormalizerOnCluster {
Put put = new Put(key);
byte[] col = Bytes.toBytes(String.valueOf(j));
byte[] value = dataGenerator.generateRandomSizeValue(key, col);
- put.add(FAMILYNAME, col, value);
+ put.addColumn(FAMILYNAME, col, value);
region.put(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
index b3f29db..45b62cd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
@@ -420,7 +420,7 @@ public class MasterProcedureTestingUtility {
Put put = new Put(key);
put.setDurability(Durability.SKIP_WAL);
for (byte[] family: families) {
- put.add(family, q, value);
+ put.addColumn(family, q, value);
}
return put;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
index 1c2f161..0c06588 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
@@ -508,7 +508,7 @@ public class TestQuotaThrottle {
try {
while (count < maxOps) {
Put put = new Put(Bytes.toBytes("row-" + count));
- put.add(FAMILY, QUALIFIER, Bytes.toBytes("data-" + count));
+ put.addColumn(FAMILY, QUALIFIER, Bytes.toBytes("data-" + count));
for (final Table table: tables) {
table.put(put);
}
[6/8] hbase git commit: HBASE-14675 Exorcise deprecated Put#add(...)
and replace with Put#addColumn(...)
Posted by jm...@apache.org.
HBASE-14675 Exorcise deprecated Put#add(...) and replace with Put#addColumn(...)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/de9555ce
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/de9555ce
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/de9555ce
Branch: refs/heads/master
Commit: de9555cec4c10f86226f2dc43ef1ffc69a4e65cb
Parents: 452e38f
Author: Jonathan M Hsieh <jm...@apache.org>
Authored: Sun Oct 25 14:56:12 2015 -0700
Committer: Jonathan M Hsieh <jm...@apache.org>
Committed: Thu Oct 29 11:15:31 2015 -0700
----------------------------------------------------------------------
.../apache/hadoop/hbase/MetaTableAccessor.java | 2 +-
.../org/apache/hadoop/hbase/client/Put.java | 53 +--
.../hadoop/hbase/client/TestAsyncProcess.java | 2 +-
.../hbase/client/TestClientNoCluster.java | 2 +-
.../hadoop/hbase/client/TestOperation.java | 2 +-
.../hadoop/hbase/client/TestPutDotHas.java | 2 +-
.../hadoop/hbase/mapreduce/IndexBuilder.java | 2 +-
.../hadoop/hbase/mapreduce/SampleUploader.java | 3 +-
.../example/TestBulkDeleteProtocol.java | 66 +--
.../example/TestRowCountEndpoint.java | 2 +-
.../TestZooKeeperScanPolicyObserver.java | 4 +-
.../hadoop/hbase/mttr/IntegrationTestMTTR.java | 2 +-
...egrationTestBigLinkedListWithVisibility.java | 7 +-
.../test/IntegrationTestLoadAndVerify.java | 4 +-
...tionTestWithCellVisibilityLoadAndVerify.java | 2 +-
.../trace/IntegrationTestSendTraceRequests.java | 2 +-
.../hbase/rest/PerformanceEvaluation.java | 4 +-
.../hadoop/hbase/rest/TestScannerResource.java | 2 +-
.../hbase/rest/TestScannersWithFilters.java | 8 +-
.../hbase/rest/TestScannersWithLabels.java | 2 +-
.../hadoop/hbase/rest/TestTableResource.java | 2 +-
.../rest/client/TestRemoteHTableRetries.java | 4 +-
.../hbase/rest/client/TestRemoteTable.java | 38 +-
.../apache/hadoop/hbase/quotas/QuotaUtil.java | 2 +-
.../RegionMergeTransactionImpl.java | 18 +-
.../regionserver/SplitTransactionImpl.java | 10 +-
.../org/apache/hadoop/hbase/HBaseTestCase.java | 5 +-
.../hadoop/hbase/HBaseTestingUtility.java | 9 +-
.../hadoop/hbase/PerformanceEvaluation.java | 4 +-
.../apache/hadoop/hbase/TestAcidGuarantees.java | 2 +-
.../hadoop/hbase/TestHBaseTestingUtility.java | 2 +-
.../apache/hadoop/hbase/TestMultiVersions.java | 3 +-
.../org/apache/hadoop/hbase/TestNamespace.java | 2 +-
.../org/apache/hadoop/hbase/TestZooKeeper.java | 6 +-
.../TestZooKeeperTableArchiveClient.java | 2 +-
.../apache/hadoop/hbase/client/TestAdmin1.java | 10 +-
.../apache/hadoop/hbase/client/TestAdmin2.java | 4 +-
.../client/TestBlockEvictionFromClient.java | 54 +--
.../hadoop/hbase/client/TestCheckAndMutate.java | 9 +-
.../client/TestClientOperationInterrupt.java | 2 +-
.../client/TestClientScannerRPCTimeout.java | 2 +-
.../hadoop/hbase/client/TestFastFail.java | 2 +-
.../hadoop/hbase/client/TestFromClientSide.java | 465 ++++++++++---------
.../hbase/client/TestFromClientSide3.java | 16 +-
.../hbase/client/TestFromClientSideNoCodec.java | 4 +-
.../org/apache/hadoop/hbase/client/TestHCM.java | 168 +++----
.../hbase/client/TestHTableMultiplexer.java | 6 +-
.../client/TestHTableMultiplexerFlushCache.java | 2 +-
.../hbase/client/TestMetaWithReplicas.java | 2 +-
.../hadoop/hbase/client/TestMultiParallel.java | 18 +-
.../hbase/client/TestMultipleTimestamps.java | 6 +-
.../client/TestPutDeleteEtcCellIteration.java | 6 +-
.../hadoop/hbase/client/TestPutWithDelete.java | 14 +-
.../hbase/client/TestReplicaWithCluster.java | 4 +-
.../hadoop/hbase/client/TestReplicasClient.java | 14 +-
.../hbase/client/TestRpcControllerFactory.java | 4 +-
.../hadoop/hbase/client/TestScannerTimeout.java | 4 +-
.../hbase/client/TestTimestampsFilter.java | 20 +-
.../hadoop/hbase/constraint/TestConstraint.java | 17 +-
.../coprocessor/TestAggregateProtocol.java | 6 +-
.../TestBatchCoprocessorEndpoint.java | 2 +-
.../TestDoubleColumnInterpreter.java | 5 +-
.../hbase/coprocessor/TestHTableWrapper.java | 18 +-
.../coprocessor/TestOpenTableInCoprocessor.java | 4 +-
.../coprocessor/TestRegionObserverBypass.java | 32 +-
.../TestRegionObserverInterface.java | 36 +-
.../TestRegionObserverScannerOpenHook.java | 8 +-
.../coprocessor/TestRegionObserverStacking.java | 2 +-
...gionServerCoprocessorExceptionWithAbort.java | 2 +-
.../coprocessor/TestRowProcessorEndpoint.java | 10 +-
.../hbase/coprocessor/TestWALObserver.java | 2 +-
.../hbase/filter/TestDependentColumnFilter.java | 18 +-
.../apache/hadoop/hbase/filter/TestFilter.java | 32 +-
.../hbase/filter/TestFilterWithScanLimits.java | 3 +-
.../hadoop/hbase/filter/TestFilterWrapper.java | 4 +-
.../TestFuzzyRowAndColumnRangeFilter.java | 2 +-
.../filter/TestFuzzyRowFilterEndToEnd.java | 4 +-
.../filter/TestInvocationRecordFilter.java | 4 +-
.../hadoop/hbase/filter/TestScanRowPrefix.java | 2 +-
.../hadoop/hbase/fs/TestBlockReorder.java | 2 +-
.../hbase/io/encoding/TestChangingEncoding.java | 3 +-
.../hbase/io/encoding/TestEncodedSeekers.java | 2 +-
.../io/hfile/TestForceCacheImportantBlocks.java | 4 +-
.../TestScannerSelectionUsingKeyRange.java | 4 +-
.../io/hfile/TestScannerSelectionUsingTTL.java | 4 +-
.../hbase/mapred/TestTableInputFormat.java | 4 +-
.../hbase/mapred/TestTableMapReduceUtil.java | 10 +-
.../hadoop/hbase/mapreduce/TestCellCounter.java | 163 +++----
.../hadoop/hbase/mapreduce/TestCopyTable.java | 20 +-
.../hbase/mapreduce/TestHFileOutputFormat.java | 2 +-
.../hbase/mapreduce/TestHFileOutputFormat2.java | 2 +-
.../hbase/mapreduce/TestImportExport.java | 63 ++-
.../mapreduce/TestMultithreadedTableMapper.java | 3 +-
.../hadoop/hbase/mapreduce/TestRowCounter.java | 14 +-
.../hbase/mapreduce/TestTableInputFormat.java | 5 +-
.../hbase/mapreduce/TestTableMapReduce.java | 2 +-
.../hbase/mapreduce/TestTableMapReduceBase.java | 2 +-
.../hbase/mapreduce/TestTimeRangeMapRed.java | 4 +-
.../hadoop/hbase/mapreduce/TestWALPlayer.java | 4 +-
.../hbase/master/TestAssignmentListener.java | 2 +-
.../master/TestGetLastFlushedSequenceId.java | 3 +-
.../hbase/master/TestMasterTransitions.java | 3 +-
.../hadoop/hbase/master/TestWarmupRegion.java | 2 +-
.../TestSimpleRegionNormalizerOnCluster.java | 2 +-
.../MasterProcedureTestingUtility.java | 2 +-
.../hadoop/hbase/quotas/TestQuotaThrottle.java | 2 +-
.../hbase/regionserver/TestAtomicOperation.java | 14 +-
.../hbase/regionserver/TestBlocksRead.java | 2 +-
.../hbase/regionserver/TestCompactionState.java | 2 +-
.../TestCorruptedRegionStoreFile.java | 2 +-
.../regionserver/TestEncryptionKeyRotation.java | 2 +-
.../TestEncryptionRandomKeying.java | 2 +-
.../hadoop/hbase/regionserver/TestHRegion.java | 233 +++++-----
.../regionserver/TestHRegionOnCluster.java | 2 +-
.../regionserver/TestHRegionReplayEvents.java | 6 +-
.../hbase/regionserver/TestJoinedScanners.java | 6 +-
.../hbase/regionserver/TestKeepDeletes.java | 112 ++---
.../hbase/regionserver/TestMinVersions.java | 56 +--
.../hbase/regionserver/TestParallelPut.java | 4 +-
.../TestRegionMergeTransaction.java | 2 +-
.../TestRegionMergeTransactionOnCluster.java | 2 +-
.../hbase/regionserver/TestRegionReplicas.java | 2 +-
.../regionserver/TestRegionServerMetrics.java | 24 +-
.../regionserver/TestRegionServerNoMaster.java | 2 +-
.../hbase/regionserver/TestRowTooBig.java | 6 +-
.../regionserver/TestSCVFWithMiniCluster.java | 22 +-
.../TestScannerRetriableFailure.java | 2 +-
.../regionserver/TestSeekOptimizations.java | 2 +-
.../TestSplitTransactionOnCluster.java | 22 +-
.../regionserver/TestSplitWalDataLoss.java | 6 +-
.../TestStoreFileRefresherChore.java | 2 +-
.../hadoop/hbase/regionserver/TestTags.java | 30 +-
.../hbase/regionserver/TestWideScanner.java | 3 +-
.../TestCompactionWithThroughputController.java | 35 +-
.../hbase/regionserver/wal/TestDurability.java | 2 +-
.../regionserver/wal/TestLogRollAbort.java | 4 +-
.../regionserver/wal/TestLogRollPeriod.java | 2 +-
.../hbase/regionserver/wal/TestLogRolling.java | 6 +-
.../hbase/regionserver/wal/TestWALReplay.java | 12 +-
.../replication/TestMasterReplication.java | 4 +-
.../replication/TestMultiSlaveReplication.java | 4 +-
.../replication/TestPerTableCFReplication.java | 2 +-
...estReplicationChangingPeerRegionservers.java | 2 +-
.../TestReplicationDisableInactivePeer.java | 2 +-
.../replication/TestReplicationEndpoint.java | 2 +-
.../replication/TestReplicationSmallTests.java | 24 +-
.../replication/TestReplicationSyncUpTool.java | 16 +-
.../replication/TestReplicationWithTags.java | 2 +-
.../access/TestAccessControlFilter.java | 4 +-
.../security/access/TestAccessController.java | 18 +-
.../security/access/TestAccessController2.java | 4 +-
.../access/TestCellACLWithMultipleVersions.java | 106 ++---
.../hbase/security/access/TestCellACLs.java | 10 +-
.../access/TestScanEarlyTermination.java | 6 +-
.../security/access/TestTablePermissions.java | 4 +-
.../access/TestWithDisabledAuthorization.java | 8 +-
.../TestDefaultScanLabelGeneratorStack.java | 6 +-
.../TestEnforcingScanLabelGenerator.java | 6 +-
.../visibility/TestVisibilityLabels.java | 50 +-
.../TestVisibilityLabelsReplication.java | 2 +-
.../visibility/TestVisibilityLabelsWithACL.java | 2 +-
.../TestVisibilityLabelsWithDeletes.java | 78 ++--
.../TestVisibilityLabelsWithSLGStack.java | 4 +-
.../TestVisibilityLablesWithGroups.java | 6 +-
.../TestVisibilityWithCheckAuths.java | 4 +-
.../hbase/snapshot/SnapshotTestingUtils.java | 2 +-
.../hadoop/hbase/trace/TestHTraceHooks.java | 3 +-
.../hadoop/hbase/util/BaseTestHBaseFsck.java | 6 +-
.../hadoop/hbase/util/MultiThreadedUpdater.java | 2 +-
.../hadoop/hbase/util/MultiThreadedWriter.java | 6 +-
.../hbase/util/TestCoprocessorScanPolicy.java | 14 +-
.../hbase/util/TestHBaseFsckEncryption.java | 4 +-
.../hadoop/hbase/util/TestHBaseFsckOneRS.java | 10 +-
.../hadoop/hbase/util/TestHBaseFsckTwoRS.java | 6 +-
.../hadoop/hbase/util/TestMergeTable.java | 2 +-
.../apache/hadoop/hbase/util/TestMergeTool.java | 2 +-
.../util/hbck/OfflineMetaRebuildTestCore.java | 4 +-
.../hadoop/hbase/wal/TestWALFiltering.java | 2 +-
.../hbase/wal/WALPerformanceEvaluation.java | 3 +-
hbase-shell/src/main/ruby/hbase/admin.rb | 4 +-
hbase-shell/src/main/ruby/hbase/table.rb | 4 +-
181 files changed, 1365 insertions(+), 1380 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index 2fbfd9f..d3d7608 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -1713,7 +1713,7 @@ public class MetaTableAccessor {
public static Put makePutFromTableState(TableState state) {
long time = EnvironmentEdgeManager.currentTime();
Put put = new Put(state.getTableName().getName(), time);
- put.add(getTableFamily(), getStateColumn(), state.convert().toByteArray());
+ put.addColumn(getTableFamily(), getStateColumn(), state.convert().toByteArray());
return put;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
index 5e0a341..a2ca975 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
@@ -44,8 +44,8 @@ import org.apache.hadoop.hbase.util.Bytes;
* Used to perform Put operations for a single row.
* <p>
* To perform a Put, instantiate a Put object with the row to insert to and
- * for eachumn to be inserted, execute {@link #add(byte[], byte[], byte[]) add} or
- * {@link #add(byte[], byte[], long, byte[]) add} if setting the timestamp.
+ * for each column to be inserted, execute {@link #addColumn(byte[], byte[], byte[]) addColumn} or
+ * {@link #addColumn(byte[], byte[], long, byte[]) addColumn} if setting the timestamp.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
@@ -137,26 +137,13 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
* @param qualifier column qualifier
* @param value column value
* @return this
- * @deprecated Since 1.0.0. Use {@link #addColumn(byte[], byte[], byte[])}
- */
- @Deprecated
- public Put add(byte [] family, byte [] qualifier, byte [] value) {
- return addColumn(family, qualifier, value);
- }
-
- /**
- * Add the specified column and value to this Put operation.
- * @param family family name
- * @param qualifier column qualifier
- * @param value column value
- * @return this
*/
public Put addColumn(byte [] family, byte [] qualifier, byte [] value) {
return addColumn(family, qualifier, this.ts, value);
}
/**
- * See {@link #add(byte[], byte[], byte[])}. This version expects
+ * See {@link #addColumn(byte[], byte[], byte[])}. This version expects
* that the underlying arrays won't change. It's intended
* for usage internal to HBase and for advanced client applications.
*/
@@ -183,21 +170,6 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
* @param ts version timestamp
* @param value column value
* @return this
- * @deprecated Since 1.0.0. Use {@link #addColumn(byte[], byte[], long, byte[])}
- */
- @Deprecated
- public Put add(byte [] family, byte [] qualifier, long ts, byte [] value) {
- return addColumn(family, qualifier, ts, value);
- }
-
- /**
- * Add the specified column and value, with the specified timestamp as
- * its version to this Put operation.
- * @param family family name
- * @param qualifier column qualifier
- * @param ts version timestamp
- * @param value column value
- * @return this
*/
public Put addColumn(byte [] family, byte [] qualifier, long ts, byte [] value) {
if (ts < 0) {
@@ -211,7 +183,7 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
}
/**
- * See {@link #add(byte[], byte[], long, byte[])}. This version expects
+ * See {@link #addColumn(byte[], byte[], long, byte[])}. This version expects
* that the underlying arrays won't change. It's intended
* for usage internal to HBase and for advanced client applications.
*/
@@ -269,21 +241,6 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
* @param ts version timestamp
* @param value column value
* @return this
- * @deprecated Since 1.0.0. Use {@link Put#addColumn(byte[], ByteBuffer, long, ByteBuffer)}
- */
- @Deprecated
- public Put add(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value) {
- return addColumn(family, qualifier, ts, value);
- }
-
- /**
- * Add the specified column and value, with the specified timestamp as
- * its version to this Put operation.
- * @param family family name
- * @param qualifier column qualifier
- * @param ts version timestamp
- * @param value column value
- * @return this
*/
public Put addColumn(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value) {
if (ts < 0) {
@@ -297,7 +254,7 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
}
/**
- * See {@link #add(byte[], ByteBuffer, long, ByteBuffer)}. This version expects
+ * See {@link #addColumn(byte[], ByteBuffer, long, ByteBuffer)}. This version expects
* that the underlying arrays won't change. It's intended
* for internal usage by HBase and for advanced client applications.
*/
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index a20ca4f..b784f7a 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -1071,7 +1071,7 @@ public class TestAsyncProcess {
throw new IllegalArgumentException("unknown " + regCnt);
}
- p.add(DUMMY_BYTES_1, DUMMY_BYTES_1, DUMMY_BYTES_1);
+ p.addColumn(DUMMY_BYTES_1, DUMMY_BYTES_1, DUMMY_BYTES_1);
return p;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
index e941440..0a5a37f 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
@@ -703,7 +703,7 @@ public class TestClientNoCluster extends Configured implements Tool {
for (int i = 0; i < namespaceSpan; i++) {
byte [] b = format(rd.nextLong());
Put p = new Put(b);
- p.add(HConstants.CATALOG_FAMILY, b, b);
+ p.addColumn(HConstants.CATALOG_FAMILY, b, b);
mutator.mutate(p);
if (i % printInterval == 0) {
LOG.info("Put " + printInterval + "/" + stopWatch.elapsedMillis());
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index ae0d52c..13a2567 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -337,7 +337,7 @@ public class TestOperation {
// produce a Put operation
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
// get its JSON representation, and parse it
json = put.toJSON();
parsedJSON = mapper.readValue(json, HashMap.class);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java
index c269e62..b90374b 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java
@@ -41,7 +41,7 @@ public class TestPutDotHas {
@Before
public void setUp() {
- put.add(FAMILY_01, QUALIFIER_01, TS, VALUE_01);
+ put.addColumn(FAMILY_01, QUALIFIER_01, TS, VALUE_01);
}
@Test
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
index 07c1755..1dab633 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
@@ -89,7 +89,7 @@ public class IndexBuilder extends Configured implements Tool {
// original: row 123 attribute:phone 555-1212
// index: row 555-1212 INDEX:ROW 123
Put put = new Put(value);
- put.add(INDEX_COLUMN, INDEX_QUALIFIER, rowKey.get());
+ put.addColumn(INDEX_COLUMN, INDEX_QUALIFIER, rowKey.get());
context.write(tableName, put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
index 011147a..18eb5a6 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
@@ -91,7 +90,7 @@ public class SampleUploader extends Configured implements Tool {
// Create Put
Put put = new Put(row);
- put.add(family, qualifier, value);
+ put.addColumn(family, qualifier, value);
// Uncomment below to disable WAL. This will improve performance but means
// you will experience data loss in the case of a RegionServer crash.
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
index 930b899..317081b 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
@@ -223,8 +223,8 @@ public class TestBulkDeleteProtocol {
List<Put> puts = new ArrayList<Put>(100);
for (int j = 0; j < 100; j++) {
Put put = new Put(Bytes.toBytes(j));
- put.add(FAMILY1, QUALIFIER1, "v1".getBytes());
- put.add(FAMILY2, QUALIFIER2, "v2".getBytes());
+ put.addColumn(FAMILY1, QUALIFIER1, "v1".getBytes());
+ put.addColumn(FAMILY2, QUALIFIER2, "v2".getBytes());
puts.add(put);
}
ht.put(puts);
@@ -251,15 +251,15 @@ public class TestBulkDeleteProtocol {
for (int j = 0; j < 100; j++) {
Put put = new Put(Bytes.toBytes(j));
byte[] value = "v1".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1234L, value);
- put.add(FAMILY1, QUALIFIER2, 1234L, value);
- put.add(FAMILY1, QUALIFIER3, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1234L, value);
// Latest version values
value = "v2".getBytes();
- put.add(FAMILY1, QUALIFIER1, value);
- put.add(FAMILY1, QUALIFIER2, value);
- put.add(FAMILY1, QUALIFIER3, value);
- put.add(FAMILY1, null, value);
+ put.addColumn(FAMILY1, QUALIFIER1, value);
+ put.addColumn(FAMILY1, QUALIFIER2, value);
+ put.addColumn(FAMILY1, QUALIFIER3, value);
+ put.addColumn(FAMILY1, null, value);
puts.add(put);
}
ht.put(puts);
@@ -300,19 +300,19 @@ public class TestBulkDeleteProtocol {
Put put = new Put(Bytes.toBytes(j));
// TS = 1000L
byte[] value = "v1".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1000L, value);
- put.add(FAMILY1, QUALIFIER2, 1000L, value);
- put.add(FAMILY1, QUALIFIER3, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1000L, value);
// TS = 1234L
value = "v2".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1234L, value);
- put.add(FAMILY1, QUALIFIER2, 1234L, value);
- put.add(FAMILY1, QUALIFIER3, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1234L, value);
// Latest version values
value = "v3".getBytes();
- put.add(FAMILY1, QUALIFIER1, value);
- put.add(FAMILY1, QUALIFIER2, value);
- put.add(FAMILY1, QUALIFIER3, value);
+ put.addColumn(FAMILY1, QUALIFIER1, value);
+ put.addColumn(FAMILY1, QUALIFIER2, value);
+ put.addColumn(FAMILY1, QUALIFIER3, value);
puts.add(put);
}
ht.put(puts);
@@ -347,24 +347,24 @@ public class TestBulkDeleteProtocol {
Put put = new Put(Bytes.toBytes(j));
// TS = 1000L
byte[] value = "v1".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1000L, value);
- put.add(FAMILY1, QUALIFIER2, 1000L, value);
- put.add(FAMILY1, QUALIFIER3, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1000L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1000L, value);
// TS = 1234L
value = "v2".getBytes();
- put.add(FAMILY1, QUALIFIER1, 1234L, value);
- put.add(FAMILY1, QUALIFIER2, 1234L, value);
- put.add(FAMILY1, QUALIFIER3, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 1234L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 1234L, value);
// TS = 2000L
value = "v3".getBytes();
- put.add(FAMILY1, QUALIFIER1, 2000L, value);
- put.add(FAMILY1, QUALIFIER2, 2000L, value);
- put.add(FAMILY1, QUALIFIER3, 2000L, value);
+ put.addColumn(FAMILY1, QUALIFIER1, 2000L, value);
+ put.addColumn(FAMILY1, QUALIFIER2, 2000L, value);
+ put.addColumn(FAMILY1, QUALIFIER3, 2000L, value);
// Latest version values
value = "v4".getBytes();
- put.add(FAMILY1, QUALIFIER1, value);
- put.add(FAMILY1, QUALIFIER2, value);
- put.add(FAMILY1, QUALIFIER3, value);
+ put.addColumn(FAMILY1, QUALIFIER1, value);
+ put.addColumn(FAMILY1, QUALIFIER2, value);
+ put.addColumn(FAMILY1, QUALIFIER3, value);
puts.add(put);
}
ht.put(puts);
@@ -435,9 +435,9 @@ public class TestBulkDeleteProtocol {
private Put createPut(byte[] rowkey, String value) throws IOException {
Put put = new Put(rowkey);
- put.add(FAMILY1, QUALIFIER1, value.getBytes());
- put.add(FAMILY1, QUALIFIER2, value.getBytes());
- put.add(FAMILY1, QUALIFIER3, value.getBytes());
+ put.addColumn(FAMILY1, QUALIFIER1, value.getBytes());
+ put.addColumn(FAMILY1, QUALIFIER2, value.getBytes());
+ put.addColumn(FAMILY1, QUALIFIER3, value.getBytes());
return put;
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
index fd15234..1776ced 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
@@ -76,7 +76,7 @@ public class TestRowCountEndpoint {
for (int i=0; i<5; i++) {
byte[] iBytes = Bytes.toBytes(i);
Put p = new Put(iBytes);
- p.add(TEST_FAMILY, TEST_COLUMN, iBytes);
+ p.addColumn(TEST_FAMILY, TEST_COLUMN, iBytes);
table.put(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
index 0430a41..e97d528 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
@@ -93,10 +93,10 @@ public class TestZooKeeperScanPolicyObserver {
long ts = now - 2000;
Put p = new Put(R);
- p.add(F, Q, ts, Q);
+ p.addColumn(F, Q, ts, Q);
t.put(p);
p = new Put(R);
- p.add(F, Q, ts+1, Q);
+ p.addColumn(F, Q, ts + 1, Q);
t.put(p);
// these two should be expired but for the override
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index 4423650..437f200 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -496,7 +496,7 @@ public class IntegrationTestMTTR {
@Override
protected boolean doAction() throws Exception {
Put p = new Put(Bytes.toBytes(RandomStringUtils.randomAlphanumeric(5)));
- p.add(FAMILY, Bytes.toBytes("\0"), Bytes.toBytes(RandomStringUtils.randomAscii(5)));
+ p.addColumn(FAMILY, Bytes.toBytes("\0"), Bytes.toBytes(RandomStringUtils.randomAscii(5)));
table.put(p);
return true;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
index adc0eb7..c908474 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
@@ -211,13 +211,14 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
for (int i = 0; i < current.length; i++) {
for (int j = 0; j < DEFAULT_TABLES_COUNT; j++) {
Put put = new Put(current[i]);
- put.add(FAMILY_NAME, COLUMN_PREV, prev == null ? NO_KEY : prev[i]);
+ byte[] value = prev == null ? NO_KEY : prev[i];
+ put.addColumn(FAMILY_NAME, COLUMN_PREV, value);
if (count >= 0) {
- put.add(FAMILY_NAME, COLUMN_COUNT, Bytes.toBytes(count + i));
+ put.addColumn(FAMILY_NAME, COLUMN_COUNT, Bytes.toBytes(count + i));
}
if (id != null) {
- put.add(FAMILY_NAME, COLUMN_CLIENT, id);
+ put.addColumn(FAMILY_NAME, COLUMN_CLIENT, id);
}
visibilityExps = split[j * 2] + OR + split[(j * 2) + 1];
put.setCellVisibility(new CellVisibility(visibilityExps));
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
index 623a370..e279dfb 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
@@ -242,12 +242,12 @@ public void cleanUpCluster() throws Exception {
Bytes.putLong(row, 0, byteSwapped);
Put p = new Put(row);
- p.add(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
+ p.addColumn(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
if (blockStart > 0) {
for (int j = 0; j < numBackReferencesPerRow; j++) {
long referredRow = blockStart - BLOCK_SIZE + rand.nextInt(BLOCK_SIZE);
Bytes.putLong(row, 0, swapLong(referredRow));
- p.add(TEST_FAMILY, row, HConstants.EMPTY_BYTE_ARRAY);
+ p.addColumn(TEST_FAMILY, row, HConstants.EMPTY_BYTE_ARRAY);
}
refsWritten.increment(1);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
index b797740..52a705b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
@@ -174,7 +174,7 @@ public class IntegrationTestWithCellVisibilityLoadAndVerify extends IntegrationT
String exp = VISIBILITY_EXPS[expIdx];
byte[] row = Bytes.add(Bytes.toBytes(i), Bytes.toBytes(suffix), Bytes.toBytes(exp));
Put p = new Put(row);
- p.add(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
+ p.addColumn(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
p.setCellVisibility(new CellVisibility(exp));
getCounter(expIdx).increment(1);
mutator.mutate(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
index 3fa8a9c..f325aac 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
@@ -245,7 +245,7 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
Put p = new Put(Bytes.toBytes(rk));
for (int y = 0; y < 10; y++) {
random.nextBytes(value);
- p.add(familyName, Bytes.toBytes(random.nextLong()), value);
+ p.addColumn(familyName, Bytes.toBytes(random.nextLong()), value);
}
ht.mutate(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
index e91f873..8424bf9 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
@@ -1131,7 +1131,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
value, tags);
put.add(kv);
} else {
- put.add(FAMILY_NAME, QUALIFIER_NAME, value);
+ put.addColumn(FAMILY_NAME, QUALIFIER_NAME, value);
}
put.setDurability(writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
mutator.mutate(put);
@@ -1202,7 +1202,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
value, tags);
put.add(kv);
} else {
- put.add(FAMILY_NAME, QUALIFIER_NAME, value);
+ put.addColumn(FAMILY_NAME, QUALIFIER_NAME, value);
}
put.setDurability(writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
mutator.mutate(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
index 4f4f698..5114b11 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
@@ -100,7 +100,7 @@ public class TestScannerResource {
k[2] = b3;
Put put = new Put(k);
put.setDurability(Durability.SKIP_WAL);
- put.add(famAndQf[0], famAndQf[1], k);
+ put.addColumn(famAndQf[0], famAndQf[1], k);
puts.add(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
index 22ee31d..9b68806 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
@@ -149,7 +149,7 @@ public class TestScannersWithFilters {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
- p.add(FAMILIES[0], QUALIFIER, VALUES[0]);
+ p.addColumn(FAMILIES[0], QUALIFIER, VALUES[0]);
}
table.put(p);
}
@@ -157,7 +157,7 @@ public class TestScannersWithFilters {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
- p.add(FAMILIES[1], QUALIFIER, VALUES[1]);
+ p.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
}
table.put(p);
}
@@ -167,7 +167,7 @@ public class TestScannersWithFilters {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
- p.add(FAMILIES[1], QUALIFIER, VALUES[0]);
+ p.addColumn(FAMILIES[1], QUALIFIER, VALUES[0]);
}
table.put(p);
}
@@ -175,7 +175,7 @@ public class TestScannersWithFilters {
Put p = new Put(ROW);
p.setDurability(Durability.SKIP_WAL);
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
- p.add(FAMILIES[0], QUALIFIER, VALUES[1]);
+ p.addColumn(FAMILIES[0], QUALIFIER, VALUES[1]);
}
table.put(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
index 83c72e3..cb235aa 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
@@ -98,7 +98,7 @@ public class TestScannersWithLabels {
for (int i = 0; i < 9; i++) {
Put put = new Put(Bytes.toBytes("row" + i));
put.setDurability(Durability.SKIP_WAL);
- put.add(famAndQf[0], famAndQf[1], k);
+ put.addColumn(famAndQf[0], famAndQf[1], k);
put.setCellVisibility(new CellVisibility("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!"
+ TOPSECRET));
puts.add(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
index b0b8fef..8fe5c98 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
@@ -108,7 +108,7 @@ public class TestTableResource {
k[2] = b3;
Put put = new Put(k);
put.setDurability(Durability.SKIP_WAL);
- put.add(famAndQf[0], famAndQf[1], k);
+ put.addColumn(famAndQf[0], famAndQf[1], k);
puts.add(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
index 5b18a6a..4a595f3 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
@@ -151,7 +151,7 @@ public class TestRemoteHTableRetries {
@Override
public void run() throws Exception {
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, put );
}
});
@@ -164,7 +164,7 @@ public class TestRemoteHTableRetries {
@Override
public void run() throws Exception {
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
Delete delete= new Delete(ROW_1);
remoteTable.checkAndDelete(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, delete );
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
index 297162b..19d0587 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
@@ -100,12 +100,12 @@ public class TestRemoteTable {
admin.createTable(htd);
try (Table table = TEST_UTIL.getConnection().getTable(TABLE)) {
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, TS_2, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, TS_2, VALUE_1);
table.put(put);
put = new Put(ROW_2);
- put.add(COLUMN_1, QUALIFIER_1, TS_1, VALUE_1);
- put.add(COLUMN_1, QUALIFIER_1, TS_2, VALUE_2);
- put.add(COLUMN_2, QUALIFIER_2, TS_2, VALUE_2);
+ put.addColumn(COLUMN_1, QUALIFIER_1, TS_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, TS_2, VALUE_2);
+ put.addColumn(COLUMN_2, QUALIFIER_2, TS_2, VALUE_2);
table.put(put);
}
remoteTable = new RemoteHTable(
@@ -282,7 +282,7 @@ public class TestRemoteTable {
@Test
public void testPut() throws IOException {
Put put = new Put(ROW_3);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
remoteTable.put(put);
Get get = new Get(ROW_3);
@@ -296,13 +296,13 @@ public class TestRemoteTable {
List<Put> puts = new ArrayList<Put>();
put = new Put(ROW_3);
- put.add(COLUMN_2, QUALIFIER_2, VALUE_2);
+ put.addColumn(COLUMN_2, QUALIFIER_2, VALUE_2);
puts.add(put);
put = new Put(ROW_4);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_4);
- put.add(COLUMN_2, QUALIFIER_2, VALUE_2);
+ put.addColumn(COLUMN_2, QUALIFIER_2, VALUE_2);
puts.add(put);
remoteTable.put(puts);
@@ -327,8 +327,8 @@ public class TestRemoteTable {
@Test
public void testDelete() throws IOException {
Put put = new Put(ROW_3);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
- put.add(COLUMN_2, QUALIFIER_2, VALUE_2);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_2, QUALIFIER_2, VALUE_2);
remoteTable.put(put);
Get get = new Get(ROW_3);
@@ -390,16 +390,16 @@ public class TestRemoteTable {
public void testScanner() throws IOException {
List<Put> puts = new ArrayList<Put>();
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_2);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_3);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_4);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
remoteTable.put(puts);
@@ -465,7 +465,7 @@ public class TestRemoteTable {
assertFalse(remoteTable.exists(get));
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
remoteTable.put(put);
assertTrue(remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1,
@@ -481,16 +481,16 @@ public class TestRemoteTable {
public void testIteratorScaner() throws IOException {
List<Put> puts = new ArrayList<Put>();
Put put = new Put(ROW_1);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_2);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_3);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
put = new Put(ROW_4);
- put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
+ put.addColumn(COLUMN_1, QUALIFIER_1, VALUE_1);
puts.add(put);
remoteTable.put(puts);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
index bff648d..6d219e1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
@@ -144,7 +144,7 @@ public class QuotaUtil extends QuotaTableUtil {
private static void addQuotas(final Connection connection, final byte[] rowKey,
final byte[] qualifier, final Quotas data) throws IOException {
Put put = new Put(rowKey);
- put.add(QUOTA_FAMILY_INFO, qualifier, quotasToData(data));
+ put.addColumn(QUOTA_FAMILY_INFO, qualifier, quotasToData(data));
doPut(connection, put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java
index 28cb9b9..5c177d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeTransactionImpl.java
@@ -410,10 +410,10 @@ public class RegionMergeTransactionImpl implements RegionMergeTransaction {
// Put for parent
Put putOfMerged = MetaTableAccessor.makePutFromRegionInfo(copyOfMerged, time);
- putOfMerged.add(HConstants.CATALOG_FAMILY, HConstants.MERGEA_QUALIFIER,
- regionA.toByteArray());
- putOfMerged.add(HConstants.CATALOG_FAMILY, HConstants.MERGEB_QUALIFIER,
- regionB.toByteArray());
+ putOfMerged.addColumn(HConstants.CATALOG_FAMILY, HConstants.MERGEA_QUALIFIER,
+ regionA.toByteArray());
+ putOfMerged.addColumn(HConstants.CATALOG_FAMILY, HConstants.MERGEB_QUALIFIER,
+ regionB.toByteArray());
mutations.add(putOfMerged);
// Deletes for merging regions
Delete deleteA = MetaTableAccessor.makeDeleteFromRegionInfo(regionA, time);
@@ -426,11 +426,11 @@ public class RegionMergeTransactionImpl implements RegionMergeTransaction {
@VisibleForTesting
Put addLocation(final Put p, final ServerName sn, long openSeqNum) {
- p.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes
- .toBytes(sn.getHostAndPort()));
- p.add(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn
- .getStartcode()));
- p.add(HConstants.CATALOG_FAMILY, HConstants.SEQNUM_QUALIFIER, Bytes.toBytes(openSeqNum));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes
+ .toBytes(sn.getHostAndPort()));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn
+ .getStartcode()));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.SEQNUM_QUALIFIER, Bytes.toBytes(openSeqNum));
return p;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java
index fbfea8e..70d040e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java
@@ -329,11 +329,11 @@ public class SplitTransactionImpl implements SplitTransaction {
@VisibleForTesting
Put addLocation(final Put p, final ServerName sn, long openSeqNum) {
- p.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes
- .toBytes(sn.getHostAndPort()));
- p.add(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn
- .getStartcode()));
- p.add(HConstants.CATALOG_FAMILY, HConstants.SEQNUM_QUALIFIER, Bytes.toBytes(openSeqNum));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes
+ .toBytes(sn.getHostAndPort()));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(sn
+ .getStartcode()));
+ p.addColumn(HConstants.CATALOG_FAMILY, HConstants.SEQNUM_QUALIFIER, Bytes.toBytes(openSeqNum));
return p;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
index 1fb096d..153f36b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -328,9 +328,10 @@ public abstract class HBaseTestCase extends TestCase {
byte[][] split =
KeyValue.parseColumn(Bytes.toBytes(sb.toString()));
if(split.length == 1) {
- put.add(split[0], new byte[0], t);
+ byte[] qualifier = new byte[0];
+ put.addColumn(split[0], qualifier, t);
} else {
- put.add(split[0], split[1], t);
+ put.addColumn(split[0], split[1], t);
}
put.setDurability(Durability.SKIP_WAL);
updater.put(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index eb1494e..6f02ab0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1951,7 +1951,8 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
Put put = new Put(row);
put.setDurability(writeToWAL ? Durability.USE_DEFAULT : Durability.SKIP_WAL);
for (int i = 0; i < f.length; i++) {
- put.add(f[i], null, value != null ? value : row);
+ byte[] value1 = value != null ? value : row;
+ put.addColumn(f[i], null, value1);
}
puts.add(put);
}
@@ -2038,7 +2039,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
k[2] = b3;
Put put = new Put(k);
put.setDurability(Durability.SKIP_WAL);
- put.add(f, null, k);
+ put.addColumn(f, null, k);
if (r.getWAL() == null) {
put.setDurability(Durability.SKIP_WAL);
}
@@ -2068,7 +2069,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
for (int i = startRow; i < endRow; i++) {
byte[] data = Bytes.toBytes(String.valueOf(i));
Put put = new Put(data);
- put.add(f, null, data);
+ put.addColumn(f, null, data);
t.put(put);
}
}
@@ -3415,7 +3416,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
final byte[] value = Bytes.toBytes("value_for_row_" + iRow +
"_cf_" + Bytes.toStringBinary(cf) + "_col_" + iCol + "_ts_" +
ts + "_random_" + rand.nextLong());
- put.add(cf, qual, ts, value);
+ put.addColumn(cf, qual, ts, value);
} else if (rand.nextDouble() < 0.8) {
del.deleteColumn(cf, qual, ts);
} else {
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 569ef71..1c1c56c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -1413,7 +1413,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
put.add(kv);
updateValueSize(kv.getValueLength());
} else {
- put.add(FAMILY_NAME, qualifier, value);
+ put.addColumn(FAMILY_NAME, qualifier, value);
updateValueSize(value.length);
}
}
@@ -1501,7 +1501,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
put.add(kv);
updateValueSize(kv.getValueLength());
} else {
- put.add(FAMILY_NAME, qualifier, value);
+ put.addColumn(FAMILY_NAME, qualifier, value);
updateValueSize(value.length);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
index dbb6156..27b480d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
@@ -135,7 +135,7 @@ public class TestAcidGuarantees implements Tool {
for (byte[] family : targetFamilies) {
for (int i = 0; i < NUM_COLS_TO_CHECK; i++) {
byte qualifier[] = Bytes.toBytes("col" + i);
- p.add(family, qualifier, data);
+ p.addColumn(family, qualifier, data);
}
}
table.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
index 3a9c565..dd09c37 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
@@ -98,7 +98,7 @@ public class TestHBaseTestingUtility {
Table table2 = htu2.createTable(TABLE_NAME, FAM_NAME);
Put put = new Put(ROW);
- put.add(FAM_NAME, QUAL_NAME, VALUE);
+ put.addColumn(FAM_NAME, QUAL_NAME, VALUE);
table1.put(put);
Get get = new Get(ROW);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
index 439e722..9bd2fe9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
@@ -218,8 +218,7 @@ public class TestMultiVersions {
for (int i = 0; i < startKeys.length; i++) {
for (int j = 0; j < timestamp.length; j++) {
Put put = new Put(rows[i], timestamp[j]);
- put.addColumn(HConstants.CATALOG_FAMILY, null, timestamp[j],
- Bytes.toBytes(timestamp[j]));
+ put.addColumn(HConstants.CATALOG_FAMILY, null, timestamp[j], Bytes.toBytes(timestamp[j]));
puts.add(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
index 5208ec4..c24d8a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java
@@ -248,7 +248,7 @@ public class TestNamespace {
//sanity check try to write and read from table
Table table = TEST_UTIL.getConnection().getTable(desc.getTableName());
Put p = new Put(Bytes.toBytes("row1"));
- p.add(Bytes.toBytes("my_cf"),Bytes.toBytes("my_col"),Bytes.toBytes("value1"));
+ p.addColumn(Bytes.toBytes("my_cf"), Bytes.toBytes("my_col"), Bytes.toBytes("value1"));
table.put(p);
//flush and read from disk to make sure directory changes are working
admin.flush(desc.getTableName());
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
index ec3521c..6d4cab3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
@@ -261,8 +261,7 @@ public class TestZooKeeper {
Table table = TEST_UTIL.getConnection().getTable(desc.getTableName());
Put put = new Put(Bytes.toBytes("testrow"));
- put.add(Bytes.toBytes("fam"),
- Bytes.toBytes("col"), Bytes.toBytes("testdata"));
+ put.addColumn(Bytes.toBytes("fam"), Bytes.toBytes("col"), Bytes.toBytes("testdata"));
LOG.info("Putting table " + tableName);
table.put(put);
table.close();
@@ -611,7 +610,8 @@ public class TestZooKeeper {
int numberOfPuts;
for (numberOfPuts = 0; numberOfPuts < 6; numberOfPuts++) {
p = new Put(Bytes.toBytes(numberOfPuts));
- p.add(Bytes.toBytes("col"), Bytes.toBytes("ql"), Bytes.toBytes("value" + numberOfPuts));
+ p.addColumn(Bytes.toBytes("col"), Bytes.toBytes("ql"),
+ Bytes.toBytes("value" + numberOfPuts));
table.put(p);
}
m.getZooKeeper().close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
index eba3c0b..a28112d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
@@ -403,7 +403,7 @@ public class TestZooKeeperTableArchiveClient {
private void createHFileInRegion(Region region, byte[] columnFamily) throws IOException {
// put one row in the region
Put p = new Put(Bytes.toBytes("row"));
- p.add(columnFamily, Bytes.toBytes("Qual"), Bytes.toBytes("v1"));
+ p.addColumn(columnFamily, Bytes.toBytes("Qual"), Bytes.toBytes("v1"));
region.put(p);
// flush the region to make a store file
region.flush(true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
index 33c151d..0ba51b0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java
@@ -241,7 +241,7 @@ public class TestAdmin1 {
final TableName table = TableName.valueOf("testDisableAndEnableTable");
Table ht = TEST_UTIL.createTable(table, HConstants.CATALOG_FAMILY);
Put put = new Put(row);
- put.add(HConstants.CATALOG_FAMILY, qualifier, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, qualifier, value);
ht.put(put);
Get get = new Get(row);
get.addColumn(HConstants.CATALOG_FAMILY, qualifier);
@@ -308,7 +308,7 @@ public class TestAdmin1 {
Table ht1 = TEST_UTIL.createTable(table1, HConstants.CATALOG_FAMILY);
Table ht2 = TEST_UTIL.createTable(table2, HConstants.CATALOG_FAMILY);
Put put = new Put(row);
- put.add(HConstants.CATALOG_FAMILY, qualifier, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, qualifier, value);
ht1.put(put);
ht2.put(put);
Get get = new Get(row);
@@ -1187,13 +1187,13 @@ public class TestAdmin1 {
List<Put> puts = new ArrayList<Put>();
byte[] qualifier = "c".getBytes();
Put put = new Put(new byte[]{(byte)'1'});
- put.add(cf, qualifier, "100".getBytes());
+ put.addColumn(cf, qualifier, "100".getBytes());
puts.add(put);
put = new Put(new byte[]{(byte)'6'});
- put.add(cf, qualifier, "100".getBytes());
+ put.addColumn(cf, qualifier, "100".getBytes());
puts.add(put);
put = new Put(new byte[]{(byte)'8'});
- put.add(cf, qualifier, "100".getBytes());
+ put.addColumn(cf, qualifier, "100".getBytes());
puts.add(put);
ht.put(puts);
ht.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index 33203fc..fe311a6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -218,7 +218,7 @@ public class TestAdmin2 {
byte[] value = Bytes.toBytes("somedata");
// This used to use an empty row... That must have been a bug
Put put = new Put(value);
- put.add(HConstants.CATALOG_FAMILY, HConstants.CATALOG_FAMILY, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, HConstants.CATALOG_FAMILY, value);
table.put(put);
table.close();
}
@@ -614,7 +614,7 @@ public class TestAdmin2 {
HRegionServer regionServer = TEST_UTIL.getRSForFirstRegionInTable(tableName);
for (int i = 1; i <= 256; i++) { // 256 writes should cause 8 log rolls
Put put = new Put(Bytes.toBytes("row" + String.format("%1$04d", i)));
- put.add(HConstants.CATALOG_FAMILY, null, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, null, value);
table.put(put);
if (i % 32 == 0) {
// After every 32 writes sleep to let the log roller run
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
index d175744..6dedee2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestBlockEvictionFromClient.java
@@ -183,10 +183,10 @@ public class TestBlockEvictionFromClient {
// insert data. 2 Rows are added
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
assertTrue(Bytes.equals(table.get(new Get(ROW)).value(), data));
// data was in memstore so don't expect any changes
@@ -214,7 +214,7 @@ public class TestBlockEvictionFromClient {
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
byte[] data2 = Bytes.add(data, data);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
Result r = table.get(new Get(ROW));
assertTrue(Bytes.equals(r.getValue(FAMILY, QUALIFIER), data));
@@ -332,16 +332,16 @@ public class TestBlockEvictionFromClient {
BlockCache cache = cacheConf.getBlockCache();
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -389,16 +389,16 @@ public class TestBlockEvictionFromClient {
regionName);
BlockCache cache = setCacheProperties(region);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
for (int i = 1; i < 10; i++) {
put = new Put(ROW);
- put.add(FAMILY, Bytes.toBytes("testQualifier" + i), data2);
+ put.addColumn(FAMILY, Bytes.toBytes("testQualifier" + i), data2);
table.put(put);
if (i % 2 == 0) {
region.flush(true);
@@ -406,7 +406,7 @@ public class TestBlockEvictionFromClient {
}
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -483,16 +483,16 @@ public class TestBlockEvictionFromClient {
BlockCache cache = setCacheProperties(region);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
for (int i = 1; i < 10; i++) {
put = new Put(ROW);
- put.add(Bytes.toBytes("testFamily" + i), Bytes.toBytes("testQualifier" + i), data2);
+ put.addColumn(Bytes.toBytes("testFamily" + i), Bytes.toBytes("testQualifier" + i), data2);
table.put(put);
if (i % 2 == 0) {
region.flush(true);
@@ -501,7 +501,7 @@ public class TestBlockEvictionFromClient {
region.flush(true);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -577,16 +577,16 @@ public class TestBlockEvictionFromClient {
BlockCache cache = cacheConf.getBlockCache();
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -656,16 +656,16 @@ public class TestBlockEvictionFromClient {
BlockCache cache = setCacheProperties(region);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
region.flush(true);
for (int i = 1; i < 10; i++) {
put = new Put(ROW);
- put.add(Bytes.toBytes("testFamily" + i), Bytes.toBytes("testQualifier" + i), data2);
+ put.addColumn(Bytes.toBytes("testFamily" + i), Bytes.toBytes("testQualifier" + i), data2);
table.put(put);
if (i % 2 == 0) {
region.flush(true);
@@ -674,7 +674,7 @@ public class TestBlockEvictionFromClient {
region.flush(true);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
region.flush(true);
// flush the data
@@ -827,10 +827,10 @@ public class TestBlockEvictionFromClient {
// insert data. 2 Rows are added
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
assertTrue(Bytes.equals(table.get(new Get(ROW)).value(), data));
// Should create one Hfile with 2 blocks
@@ -842,7 +842,7 @@ public class TestBlockEvictionFromClient {
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
byte[] data2 = Bytes.add(data, data);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
// flush, one new block
System.out.println("Flushing cache");
@@ -1042,14 +1042,14 @@ public class TestBlockEvictionFromClient {
private void insertData(HTable table) throws IOException {
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
put = new Put(ROW1);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
byte[] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
index b68381f..082de09 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutate.java
@@ -62,9 +62,9 @@ public class TestCheckAndMutate {
try {
// put one row
Put put = new Put(rowKey);
- put.add(family, Bytes.toBytes("A"), Bytes.toBytes("a"));
- put.add(family, Bytes.toBytes("B"), Bytes.toBytes("b"));
- put.add(family, Bytes.toBytes("C"), Bytes.toBytes("c"));
+ put.addColumn(family, Bytes.toBytes("A"), Bytes.toBytes("a"));
+ put.addColumn(family, Bytes.toBytes("B"), Bytes.toBytes("b"));
+ put.addColumn(family, Bytes.toBytes("C"), Bytes.toBytes("c"));
table.put(put);
// get row back and assert the values
Get get = new Get(rowKey);
@@ -102,7 +102,8 @@ public class TestCheckAndMutate {
//Test that we get a region level exception
try {
Put p = new Put(rowKey);
- p.add(new byte[]{'b', 'o', 'g', 'u', 's'}, new byte[]{'A'}, new byte[0]);
+ byte[] value = new byte[0];
+ p.addColumn(new byte[]{'b', 'o', 'g', 'u', 's'}, new byte[]{'A'}, value);
rm = new RowMutations(rowKey);
rm.add(p);
table.checkAndMutate(rowKey, family, Bytes.toBytes("A"), CompareFilter.CompareOp.EQUAL,
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
index 072098e..a4603b7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientOperationInterrupt.java
@@ -85,7 +85,7 @@ public class TestClientOperationInterrupt {
Table ht = util.createTable(tableName, new byte[][]{dummy, test});
Put p = new Put(row1);
- p.add(dummy, dummy, dummy);
+ p.addColumn(dummy, dummy, dummy);
ht.put(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
index 8af1d9f..515e763 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
@@ -126,7 +126,7 @@ public class TestClientScannerRPCTimeout {
private void putToTable(Table ht, byte[] rowkey) throws IOException {
Put put = new Put(rowkey);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
index 4be2ef0..5ceef01 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
@@ -125,7 +125,7 @@ public class TestFastFail {
byte[] rowKey = longToByteArrayKey(i);
Put put = new Put(rowKey);
byte[] value = rowKey; // value is the same as the row key
- put.add(FAMILY, QUALIFIER, value);
+ put.addColumn(FAMILY, QUALIFIER, value);
puts.add(put);
}
try (Table table = connection.getTable(TableName.valueOf(tableName))) {
[5/8] hbase git commit: HBASE-14675 Exorcise deprecated Put#add(...)
and replace with Put#addColumn(...)
Posted by jm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 3e988d6..7901b81 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -193,13 +193,13 @@ public class TestFromClientSide {
long ts = System.currentTimeMillis();
Put p = new Put(T1, ts);
- p.add(FAMILY, C0, T1);
+ p.addColumn(FAMILY, C0, T1);
h.put(p);
p = new Put(T1, ts+2);
- p.add(FAMILY, C0, T2);
+ p.addColumn(FAMILY, C0, T2);
h.put(p);
p = new Put(T1, ts+4);
- p.add(FAMILY, C0, T3);
+ p.addColumn(FAMILY, C0, T3);
h.put(p);
Delete d = new Delete(T1, ts+3);
@@ -254,7 +254,7 @@ public class TestFromClientSide {
// future timestamp
long ts = System.currentTimeMillis() * 2;
Put put = new Put(ROW, ts);
- put.add(FAMILY, COLUMN, VALUE);
+ put.addColumn(FAMILY, COLUMN, VALUE);
table.put(put);
Get get = new Get(ROW);
@@ -283,7 +283,7 @@ public class TestFromClientSide {
});
put = new Put(ROW, ts);
- put.add(FAMILY, COLUMN, VALUE);
+ put.addColumn(FAMILY, COLUMN, VALUE);
table.put(put);
get = new Get(ROW);
@@ -418,14 +418,14 @@ public class TestFromClientSide {
value));
Put put = new Put(Bytes.toBytes(row));
put.setDurability(Durability.SKIP_WAL);
- put.add(Bytes.toBytes("trans-blob"), null, Bytes
+ put.addColumn(Bytes.toBytes("trans-blob"), null, Bytes
.toBytes("value for blob"));
- put.add(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
- put.add(Bytes.toBytes("trans-date"), null, Bytes
+ put.addColumn(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
+ put.addColumn(Bytes.toBytes("trans-date"), null, Bytes
.toBytes("20090921010101999"));
- put.add(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), Bytes
+ put.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), Bytes
.toBytes(value));
- put.add(Bytes.toBytes("trans-group"), null, Bytes
+ put.addColumn(Bytes.toBytes("trans-group"), null, Bytes
.toBytes("adhocTransactionGroupId"));
ht.put(put);
}
@@ -590,7 +590,7 @@ public class TestFromClientSide {
TableName TABLE = TableName.valueOf("testSuperSimple");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
Scan scan = new Scan();
scan.addColumn(FAMILY, TABLE.toBytes());
@@ -608,7 +608,7 @@ public class TestFromClientSide {
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
byte[] value = new byte[4 * 1024 * 1024];
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, value);
+ put.addColumn(FAMILY, QUALIFIER, value);
ht.put(put);
try {
TEST_UTIL.getConfiguration().setInt(
@@ -618,7 +618,7 @@ public class TestFromClientSide {
ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
try (Table t = connection.getTable(TableName.valueOf(FAMILY))) {
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, value);
+ put.addColumn(FAMILY, QUALIFIER, value);
t.put(put);
}
}
@@ -642,7 +642,7 @@ public class TestFromClientSide {
for(int i=0;i<10;i++) {
Put put = new Put(ROWS[i]);
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY, QUALIFIERS[i], VALUE);
+ put.addColumn(FAMILY, QUALIFIERS[i], VALUE);
ht.put(put);
}
Scan scan = new Scan();
@@ -675,7 +675,7 @@ public class TestFromClientSide {
for(int i = 0; i < 10; i ++) {
Put put = new Put(ROWS[i]);
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY, QUALIFIER, values[i]);
+ put.addColumn(FAMILY, QUALIFIER, values[i]);
ht.put(put);
}
Scan scan = new Scan();
@@ -709,7 +709,7 @@ public class TestFromClientSide {
for(int i=0;i<10;i++) {
Put put = new Put(ROWS[i]);
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILY, QUALIFIERS[i], VALUE);
+ put.addColumn(FAMILY, QUALIFIERS[i], VALUE);
ht.put(put);
}
Scan scan = new Scan();
@@ -778,7 +778,7 @@ public class TestFromClientSide {
// Insert a row
Put put = new Put(ROWS[2]);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
// Try to get empty rows around it
@@ -863,7 +863,7 @@ public class TestFromClientSide {
////////////////////////////////////////////////////////////////////////////
put = new Put(ROWS[0]);
- put.add(FAMILIES[4], QUALIFIERS[0], VALUES[0]);
+ put.addColumn(FAMILIES[4], QUALIFIERS[0], VALUES[0]);
ht.put(put);
// Get the single column
@@ -896,13 +896,13 @@ public class TestFromClientSide {
// Insert multiple columns to two other families
put = new Put(ROWS[0]);
- put.add(FAMILIES[2], QUALIFIERS[2], VALUES[2]);
- put.add(FAMILIES[2], QUALIFIERS[4], VALUES[4]);
- put.add(FAMILIES[4], QUALIFIERS[4], VALUES[4]);
- put.add(FAMILIES[6], QUALIFIERS[6], VALUES[6]);
- put.add(FAMILIES[6], QUALIFIERS[7], VALUES[7]);
- put.add(FAMILIES[7], QUALIFIERS[7], VALUES[7]);
- put.add(FAMILIES[9], QUALIFIERS[0], VALUES[0]);
+ put.addColumn(FAMILIES[2], QUALIFIERS[2], VALUES[2]);
+ put.addColumn(FAMILIES[2], QUALIFIERS[4], VALUES[4]);
+ put.addColumn(FAMILIES[4], QUALIFIERS[4], VALUES[4]);
+ put.addColumn(FAMILIES[6], QUALIFIERS[6], VALUES[6]);
+ put.addColumn(FAMILIES[6], QUALIFIERS[7], VALUES[7]);
+ put.addColumn(FAMILIES[7], QUALIFIERS[7], VALUES[7]);
+ put.addColumn(FAMILIES[9], QUALIFIERS[0], VALUES[0]);
ht.put(put);
// Get multiple columns across multiple families and get empties around it
@@ -923,10 +923,10 @@ public class TestFromClientSide {
// Insert more data to memstore
put = new Put(ROWS[0]);
- put.add(FAMILIES[6], QUALIFIERS[5], VALUES[5]);
- put.add(FAMILIES[6], QUALIFIERS[8], VALUES[8]);
- put.add(FAMILIES[6], QUALIFIERS[9], VALUES[9]);
- put.add(FAMILIES[4], QUALIFIERS[3], VALUES[3]);
+ put.addColumn(FAMILIES[6], QUALIFIERS[5], VALUES[5]);
+ put.addColumn(FAMILIES[6], QUALIFIERS[8], VALUES[8]);
+ put.addColumn(FAMILIES[6], QUALIFIERS[9], VALUES[9]);
+ put.addColumn(FAMILIES[4], QUALIFIERS[3], VALUES[3]);
ht.put(put);
////////////////////////////////////////////////////////////////////////////
@@ -1164,7 +1164,7 @@ public class TestFromClientSide {
// Null row (should NOT work)
try {
Put put = new Put((byte[])null);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
fail("Inserting a null row worked, should throw exception");
} catch(Exception e) {}
@@ -1172,7 +1172,7 @@ public class TestFromClientSide {
// Null qualifier (should work)
{
Put put = new Put(ROW);
- put.add(FAMILY, null, VALUE);
+ put.addColumn(FAMILY, null, VALUE);
ht.put(put);
getTestNull(ht, ROW, FAMILY, VALUE);
@@ -1195,7 +1195,7 @@ public class TestFromClientSide {
// Empty qualifier, byte[0] instead of null (should work)
try {
Put put = new Put(ROW);
- put.add(FAMILY, HConstants.EMPTY_BYTE_ARRAY, VALUE);
+ put.addColumn(FAMILY, HConstants.EMPTY_BYTE_ARRAY, VALUE);
ht.put(put);
getTestNull(ht, ROW, FAMILY, VALUE);
@@ -1225,7 +1225,7 @@ public class TestFromClientSide {
// Null value
try {
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, null);
+ put.addColumn(FAMILY, QUALIFIER, null);
ht.put(put);
Get get = new Get(ROW);
@@ -1262,10 +1262,10 @@ public class TestFromClientSide {
// Insert 4 versions of same column
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
- put.add(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
- put.add(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
- put.add(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
ht.put(put);
// Verify we can get each one properly
@@ -1351,10 +1351,10 @@ public class TestFromClientSide {
// Insert 4 more versions of same column and a dupe
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
- put.add(FAMILY, QUALIFIER, STAMPS[6], VALUES[6]);
- put.add(FAMILY, QUALIFIER, STAMPS[7], VALUES[7]);
- put.add(FAMILY, QUALIFIER, STAMPS[8], VALUES[8]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[6], VALUES[6]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[7], VALUES[7]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[8], VALUES[8]);
ht.put(put);
// Ensure maxVersions in query is respected
@@ -1414,10 +1414,10 @@ public class TestFromClientSide {
// Insert 4 more versions of same column and a dupe
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[9], VALUES[9]);
- put.add(FAMILY, QUALIFIER, STAMPS[11], VALUES[11]);
- put.add(FAMILY, QUALIFIER, STAMPS[13], VALUES[13]);
- put.add(FAMILY, QUALIFIER, STAMPS[15], VALUES[15]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[9], VALUES[9]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[11], VALUES[11]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[13], VALUES[13]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[15], VALUES[15]);
ht.put(put);
get = new Get(ROW);
@@ -1476,19 +1476,19 @@ public class TestFromClientSide {
// Insert limit + 1 on each family
Put put = new Put(ROW);
- put.add(FAMILIES[0], QUALIFIER, STAMPS[0], VALUES[0]);
- put.add(FAMILIES[0], QUALIFIER, STAMPS[1], VALUES[1]);
- put.add(FAMILIES[1], QUALIFIER, STAMPS[0], VALUES[0]);
- put.add(FAMILIES[1], QUALIFIER, STAMPS[1], VALUES[1]);
- put.add(FAMILIES[1], QUALIFIER, STAMPS[2], VALUES[2]);
- put.add(FAMILIES[1], QUALIFIER, STAMPS[3], VALUES[3]);
- put.add(FAMILIES[2], QUALIFIER, STAMPS[0], VALUES[0]);
- put.add(FAMILIES[2], QUALIFIER, STAMPS[1], VALUES[1]);
- put.add(FAMILIES[2], QUALIFIER, STAMPS[2], VALUES[2]);
- put.add(FAMILIES[2], QUALIFIER, STAMPS[3], VALUES[3]);
- put.add(FAMILIES[2], QUALIFIER, STAMPS[4], VALUES[4]);
- put.add(FAMILIES[2], QUALIFIER, STAMPS[5], VALUES[5]);
- put.add(FAMILIES[2], QUALIFIER, STAMPS[6], VALUES[6]);
+ put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[0], VALUES[0]);
+ put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[1], VALUES[1]);
+ put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[0], VALUES[0]);
+ put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[1], VALUES[1]);
+ put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[2], VALUES[2]);
+ put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[3], VALUES[3]);
+ put.addColumn(FAMILIES[2], QUALIFIER, STAMPS[0], VALUES[0]);
+ put.addColumn(FAMILIES[2], QUALIFIER, STAMPS[1], VALUES[1]);
+ put.addColumn(FAMILIES[2], QUALIFIER, STAMPS[2], VALUES[2]);
+ put.addColumn(FAMILIES[2], QUALIFIER, STAMPS[3], VALUES[3]);
+ put.addColumn(FAMILIES[2], QUALIFIER, STAMPS[4], VALUES[4]);
+ put.addColumn(FAMILIES[2], QUALIFIER, STAMPS[5], VALUES[5]);
+ put.addColumn(FAMILIES[2], QUALIFIER, STAMPS[6], VALUES[6]);
ht.put(put);
// Verify we only get the right number out of each
@@ -1673,7 +1673,7 @@ public class TestFromClientSide {
Put put = new Put(ROW);
for (int q = 0; q < 1; q++)
for (int t = 0; t < 5; t++)
- put.add(FAMILY, QUALIFIERS[q], ts[t], VALUES[t]);
+ put.addColumn(FAMILY, QUALIFIERS[q], ts[t], VALUES[t]);
ht.put(put);
admin.flush(TABLE);
@@ -1717,7 +1717,7 @@ public class TestFromClientSide {
put = new Put(ROW);
for (int q = 0; q < 5; q++)
for (int t = 0; t < 5; t++)
- put.add(FAMILY, QUALIFIERS[q], ts[t], VALUES[t]);
+ put.addColumn(FAMILY, QUALIFIERS[q], ts[t], VALUES[t]);
ht.put(put);
admin.flush(TABLE);
@@ -1726,7 +1726,7 @@ public class TestFromClientSide {
put = new Put(ROW2);
for (int q = 0; q < 5; q++)
for (int t = 0; t < 5; t++)
- put.add(FAMILY, QUALIFIERS[q], ts[t], VALUES[t]);
+ put.addColumn(FAMILY, QUALIFIERS[q], ts[t], VALUES[t]);
ht.put(put);
admin.flush(TABLE);
@@ -1825,8 +1825,8 @@ public class TestFromClientSide {
Table ht = TEST_UTIL.createTable(TABLE, FAMILIES, 3);
Put put = new Put(ROW);
- put.add(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]);
- put.add(FAMILIES[0], QUALIFIER, ts[1], VALUES[1]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[1], VALUES[1]);
ht.put(put);
Delete delete = new Delete(ROW);
@@ -1853,12 +1853,12 @@ public class TestFromClientSide {
// Test delete latest version
put = new Put(ROW);
- put.add(FAMILIES[0], QUALIFIER, ts[4], VALUES[4]);
- put.add(FAMILIES[0], QUALIFIER, ts[2], VALUES[2]);
- put.add(FAMILIES[0], QUALIFIER, ts[3], VALUES[3]);
- put.add(FAMILIES[0], null, ts[4], VALUES[4]);
- put.add(FAMILIES[0], null, ts[2], VALUES[2]);
- put.add(FAMILIES[0], null, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[4], VALUES[4]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[0], null, ts[4], VALUES[4]);
+ put.addColumn(FAMILIES[0], null, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[0], null, ts[3], VALUES[3]);
ht.put(put);
delete = new Delete(ROW);
@@ -1897,8 +1897,8 @@ public class TestFromClientSide {
// But alas, this is not to be. We can't put them back in either case.
put = new Put(ROW);
- put.add(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]); // 1000
- put.add(FAMILIES[0], QUALIFIER, ts[4], VALUES[4]); // 5000
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]); // 1000
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[4], VALUES[4]); // 5000
ht.put(put);
@@ -1928,24 +1928,24 @@ public class TestFromClientSide {
// Test deleting an entire family from one row but not the other various ways
put = new Put(ROWS[0]);
- put.add(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
- put.add(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
- put.add(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
- put.add(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
ht.put(put);
put = new Put(ROWS[1]);
- put.add(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
- put.add(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
- put.add(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
- put.add(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
ht.put(put);
put = new Put(ROWS[2]);
- put.add(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
- put.add(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
- put.add(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
- put.add(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
ht.put(put);
// Assert that above went in.
@@ -2040,12 +2040,12 @@ public class TestFromClientSide {
ht.delete(delete);
put = new Put(ROWS[3]);
- put.add(FAMILIES[2], QUALIFIER, VALUES[0]);
+ put.addColumn(FAMILIES[2], QUALIFIER, VALUES[0]);
ht.put(put);
put = new Put(ROWS[4]);
- put.add(FAMILIES[1], QUALIFIER, VALUES[1]);
- put.add(FAMILIES[2], QUALIFIER, VALUES[2]);
+ put.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
+ put.addColumn(FAMILIES[2], QUALIFIER, VALUES[2]);
ht.put(put);
get = new Get(ROWS[3]);
@@ -2088,7 +2088,7 @@ public class TestFromClientSide {
byte [] bytes = Bytes.toBytes(i);
put = new Put(bytes);
put.setDurability(Durability.SKIP_WAL);
- put.add(FAMILIES[0], QUALIFIER, bytes);
+ put.addColumn(FAMILIES[0], QUALIFIER, bytes);
ht.put(put);
}
for (int i = 0; i < 10; i++) {
@@ -2197,7 +2197,7 @@ public class TestFromClientSide {
Put put = new Put(ROWS[i]);
put.setDurability(Durability.SKIP_WAL);
for(int j=0;j<numColsPerRow;j++) {
- put.add(FAMILY, QUALIFIERS[j], QUALIFIERS[j]);
+ put.addColumn(FAMILY, QUALIFIERS[j], QUALIFIERS[j]);
}
assertTrue("Put expected to contain " + numColsPerRow + " columns but " +
"only contains " + put.size(), put.size() == numColsPerRow);
@@ -2277,9 +2277,9 @@ public class TestFromClientSide {
// Insert three versions
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
- put.add(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
- put.add(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
ht.put(put);
// Get the middle value
@@ -2299,8 +2299,8 @@ public class TestFromClientSide {
// Insert two more versions surrounding others, into memstore
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[0], VALUES[0]);
- put.add(FAMILY, QUALIFIER, STAMPS[6], VALUES[6]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[0], VALUES[0]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[6], VALUES[6]);
ht.put(put);
// Check we can get everything we should and can't get what we shouldn't
@@ -2341,12 +2341,12 @@ public class TestFromClientSide {
// Insert lots versions
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[0], VALUES[0]);
- put.add(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
- put.add(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
- put.add(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
- put.add(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
- put.add(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[0], VALUES[0]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
ht.put(put);
getVersionRangeAndVerify(ht, ROW, FAMILY, QUALIFIER, STAMPS, VALUES, 0, 5);
@@ -2390,7 +2390,7 @@ public class TestFromClientSide {
// Insert lots versions
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, manualStamp, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, manualStamp, VALUE);
ht.put(put);
getVersionAndVerify(ht, ROW, FAMILY, QUALIFIER, manualStamp, VALUE);
@@ -2415,12 +2415,12 @@ public class TestFromClientSide {
// Insert lots versions
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[0], VALUES[0]);
- put.add(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
- put.add(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
- put.add(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
- put.add(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
- put.add(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[0], VALUES[0]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
ht.put(put);
getVersionRangeAndVerifyGreaterThan(ht, ROW, FAMILY, QUALIFIER, STAMPS, VALUES, 0, 5);
@@ -2458,12 +2458,12 @@ public class TestFromClientSide {
// Insert lots versions
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[0], VALUES[0]);
- put.add(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
- put.add(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
- put.add(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
- put.add(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
- put.add(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[0], VALUES[0]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
ht.put(put);
getAllVersionsAndVerify(ht, ROW, FAMILY, QUALIFIER, STAMPS, VALUES, 0, 5);
@@ -3288,10 +3288,10 @@ public class TestFromClientSide {
// Insert 4 versions of same column
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
- put.add(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
- put.add(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
- put.add(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[1], VALUES[1]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[2], VALUES[2]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[4], VALUES[4]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[5], VALUES[5]);
ht.put(put);
// Verify we can get each one properly
@@ -3377,11 +3377,11 @@ public class TestFromClientSide {
// Insert 4 more versions of same column and a dupe
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
- put.add(FAMILY, QUALIFIER, STAMPS[4], VALUES[14]);
- put.add(FAMILY, QUALIFIER, STAMPS[6], VALUES[6]);
- put.add(FAMILY, QUALIFIER, STAMPS[7], VALUES[7]);
- put.add(FAMILY, QUALIFIER, STAMPS[8], VALUES[8]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[3], VALUES[3]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[4], VALUES[14]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[6], VALUES[6]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[7], VALUES[7]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[8], VALUES[8]);
ht.put(put);
// Ensure maxVersions in query is respected
@@ -3441,10 +3441,10 @@ public class TestFromClientSide {
// Insert 4 more versions of same column and a dupe
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, STAMPS[9], VALUES[9]);
- put.add(FAMILY, QUALIFIER, STAMPS[11], VALUES[11]);
- put.add(FAMILY, QUALIFIER, STAMPS[13], VALUES[13]);
- put.add(FAMILY, QUALIFIER, STAMPS[15], VALUES[15]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[9], VALUES[9]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[11], VALUES[11]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[13], VALUES[13]);
+ put.addColumn(FAMILY, QUALIFIER, STAMPS[15], VALUES[15]);
ht.put(put);
get = new Get(ROW);
@@ -3501,15 +3501,15 @@ public class TestFromClientSide {
byte[] row = Bytes.toBytes("row1");
byte[] qualifier = Bytes.toBytes("myCol");
Put put = new Put(row);
- put.add(FAMILY, qualifier, 1L, Bytes.toBytes("AAA"));
+ put.addColumn(FAMILY, qualifier, 1L, Bytes.toBytes("AAA"));
hTable.put(put);
put = new Put(row);
- put.add(FAMILY, qualifier, 2L, Bytes.toBytes("BBB"));
+ put.addColumn(FAMILY, qualifier, 2L, Bytes.toBytes("BBB"));
hTable.put(put);
put = new Put(row);
- put.add(FAMILY, qualifier, 3L, Bytes.toBytes("EEE"));
+ put.addColumn(FAMILY, qualifier, 3L, Bytes.toBytes("EEE"));
hTable.put(put);
Get get = new Get(row);
@@ -3526,12 +3526,12 @@ public class TestFromClientSide {
// Update the value at timestamp 1
put = new Put(row);
- put.add(FAMILY, qualifier, 1L, Bytes.toBytes("CCC"));
+ put.addColumn(FAMILY, qualifier, 1L, Bytes.toBytes("CCC"));
hTable.put(put);
// Update the value at timestamp 2
put = new Put(row);
- put.add(FAMILY, qualifier, 2L, Bytes.toBytes("DDD"));
+ put.addColumn(FAMILY, qualifier, 2L, Bytes.toBytes("DDD"));
hTable.put(put);
// Check that the values at timestamp 2 and 1 got updated
@@ -3552,15 +3552,15 @@ public class TestFromClientSide {
byte[] row = Bytes.toBytes("row2");
byte[] qualifier = Bytes.toBytes("myCol");
Put put = new Put(row);
- put.add(FAMILY, qualifier, 1L, Bytes.toBytes("AAA"));
+ put.addColumn(FAMILY, qualifier, 1L, Bytes.toBytes("AAA"));
hTable.put(put);
put = new Put(row);
- put.add(FAMILY, qualifier, 2L, Bytes.toBytes("BBB"));
+ put.addColumn(FAMILY, qualifier, 2L, Bytes.toBytes("BBB"));
hTable.put(put);
put = new Put(row);
- put.add(FAMILY, qualifier, 3L, Bytes.toBytes("EEE"));
+ put.addColumn(FAMILY, qualifier, 3L, Bytes.toBytes("EEE"));
hTable.put(put);
Get get = new Get(row);
@@ -3582,12 +3582,12 @@ public class TestFromClientSide {
// Update the value at timestamp 1
put = new Put(row);
- put.add(FAMILY, qualifier, 1L, Bytes.toBytes("CCC"));
+ put.addColumn(FAMILY, qualifier, 1L, Bytes.toBytes("CCC"));
hTable.put(put);
// Update the value at timestamp 2
put = new Put(row);
- put.add(FAMILY, qualifier, 2L, Bytes.toBytes("DDD"));
+ put.addColumn(FAMILY, qualifier, 2L, Bytes.toBytes("DDD"));
hTable.put(put);
// Trigger a major compaction
@@ -3614,15 +3614,15 @@ public class TestFromClientSide {
byte[] row = Bytes.toBytes("row3");
byte[] qualifier = Bytes.toBytes("myCol");
Put put = new Put(row);
- put.add(FAMILY, qualifier, 1L, Bytes.toBytes("AAA"));
+ put.addColumn(FAMILY, qualifier, 1L, Bytes.toBytes("AAA"));
hTable.put(put);
put = new Put(row);
- put.add(FAMILY, qualifier, 2L, Bytes.toBytes("BBB"));
+ put.addColumn(FAMILY, qualifier, 2L, Bytes.toBytes("BBB"));
hTable.put(put);
put = new Put(row);
- put.add(FAMILY, qualifier, 3L, Bytes.toBytes("EEE"));
+ put.addColumn(FAMILY, qualifier, 3L, Bytes.toBytes("EEE"));
hTable.put(put);
Get get = new Get(row);
@@ -3644,7 +3644,7 @@ public class TestFromClientSide {
// Update the value at timestamp 1
put = new Put(row);
- put.add(FAMILY, qualifier, 1L, Bytes.toBytes("CCC"));
+ put.addColumn(FAMILY, qualifier, 1L, Bytes.toBytes("CCC"));
hTable.put(put);
// Trigger a major compaction
@@ -3654,7 +3654,7 @@ public class TestFromClientSide {
// Update the value at timestamp 2
put = new Put(row);
- put.add(FAMILY, qualifier, 2L, Bytes.toBytes("DDD"));
+ put.addColumn(FAMILY, qualifier, 2L, Bytes.toBytes("DDD"));
hTable.put(put);
// Trigger a major compaction
@@ -3683,11 +3683,11 @@ public class TestFromClientSide {
public void testGet_NullQualifier() throws IOException {
Table table = TEST_UTIL.createTable(TableName.valueOf("testGet_NullQualifier"), FAMILY);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
put = new Put(ROW);
- put.add(FAMILY, null, VALUE);
+ put.addColumn(FAMILY, null, VALUE);
table.put(put);
LOG.info("Row put");
@@ -3706,7 +3706,7 @@ public class TestFromClientSide {
public void testGet_NonExistentRow() throws IOException {
Table table = TEST_UTIL.createTable(TableName.valueOf("testGet_NonExistentRow"), FAMILY);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
LOG.info("Row put");
@@ -3734,11 +3734,11 @@ public class TestFromClientSide {
Table table = TEST_UTIL.createTable(TableName.valueOf("testPut"),
new byte[][] { CONTENTS_FAMILY, SMALL_FAMILY });
Put put = new Put(row1);
- put.add(CONTENTS_FAMILY, null, value);
+ put.addColumn(CONTENTS_FAMILY, null, value);
table.put(put);
put = new Put(row2);
- put.add(CONTENTS_FAMILY, null, value);
+ put.addColumn(CONTENTS_FAMILY, null, value);
assertEquals(put.size(), 1);
assertEquals(put.getFamilyCellMap().get(CONTENTS_FAMILY).size(), 1);
@@ -3774,7 +3774,7 @@ public class TestFromClientSide {
try {
Put p = new Put(ROW);
- p.add(BAD_FAM, QUALIFIER, VAL);
+ p.addColumn(BAD_FAM, QUALIFIER, VAL);
table.put(p);
} catch (RetriesExhaustedWithDetailsException e) {
caughtNSCFE = e.getCause(0) instanceof NoSuchColumnFamilyException;
@@ -3796,7 +3796,7 @@ public class TestFromClientSide {
byte[] row = Bytes.toBytes("row" + i);
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
- put.add(CONTENTS_FAMILY, null, value);
+ put.addColumn(CONTENTS_FAMILY, null, value);
rowsUpdate.add(put);
}
table.put(rowsUpdate);
@@ -3828,7 +3828,7 @@ public class TestFromClientSide {
byte[] row = Bytes.toBytes("row" + i);
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
- put.add(CONTENTS_FAMILY, null, value);
+ put.addColumn(CONTENTS_FAMILY, null, value);
rowsUpdate.add(put);
}
table.put(rowsUpdate);
@@ -3869,7 +3869,7 @@ public class TestFromClientSide {
byte[] row = Bytes.toBytes("row" + i);
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
- put.add(CONTENTS_FAMILY, null, value);
+ put.addColumn(CONTENTS_FAMILY, null, value);
rowsUpdate.add(put);
}
table.put(rowsUpdate);
@@ -3927,7 +3927,7 @@ public class TestFromClientSide {
new byte [][] {FAM1, FAM2});
// Insert some values
Put put = new Put(ROW);
- put.add(FAM1, Bytes.toBytes("letters"), Bytes.toBytes("abcdefg"));
+ put.addColumn(FAM1, Bytes.toBytes("letters"), Bytes.toBytes("abcdefg"));
table.put(put);
try {
Thread.sleep(1000);
@@ -3936,7 +3936,7 @@ public class TestFromClientSide {
}
put = new Put(ROW);
- put.add(FAM1, Bytes.toBytes("numbers"), Bytes.toBytes("123456"));
+ put.addColumn(FAM1, Bytes.toBytes("numbers"), Bytes.toBytes("123456"));
table.put(put);
try {
@@ -3946,7 +3946,7 @@ public class TestFromClientSide {
}
put = new Put(ROW);
- put.add(FAM2, Bytes.toBytes("letters"), Bytes.toBytes("hijklmnop"));
+ put.addColumn(FAM2, Bytes.toBytes("letters"), Bytes.toBytes("hijklmnop"));
table.put(put);
long times[] = new long[3];
@@ -4099,7 +4099,7 @@ public class TestFromClientSide {
Table a = TEST_UTIL.createTable(tableAname, HConstants.CATALOG_FAMILY);
Table b = TEST_UTIL.createTable(tableBname, HConstants.CATALOG_FAMILY);
Put put = new Put(ROW);
- put.add(HConstants.CATALOG_FAMILY, null, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, null, value);
a.put(put);
// open a new connection to A and a connection to b
@@ -4198,10 +4198,10 @@ public class TestFromClientSide {
byte[] three = new byte[] { 3 };
byte[] four = new byte[] { 4 };
- put1.add(HConstants.CATALOG_FAMILY, null, one);
- put2.add(HConstants.CATALOG_FAMILY, null, two);
- put3.add(HConstants.CATALOG_FAMILY, null, three);
- put4.add(HConstants.CATALOG_FAMILY, null, four);
+ put1.addColumn(HConstants.CATALOG_FAMILY, null, one);
+ put2.addColumn(HConstants.CATALOG_FAMILY, null, two);
+ put3.addColumn(HConstants.CATALOG_FAMILY, null, three);
+ put4.addColumn(HConstants.CATALOG_FAMILY, null, four);
table.put(put1);
table.put(put2);
table.put(put3);
@@ -4304,11 +4304,11 @@ public class TestFromClientSide {
Table t = TEST_UTIL.createTable(TABLENAME, FAMILY);
Put p = new Put(ROW);
- p.add(FAMILY, QUALIFIER, VALUE);
+ p.addColumn(FAMILY, QUALIFIER, VALUE);
MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, p);
p = new Put(ROW1);
- p.add(FAMILY, QUALIFIER, VALUE);
+ p.addColumn(FAMILY, QUALIFIER, VALUE);
MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, p);
MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
@@ -4337,7 +4337,7 @@ public class TestFromClientSide {
};
RowMutations arm = new RowMutations(ROW);
Put p = new Put(ROW);
- p.add(FAMILY, QUALIFIERS[0], VALUE);
+ p.addColumn(FAMILY, QUALIFIERS[0], VALUE);
arm.add(p);
t.mutateRow(arm);
@@ -4347,7 +4347,7 @@ public class TestFromClientSide {
arm = new RowMutations(ROW);
p = new Put(ROW);
- p.add(FAMILY, QUALIFIERS[1], VALUE);
+ p.addColumn(FAMILY, QUALIFIERS[1], VALUE);
arm.add(p);
Delete d = new Delete(ROW);
d.deleteColumns(FAMILY, QUALIFIERS[0]);
@@ -4362,7 +4362,7 @@ public class TestFromClientSide {
try {
arm = new RowMutations(ROW);
p = new Put(ROW);
- p.add(new byte[]{'b', 'o', 'g', 'u', 's'}, QUALIFIERS[0], VALUE);
+ p.addColumn(new byte[]{'b', 'o', 'g', 'u', 's'}, QUALIFIERS[0], VALUE);
arm.add(p);
t.mutateRow(arm);
fail("Expected NoSuchColumnFamilyException");
@@ -4429,7 +4429,7 @@ public class TestFromClientSide {
final byte[] COLUMN = Bytes.toBytes("column");
Put p = new Put(ROW);
// write an integer here (not a Long)
- p.add(FAMILY, COLUMN, Bytes.toBytes(5));
+ p.addColumn(FAMILY, COLUMN, Bytes.toBytes(5));
ht.put(p);
try {
ht.incrementColumnValue(ROW, FAMILY, COLUMN, 5);
@@ -4676,7 +4676,7 @@ public class TestFromClientSide {
for (int versions = 1; versions <= numVersions; versions++) {
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, ts + versions, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, ts + versions, VALUE);
table.put(put);
Result result = table.get(get);
@@ -4712,7 +4712,7 @@ public class TestFromClientSide {
for (int versions = 1; versions <= numVersions; versions++) {
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, ts + versions, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, ts + versions, VALUE);
table.put(put);
Result result = table.get(get);
@@ -4738,7 +4738,7 @@ public class TestFromClientSide {
public Void call() {
try {
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, ts + versionsCopy, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, ts + versionsCopy, VALUE);
table.put(put);
Result result = table.get(get);
@@ -4782,7 +4782,7 @@ public class TestFromClientSide {
Table table = TEST_UTIL.createTable(TableName.valueOf("testCheckAndPut"), FAMILY);
Put put1 = new Put(ROW);
- put1.add(FAMILY, QUALIFIER, VALUE);
+ put1.addColumn(FAMILY, QUALIFIER, VALUE);
// row doesn't exist, so using non-null value should be considered "not match".
boolean ok = table.checkAndPut(ROW, FAMILY, QUALIFIER, VALUE, put1);
@@ -4797,14 +4797,14 @@ public class TestFromClientSide {
assertEquals(ok, false);
Put put2 = new Put(ROW);
- put2.add(FAMILY, QUALIFIER, value2);
+ put2.addColumn(FAMILY, QUALIFIER, value2);
// row now exists, use the matching value to check
ok = table.checkAndPut(ROW, FAMILY, QUALIFIER, VALUE, put2);
assertEquals(ok, true);
Put put3 = new Put(anotherrow);
- put3.add(FAMILY, QUALIFIER, VALUE);
+ put3.addColumn(FAMILY, QUALIFIER, VALUE);
// try to do CheckAndPut on different rows
try {
@@ -4824,10 +4824,10 @@ public class TestFromClientSide {
Table table = TEST_UTIL.createTable(TableName.valueOf("testCheckAndPutWithCompareOp"), FAMILY);
Put put2 = new Put(ROW);
- put2.add(FAMILY, QUALIFIER, value2);
+ put2.addColumn(FAMILY, QUALIFIER, value2);
Put put3 = new Put(ROW);
- put3.add(FAMILY, QUALIFIER, value3);
+ put3.addColumn(FAMILY, QUALIFIER, value3);
// row doesn't exist, so using "null" to check for existence should be considered "match".
boolean ok = table.checkAndPut(ROW, FAMILY, QUALIFIER, null, put2);
@@ -4890,11 +4890,11 @@ public class TestFromClientSide {
FAMILY);
Put put2 = new Put(ROW);
- put2.add(FAMILY, QUALIFIER, value2);
+ put2.addColumn(FAMILY, QUALIFIER, value2);
table.put(put2);
Put put3 = new Put(ROW);
- put3.add(FAMILY, QUALIFIER, value3);
+ put3.addColumn(FAMILY, QUALIFIER, value3);
Delete delete = new Delete(ROW);
delete.deleteColumns(FAMILY, QUALIFIER);
@@ -4972,11 +4972,11 @@ public class TestFromClientSide {
// Create 3 rows in the table, with rowkeys starting with "zzz*" so that
// scan are forced to hit all the regions.
Put put1 = new Put(Bytes.toBytes("zzz1"));
- put1.add(FAMILY, QUALIFIER, VALUE);
+ put1.addColumn(FAMILY, QUALIFIER, VALUE);
Put put2 = new Put(Bytes.toBytes("zzz2"));
- put2.add(FAMILY, QUALIFIER, VALUE);
+ put2.addColumn(FAMILY, QUALIFIER, VALUE);
Put put3 = new Put(Bytes.toBytes("zzz3"));
- put3.add(FAMILY, QUALIFIER, VALUE);
+ put3.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(Arrays.asList(put1, put2, put3));
Scan scan1 = new Scan();
@@ -5134,7 +5134,7 @@ public class TestFromClientSide {
// insert data
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, data);
+ put.addColumn(FAMILY, QUALIFIER, data);
table.put(put);
assertTrue(Bytes.equals(table.get(new Get(ROW)).value(), data));
// data was in memstore so don't expect any changes
@@ -5160,7 +5160,7 @@ public class TestFromClientSide {
byte [] QUALIFIER2 = Bytes.add(QUALIFIER, QUALIFIER);
byte [] data2 = Bytes.add(data, data);
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER2, data2);
+ put.addColumn(FAMILY, QUALIFIER2, data2);
table.put(put);
Result r = table.get(new Get(ROW));
assertTrue(Bytes.equals(r.getValue(FAMILY, QUALIFIER), data));
@@ -5346,7 +5346,7 @@ public class TestFromClientSide {
List<Put> puts = new ArrayList<Put>();
for (int i=0;i !=100; i++){
Put put = new Put(Bytes.toBytes(i));
- put.add(FAMILY, FAMILY, Bytes.toBytes(i));
+ put.addColumn(FAMILY, FAMILY, Bytes.toBytes(i));
puts.add(put);
}
foo.put(puts);
@@ -5369,11 +5369,11 @@ public class TestFromClientSide {
public void testScan_NullQualifier() throws IOException {
Table table = TEST_UTIL.createTable(TableName.valueOf("testScan_NullQualifier"), FAMILY);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
put = new Put(ROW);
- put.add(FAMILY, null, VALUE);
+ put.addColumn(FAMILY, null, VALUE);
table.put(put);
LOG.info("Row put");
@@ -5400,7 +5400,7 @@ public class TestFromClientSide {
try {
Put put = new Put(ROW, -1);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
fail("Negative timestamps should not have been allowed");
} catch (IllegalArgumentException ex) {
@@ -5409,7 +5409,8 @@ public class TestFromClientSide {
try {
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, -1, VALUE);
+ long ts = -1;
+ put.addColumn(FAMILY, QUALIFIER, ts, VALUE);
table.put(put);
fail("Negative timestamps should not have been allowed");
} catch (IllegalArgumentException ex) {
@@ -5606,18 +5607,18 @@ public class TestFromClientSide {
// put the same row 4 times, with different values
Put p = new Put(row);
- p.add(FAMILY, QUALIFIER, 10, VALUE);
+ p.addColumn(FAMILY, QUALIFIER, (long) 10, VALUE);
table.put(p);
p = new Put(row);
- p.add(FAMILY, QUALIFIER, 11, ArrayUtils.add(VALUE, (byte) 2));
+ p.addColumn(FAMILY, QUALIFIER, (long) 11, ArrayUtils.add(VALUE, (byte) 2));
table.put(p);
p = new Put(row);
- p.add(FAMILY, QUALIFIER, 12, ArrayUtils.add(VALUE, (byte) 3));
+ p.addColumn(FAMILY, QUALIFIER, (long) 12, ArrayUtils.add(VALUE, (byte) 3));
table.put(p);
p = new Put(row);
- p.add(FAMILY, QUALIFIER, 13, ArrayUtils.add(VALUE, (byte) 4));
+ p.addColumn(FAMILY, QUALIFIER, (long) 13, ArrayUtils.add(VALUE, (byte) 4));
table.put(p);
int versions = 4;
@@ -5678,7 +5679,7 @@ public class TestFromClientSide {
int insertNum = 10;
for (int i = 0; i < 10; i++) {
Put put = new Put(Bytes.toBytes("row" + String.format("%03d", i)));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
}
@@ -5710,34 +5711,34 @@ public class TestFromClientSide {
TableName TABLE = TableName.valueOf("testSuperSimpleWithReverseScan");
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
Put put = new Put(Bytes.toBytes("0-b11111-0000000000000000000"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
put = new Put(Bytes.toBytes("0-b11111-0000000000000000002"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
put = new Put(Bytes.toBytes("0-b11111-0000000000000000004"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
put = new Put(Bytes.toBytes("0-b11111-0000000000000000006"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
put = new Put(Bytes.toBytes("0-b11111-0000000000000000008"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
put = new Put(Bytes.toBytes("0-b22222-0000000000000000001"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
put = new Put(Bytes.toBytes("0-b22222-0000000000000000003"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
put = new Put(Bytes.toBytes("0-b22222-0000000000000000005"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
put = new Put(Bytes.toBytes("0-b22222-0000000000000000007"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
put = new Put(Bytes.toBytes("0-b22222-0000000000000000009"));
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
Scan scan = new Scan(Bytes.toBytes("0-b11111-9223372036854775807"),
Bytes.toBytes("0-b11111-0000000000000000000"));
@@ -5767,7 +5768,7 @@ public class TestFromClientSide {
Bytes.toBytes("col9-<d2v1>-<d3v2>") };
for (int i = 0; i < 10; i++) {
Put put = new Put(ROWS[i]);
- put.add(FAMILY, QUALIFIERS[i], VALUE);
+ put.addColumn(FAMILY, QUALIFIERS[i], VALUE);
ht.put(put);
}
Scan scan = new Scan();
@@ -5809,7 +5810,7 @@ public class TestFromClientSide {
Bytes.toBytes("col9-<d2v1>-<d3v2>") };
for (int i = 0; i < 10; i++) {
Put put = new Put(ROWS[i]);
- put.add(FAMILY, QUALIFIERS[i], VALUE);
+ put.addColumn(FAMILY, QUALIFIERS[i], VALUE);
ht.put(put);
}
Scan scan = new Scan();
@@ -5870,7 +5871,7 @@ public class TestFromClientSide {
// Insert a row
Put put = new Put(ROWS[2]);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
ht.put(put);
// Make sure we can scan the row
@@ -5904,7 +5905,7 @@ public class TestFromClientSide {
Table ht = TEST_UTIL.createTable(TABLE, FAMILY);
// Null qualifier (should work)
Put put = new Put(ROW);
- put.add(FAMILY, null, VALUE);
+ put.addColumn(FAMILY, null, VALUE);
ht.put(put);
scanTestNull(ht, ROW, FAMILY, VALUE, true);
Delete delete = new Delete(ROW);
@@ -5915,7 +5916,7 @@ public class TestFromClientSide {
ht = TEST_UTIL.createTable(TableName.valueOf(TABLE2), FAMILY);
// Empty qualifier, byte[0] instead of null (should work)
put = new Put(ROW);
- put.add(FAMILY, HConstants.EMPTY_BYTE_ARRAY, VALUE);
+ put.addColumn(FAMILY, HConstants.EMPTY_BYTE_ARRAY, VALUE);
ht.put(put);
scanTestNull(ht, ROW, FAMILY, VALUE, true);
TEST_UTIL.flush();
@@ -5925,7 +5926,7 @@ public class TestFromClientSide {
ht.delete(delete);
// Null value
put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, null);
+ put.addColumn(FAMILY, QUALIFIER, null);
ht.put(put);
Scan scan = new Scan();
scan.setReversed(true);
@@ -5945,8 +5946,8 @@ public class TestFromClientSide {
Table ht = TEST_UTIL.createTable(TABLE, FAMILIES, 3);
Put put = new Put(ROW);
- put.add(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]);
- put.add(FAMILIES[0], QUALIFIER, ts[1], VALUES[1]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[1], VALUES[1]);
ht.put(put);
Delete delete = new Delete(ROW);
@@ -5963,12 +5964,12 @@ public class TestFromClientSide {
// Test delete latest version
put = new Put(ROW);
- put.add(FAMILIES[0], QUALIFIER, ts[4], VALUES[4]);
- put.add(FAMILIES[0], QUALIFIER, ts[2], VALUES[2]);
- put.add(FAMILIES[0], QUALIFIER, ts[3], VALUES[3]);
- put.add(FAMILIES[0], null, ts[4], VALUES[4]);
- put.add(FAMILIES[0], null, ts[2], VALUES[2]);
- put.add(FAMILIES[0], null, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[4], VALUES[4]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[0], null, ts[4], VALUES[4]);
+ put.addColumn(FAMILIES[0], null, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[0], null, ts[3], VALUES[3]);
ht.put(put);
delete = new Delete(ROW);
@@ -5997,8 +5998,8 @@ public class TestFromClientSide {
// But alas, this is not to be. We can't put them back in either case.
put = new Put(ROW);
- put.add(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]); // 1000
- put.add(FAMILIES[0], QUALIFIER, ts[4], VALUES[4]); // 5000
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[0], VALUES[0]);
+ put.addColumn(FAMILIES[0], QUALIFIER, ts[4], VALUES[4]);
ht.put(put);
// The Scanner returns the previous values, the expected-naive-unexpected
@@ -6016,24 +6017,24 @@ public class TestFromClientSide {
// ways
put = new Put(ROWS[0]);
- put.add(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
- put.add(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
- put.add(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
- put.add(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
ht.put(put);
put = new Put(ROWS[1]);
- put.add(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
- put.add(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
- put.add(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
- put.add(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
ht.put(put);
put = new Put(ROWS[2]);
- put.add(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
- put.add(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
- put.add(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
- put.add(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[0], VALUES[0]);
+ put.addColumn(FAMILIES[1], QUALIFIER, ts[1], VALUES[1]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[2], VALUES[2]);
+ put.addColumn(FAMILIES[2], QUALIFIER, ts[3], VALUES[3]);
ht.put(put);
delete = new Delete(ROWS[0]);
@@ -6087,12 +6088,12 @@ public class TestFromClientSide {
ht.delete(delete);
put = new Put(ROWS[3]);
- put.add(FAMILIES[2], QUALIFIER, VALUES[0]);
+ put.addColumn(FAMILIES[2], QUALIFIER, VALUES[0]);
ht.put(put);
put = new Put(ROWS[4]);
- put.add(FAMILIES[1], QUALIFIER, VALUES[1]);
- put.add(FAMILIES[2], QUALIFIER, VALUES[2]);
+ put.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
+ put.addColumn(FAMILIES[2], QUALIFIER, VALUES[2]);
ht.put(put);
scan = new Scan(ROWS[4]);
@@ -6142,7 +6143,7 @@ public class TestFromClientSide {
int insertNum = splitRows.length;
for (int i = 0; i < insertNum; i++) {
Put put = new Put(splitRows[i]);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
}
@@ -6194,14 +6195,14 @@ public class TestFromClientSide {
}
for (byte[] splitRow : splitRows) {
Put put = new Put(splitRow);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
byte[] nextRow = Bytes.copy(splitRow);
nextRow[nextRow.length - 1]++;
put = new Put(nextRow);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index 22309ef..ece98c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -109,7 +109,7 @@ public class TestFromClientSide3 {
for (int i = 0; i < nPuts; i++) {
byte[] qualifier = Bytes.toBytes(random.nextInt());
byte[] value = Bytes.toBytes(random.nextInt());
- put.add(family, qualifier, value);
+ put.addColumn(family, qualifier, value);
}
table.put(put);
}
@@ -274,7 +274,7 @@ public class TestFromClientSide3 {
actions.add(put1);
Put put2 = new Put(ANOTHERROW);
- put2.add(FAMILY, QUALIFIER, VALUE);
+ put2.addColumn(FAMILY, QUALIFIER, VALUE);
actions.add(put2);
table.batch(actions, results);
@@ -294,7 +294,7 @@ public class TestFromClientSide3 {
new byte[][] { FAMILY });
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
Get get = new Get(ROW);
@@ -312,7 +312,7 @@ public class TestFromClientSide3 {
"testHTableExistsMethodSingleRegionMultipleGets"), new byte[][] { FAMILY });
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
List<Get> gets = new ArrayList<Get>();
@@ -406,7 +406,7 @@ public class TestFromClientSide3 {
TableName.valueOf("testHTableExistsMethodMultipleRegionsMultipleGets"),
new byte[][] { FAMILY }, 1, new byte[] { 0x00 }, new byte[] { (byte) 0xff }, 255);
Put put = new Put(ROW);
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put (put);
List<Get> gets = new ArrayList<Get>();
@@ -424,7 +424,7 @@ public class TestFromClientSide3 {
// Test with the first region.
put = new Put(new byte[] { 0x00 });
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
gets = new ArrayList<Get>();
@@ -436,7 +436,7 @@ public class TestFromClientSide3 {
// Test with the last region
put = new Put(new byte[] { (byte) 0xff, (byte) 0xff });
- put.add(FAMILY, QUALIFIER, VALUE);
+ put.addColumn(FAMILY, QUALIFIER, VALUE);
table.put(put);
gets = new ArrayList<Get>();
@@ -459,7 +459,7 @@ public class TestFromClientSide3 {
Table table = TEST_UTIL.getConnection().getTable(desc.getTableName());
Put put = new Put(ROW_BYTES);
- put.add(FAMILY, COL_QUAL, VAL_BYTES);
+ put.addColumn(FAMILY, COL_QUAL, VAL_BYTES);
table.put(put);
//Try getting the row with an empty row key
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
index dbb1cd1..550a4c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
@@ -69,7 +69,9 @@ public class TestFromClientSideNoCodec {
// Check put and get.
final byte [] row = Bytes.toBytes("row");
Put p = new Put(row);
- for (byte [] f: fs) p.add(f, f, f);
+ for (byte [] f: fs) {
+ p.addColumn(f, f, f);
+ }
ht.put(p);
Result r = ht.get(new Get(row));
int i = 0;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
index a9b85c7..16465d2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
@@ -248,7 +248,7 @@ public class TestHCM {
}
Put p1 = new Put(rk);
- p1.add(cf, "qual".getBytes(), "val".getBytes());
+ p1.addColumn(cf, "qual".getBytes(), "val".getBytes());
t.put(p1);
rs.getRegionServer().abort("I'm dead");
@@ -430,7 +430,7 @@ public class TestHCM {
final Table table = connection.getTable(tableName);
Put put = new Put(ROW);
- put.add(FAM_NAM, ROW, ROW);
+ put.addColumn(FAM_NAM, ROW, ROW);
table.put(put);
ManualEnvironmentEdge mee = new ManualEnvironmentEdge();
@@ -583,7 +583,7 @@ public class TestHCM {
TEST_UTIL.waitUntilAllRegionsAssigned(table.getName());
Put put = new Put(ROW);
- put.add(FAM_NAM, ROW, ROW);
+ put.addColumn(FAM_NAM, ROW, ROW);
table.put(put);
ConnectionImplementation conn = (ConnectionImplementation) connection;
@@ -606,7 +606,7 @@ public class TestHCM {
conn.clearRegionCache(TABLE_NAME);
Assert.assertEquals(0, conn.getNumberOfCachedRegionLocations(TABLE_NAME));
Put put2 = new Put(ROW);
- put2.add(FAM_NAM, ROW, ROW);
+ put2.addColumn(FAM_NAM, ROW, ROW);
table.put(put2);
assertNotNull(conn.getCachedLocation(TABLE_NAME, ROW));
assertNotNull(conn.getCachedLocation(TableName.valueOf(TABLE_NAME.getName()), ROW.clone()));
@@ -676,7 +676,7 @@ public class TestHCM {
// We do a put and expect the cache to be updated, even if we don't retry
LOG.info("Put starting");
Put put3 = new Put(ROW);
- put3.add(FAM_NAM, ROW, ROW);
+ put3.addColumn(FAM_NAM, ROW, ROW);
try {
table.put(put3);
Assert.fail("Unreachable point");
@@ -783,7 +783,7 @@ public class TestHCM {
public void testCacheSeqNums() throws Exception{
Table table = TEST_UTIL.createMultiRegionTable(TABLE_NAME2, FAM_NAM);
Put put = new Put(ROW);
- put.add(FAM_NAM, ROW, ROW);
+ put.addColumn(FAM_NAM, ROW, ROW);
table.put(put);
ConnectionImplementation conn = (ConnectionImplementation) TEST_UTIL.getConnection();
@@ -895,44 +895,44 @@ public class TestHCM {
@Test
public void testMulti() throws Exception {
Table table = TEST_UTIL.createMultiRegionTable(TABLE_NAME3, FAM_NAM);
- try {
- ConnectionImplementation conn =
+ try {
+ ConnectionImplementation conn =
(ConnectionImplementation)TEST_UTIL.getConnection();
- // We're now going to move the region and check that it works for the client
- // First a new put to add the location in the cache
- conn.clearRegionCache(TABLE_NAME3);
- Assert.assertEquals(0, conn.getNumberOfCachedRegionLocations(TABLE_NAME3));
+ // We're now going to move the region and check that it works for the client
+ // First a new put to add the location in the cache
+ conn.clearRegionCache(TABLE_NAME3);
+ Assert.assertEquals(0, conn.getNumberOfCachedRegionLocations(TABLE_NAME3));
- TEST_UTIL.getHBaseAdmin().setBalancerRunning(false, false);
- HMaster master = TEST_UTIL.getMiniHBaseCluster().getMaster();
+ TEST_UTIL.getHBaseAdmin().setBalancerRunning(false, false);
+ HMaster master = TEST_UTIL.getMiniHBaseCluster().getMaster();
- // We can wait for all regions to be online, that makes log reading easier when debugging
- while (master.getAssignmentManager().getRegionStates().isRegionsInTransition()) {
- Thread.sleep(1);
- }
+ // We can wait for all regions to be online, that makes log reading easier when debugging
+ while (master.getAssignmentManager().getRegionStates().isRegionsInTransition()) {
+ Thread.sleep(1);
+ }
- Put put = new Put(ROW_X);
- put.add(FAM_NAM, ROW_X, ROW_X);
- table.put(put);
+ Put put = new Put(ROW_X);
+ put.addColumn(FAM_NAM, ROW_X, ROW_X);
+ table.put(put);
- // Now moving the region to the second server
- HRegionLocation toMove = conn.getCachedLocation(TABLE_NAME3, ROW_X).getRegionLocation();
- byte[] regionName = toMove.getRegionInfo().getRegionName();
- byte[] encodedRegionNameBytes = toMove.getRegionInfo().getEncodedNameAsBytes();
+ // Now moving the region to the second server
+ HRegionLocation toMove = conn.getCachedLocation(TABLE_NAME3, ROW_X).getRegionLocation();
+ byte[] regionName = toMove.getRegionInfo().getRegionName();
+ byte[] encodedRegionNameBytes = toMove.getRegionInfo().getEncodedNameAsBytes();
- // Choose the other server.
- int curServerId = TEST_UTIL.getHBaseCluster().getServerWith(regionName);
- int destServerId = (curServerId == 0 ? 1 : 0);
+ // Choose the other server.
+ int curServerId = TEST_UTIL.getHBaseCluster().getServerWith(regionName);
+ int destServerId = (curServerId == 0 ? 1 : 0);
- HRegionServer curServer = TEST_UTIL.getHBaseCluster().getRegionServer(curServerId);
- HRegionServer destServer = TEST_UTIL.getHBaseCluster().getRegionServer(destServerId);
+ HRegionServer curServer = TEST_UTIL.getHBaseCluster().getRegionServer(curServerId);
+ HRegionServer destServer = TEST_UTIL.getHBaseCluster().getRegionServer(destServerId);
- ServerName destServerName = destServer.getServerName();
+ ServerName destServerName = destServer.getServerName();
//find another row in the cur server that is less than ROW_X
- List<Region> regions = curServer.getOnlineRegions(TABLE_NAME3);
- byte[] otherRow = null;
+ List<Region> regions = curServer.getOnlineRegions(TABLE_NAME3);
+ byte[] otherRow = null;
for (Region region : regions) {
if (!region.getRegionInfo().getEncodedName().equals(toMove.getRegionInfo().getEncodedName())
&& Bytes.BYTES_COMPARATOR.compare(region.getRegionInfo().getStartKey(), ROW_X) < 0) {
@@ -940,69 +940,71 @@ public class TestHCM {
break;
}
}
- assertNotNull(otherRow);
- // If empty row, set it to first row.-f
- if (otherRow.length <= 0) otherRow = Bytes.toBytes("aaa");
- Put put2 = new Put(otherRow);
- put2.add(FAM_NAM, otherRow, otherRow);
- table.put(put2); //cache put2's location
-
- // Check that we are in the expected state
- Assert.assertTrue(curServer != destServer);
- Assert.assertNotEquals(curServer.getServerName(), destServer.getServerName());
- Assert.assertNotEquals(toMove.getPort(), destServerName.getPort());
- Assert.assertNotNull(curServer.getOnlineRegion(regionName));
- Assert.assertNull(destServer.getOnlineRegion(regionName));
- Assert.assertFalse(TEST_UTIL.getMiniHBaseCluster().getMaster().
- getAssignmentManager().getRegionStates().isRegionsInTransition());
+ assertNotNull(otherRow);
+ // If empty row, set it to first row.-f
+ if (otherRow.length <= 0) otherRow = Bytes.toBytes("aaa");
+ Put put2 = new Put(otherRow);
+ put2.addColumn(FAM_NAM, otherRow, otherRow);
+ table.put(put2); //cache put2's location
+
+ // Check that we are in the expected state
+ Assert.assertTrue(curServer != destServer);
+ Assert.assertNotEquals(curServer.getServerName(), destServer.getServerName());
+ Assert.assertNotEquals(toMove.getPort(), destServerName.getPort());
+ Assert.assertNotNull(curServer.getOnlineRegion(regionName));
+ Assert.assertNull(destServer.getOnlineRegion(regionName));
+ Assert.assertFalse(TEST_UTIL.getMiniHBaseCluster().getMaster().
+ getAssignmentManager().getRegionStates().isRegionsInTransition());
// Moving. It's possible that we don't have all the regions online at this point, so
- // the test must depends only on the region we're looking at.
- LOG.info("Move starting region="+toMove.getRegionInfo().getRegionNameAsString());
- TEST_UTIL.getHBaseAdmin().move(
- toMove.getRegionInfo().getEncodedNameAsBytes(),
- destServerName.getServerName().getBytes()
- );
-
- while (destServer.getOnlineRegion(regionName) == null ||
- destServer.getRegionsInTransitionInRS().containsKey(encodedRegionNameBytes) ||
- curServer.getRegionsInTransitionInRS().containsKey(encodedRegionNameBytes) ||
- master.getAssignmentManager().getRegionStates().isRegionsInTransition()) {
- // wait for the move to be finished
- Thread.sleep(1);
- }
+ // the test must depends only on the region we're looking at.
+ LOG.info("Move starting region="+toMove.getRegionInfo().getRegionNameAsString());
+ TEST_UTIL.getHBaseAdmin().move(
+ toMove.getRegionInfo().getEncodedNameAsBytes(),
+ destServerName.getServerName().getBytes()
+ );
+
+ while (destServer.getOnlineRegion(regionName) == null ||
+ destServer.getRegionsInTransitionInRS().containsKey(encodedRegionNameBytes) ||
+ curServer.getRegionsInTransitionInRS().containsKey(encodedRegionNameBytes) ||
+ master.getAssignmentManager().getRegionStates().isRegionsInTransition()) {
+ // wait for the move to be finished
+ Thread.sleep(1);
+ }
- LOG.info("Move finished for region="+toMove.getRegionInfo().getRegionNameAsString());
+ LOG.info("Move finished for region="+toMove.getRegionInfo().getRegionNameAsString());
- // Check our new state.
- Assert.assertNull(curServer.getOnlineRegion(regionName));
- Assert.assertNotNull(destServer.getOnlineRegion(regionName));
- Assert.assertFalse(destServer.getRegionsInTransitionInRS().containsKey(encodedRegionNameBytes));
- Assert.assertFalse(curServer.getRegionsInTransitionInRS().containsKey(encodedRegionNameBytes));
+ // Check our new state.
+ Assert.assertNull(curServer.getOnlineRegion(regionName));
+ Assert.assertNotNull(destServer.getOnlineRegion(regionName));
+ Assert.assertFalse(destServer.getRegionsInTransitionInRS()
+ .containsKey(encodedRegionNameBytes));
+ Assert.assertFalse(curServer.getRegionsInTransitionInRS()
+ .containsKey(encodedRegionNameBytes));
// Cache was NOT updated and points to the wrong server
- Assert.assertFalse(
- conn.getCachedLocation(TABLE_NAME3, ROW_X).getRegionLocation()
- .getPort() == destServerName.getPort());
+ Assert.assertFalse(
+ conn.getCachedLocation(TABLE_NAME3, ROW_X).getRegionLocation()
+ .getPort() == destServerName.getPort());
- // Hijack the number of retry to fail after 2 tries
- final int prevNumRetriesVal = setNumTries(conn, 2);
+ // Hijack the number of retry to fail after 2 tries
+ final int prevNumRetriesVal = setNumTries(conn, 2);
- Put put3 = new Put(ROW_X);
- put3.add(FAM_NAM, ROW_X, ROW_X);
- Put put4 = new Put(otherRow);
- put4.add(FAM_NAM, otherRow, otherRow);
+ Put put3 = new Put(ROW_X);
+ put3.addColumn(FAM_NAM, ROW_X, ROW_X);
+ Put put4 = new Put(otherRow);
+ put4.addColumn(FAM_NAM, otherRow, otherRow);
// do multi
ArrayList<Put> actions = Lists.newArrayList(put4, put3);
table.batch(actions, null); // first should be a valid row,
- // second we get RegionMovedException.
+ // second we get RegionMovedException.
- setNumTries(conn, prevNumRetriesVal);
- } finally {
- table.close();
- }
+ setNumTries(conn, prevNumRetriesVal);
+ } finally {
+ table.close();
+ }
}
@Ignore ("Test presumes RETRY_BACKOFF will never change; it has") @Test
@@ -1095,4 +1097,4 @@ public class TestHCM {
table.close();
connection.close();
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
index 908fcdf..9aa493c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
@@ -110,11 +110,11 @@ public class TestHTableMultiplexer {
for (int i = 0; i < NUM_REGIONS; i++) {
byte [] row = startRows[i];
if (row == null || row.length <= 0) continue;
- Put put = new Put(row).add(FAMILY, QUALIFIER, VALUE1);
+ Put put = new Put(row).addColumn(FAMILY, QUALIFIER, VALUE1);
success = multiplexer.put(TABLE_1, put);
assertTrue("multiplexer.put returns", success);
- put = new Put(row).add(FAMILY, QUALIFIER, VALUE1);
+ put = new Put(row).addColumn(FAMILY, QUALIFIER, VALUE1);
success = multiplexer.put(TABLE_2, put);
assertTrue("multiplexer.put failed", success);
@@ -131,7 +131,7 @@ public class TestHTableMultiplexer {
byte [] row = endRows[i];
if (row == null || row.length <= 0) continue;
Put put = new Put(row);
- put.add(FAMILY, QUALIFIER, VALUE2);
+ put.addColumn(FAMILY, QUALIFIER, VALUE2);
multiput.add(put);
}
failedPuts = multiplexer.put(TABLE_1, multiput);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
index b71e881..063e376 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
@@ -97,7 +97,7 @@ public class TestHTableMultiplexerFlushCache {
byte[] row = startRows[1];
assertTrue("2nd region should not start with empty row", row != null && row.length > 0);
- Put put = new Put(row).add(FAMILY, QUALIFIER1, VALUE1);
+ Put put = new Put(row).addColumn(FAMILY, QUALIFIER1, VALUE1);
assertTrue("multiplexer.put returns", multiplexer.put(TABLE, put));
checkExistence(htable, row, FAMILY, QUALIFIER1, VALUE1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
index 9173dc5..831738c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
@@ -208,7 +208,7 @@ public class TestMetaWithReplicas {
htable = c.getTable(TABLE);
byte[] row = "test".getBytes();
Put put = new Put(row);
- put.add("foo".getBytes(), row, row);
+ put.addColumn("foo".getBytes(), row, row);
BufferedMutator m = c.getBufferedMutator(TABLE);
m.mutate(put);
m.flush();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index 905a7db..c83b709 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -213,10 +213,10 @@ public class TestMultiParallel {
List<Row> actions = new ArrayList<Row>();
Put p = new Put(Bytes.toBytes("row1"));
- p.add(Bytes.toBytes("bad_family"), Bytes.toBytes("qual"), Bytes.toBytes("value"));
+ p.addColumn(Bytes.toBytes("bad_family"), Bytes.toBytes("qual"), Bytes.toBytes("value"));
actions.add(p);
p = new Put(Bytes.toBytes("row2"));
- p.add(BYTES_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value"));
+ p.addColumn(BYTES_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value"));
actions.add(p);
// row1 and row2 should be in the same region.
@@ -423,7 +423,7 @@ public class TestMultiParallel {
for (int i = 0; i < 100; i++) {
Put put = new Put(ONE_ROW);
byte[] qual = Bytes.toBytes("column" + i);
- put.add(BYTES_FAMILY, qual, VALUE);
+ put.addColumn(BYTES_FAMILY, qual, VALUE);
puts.add(put);
}
Object[] results = new Object[puts.size()];
@@ -464,8 +464,8 @@ public class TestMultiParallel {
Delete d = new Delete(ONE_ROW);
table.delete(d);
Put put = new Put(ONE_ROW);
- put.add(BYTES_FAMILY, QUAL1, Bytes.toBytes("abc"));
- put.add(BYTES_FAMILY, QUAL2, Bytes.toBytes(1L));
+ put.addColumn(BYTES_FAMILY, QUAL1, Bytes.toBytes("abc"));
+ put.addColumn(BYTES_FAMILY, QUAL2, Bytes.toBytes(1L));
table.put(put);
Increment inc = new Increment(ONE_ROW);
@@ -494,7 +494,7 @@ public class TestMultiParallel {
final Connection connection = ConnectionFactory.createConnection(UTIL.getConfiguration());
Table table = connection.getTable(TEST_TABLE);
Put put = new Put(ONE_ROW);
- put.add(BYTES_FAMILY, QUALIFIER, Bytes.toBytes(0L));
+ put.addColumn(BYTES_FAMILY, QUALIFIER, Bytes.toBytes(0L));
// Replace nonce manager with the one that returns each nonce twice.
NonceGenerator cnm = new PerClientRandomNonceGenerator() {
@@ -610,7 +610,7 @@ public class TestMultiParallel {
// 2 put of new column
Put put = new Put(KEYS[10]);
- put.add(BYTES_FAMILY, qual2, val2);
+ put.addColumn(BYTES_FAMILY, qual2, val2);
actions.add(put);
// 3 delete
@@ -629,7 +629,7 @@ public class TestMultiParallel {
// 5 put of new column
put = new Put(KEYS[40]);
- put.add(BYTES_FAMILY, qual2, val2);
+ put.addColumn(BYTES_FAMILY, qual2, val2);
actions.add(put);
results = new Object[actions.size()];
@@ -673,7 +673,7 @@ public class TestMultiParallel {
List<Put> puts = new ArrayList<>();
for (byte[] k : KEYS) {
Put put = new Put(k);
- put.add(BYTES_FAMILY, QUALIFIER, VALUE);
+ put.addColumn(BYTES_FAMILY, QUALIFIER, VALUE);
puts.add(put);
}
return puts;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
index ba6a71b..174b430 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
@@ -475,8 +475,8 @@ public class TestMultipleTimestamps {
for(int colIdx: columnIndexes) {
byte column[] = Bytes.toBytes("column:" + colIdx);
for (long version: versions) {
- put.add(cf, column, version, Bytes.toBytes("value-version-" +
- version));
+ put.addColumn(cf, column, version, Bytes.toBytes("value-version-" +
+ version));
}
}
ht.put(put);
@@ -496,7 +496,7 @@ public class TestMultipleTimestamps {
put.setDurability(Durability.SKIP_WAL);
for (long idx = versionStart; idx <= versionEnd; idx++) {
- put.add(cf, column, idx, Bytes.toBytes("value-version-" + idx));
+ put.addColumn(cf, column, idx, Bytes.toBytes("value-version-" + idx));
}
ht.put(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index 9be2f64..50efed9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -49,7 +49,7 @@ public class TestPutDeleteEtcCellIteration {
Put p = new Put(ROW);
for (int i = 0; i < COUNT; i++) {
byte [] bytes = Bytes.toBytes(i);
- p.add(bytes, bytes, TIMESTAMP, bytes);
+ p.addColumn(bytes, bytes, TIMESTAMP, bytes);
}
int index = 0;
for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
@@ -65,7 +65,7 @@ public class TestPutDeleteEtcCellIteration {
Put p = new Put(ROW);
for (int i = 0; i < COUNT; i++) {
byte [] bytes = Bytes.toBytes(i);
- p.add(bytes, bytes, TIMESTAMP, bytes);
+ p.addColumn(bytes, bytes, TIMESTAMP, bytes);
}
int index = 0;
int trigger = 3;
@@ -73,7 +73,7 @@ public class TestPutDeleteEtcCellIteration {
Cell cell = cellScanner.current();
byte [] bytes = Bytes.toBytes(index++);
// When we hit the trigger, try inserting a new KV; should trigger exception
- if (trigger == 3) p.add(bytes, bytes, TIMESTAMP, bytes);
+ if (trigger == 3) p.addColumn(bytes, bytes, TIMESTAMP, bytes);
cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
}
assertEquals(COUNT, index);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutWithDelete.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutWithDelete.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutWithDelete.java
index 0e819bb..0b5cd4e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutWithDelete.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutWithDelete.java
@@ -59,10 +59,10 @@ public class TestPutWithDelete {
try {
// put one row
Put put = new Put(rowKey);
- put.add(family, Bytes.toBytes("A"), Bytes.toBytes("a"));
- put.add(family, Bytes.toBytes("B"), Bytes.toBytes("b"));
- put.add(family, Bytes.toBytes("C"), Bytes.toBytes("c"));
- put.add(family, Bytes.toBytes("D"), Bytes.toBytes("d"));
+ put.addColumn(family, Bytes.toBytes("A"), Bytes.toBytes("a"));
+ put.addColumn(family, Bytes.toBytes("B"), Bytes.toBytes("b"));
+ put.addColumn(family, Bytes.toBytes("C"), Bytes.toBytes("c"));
+ put.addColumn(family, Bytes.toBytes("D"), Bytes.toBytes("d"));
table.put(put);
// get row back and assert the values
Get get = new Get(rowKey);
@@ -77,11 +77,11 @@ public class TestPutWithDelete {
Bytes.toString(result.getValue(family, Bytes.toBytes("D"))).equals("d"));
// put the same row again with C column deleted
put = new Put(rowKey);
- put.add(family, Bytes.toBytes("A"), Bytes.toBytes("a1"));
- put.add(family, Bytes.toBytes("B"), Bytes.toBytes("b1"));
+ put.addColumn(family, Bytes.toBytes("A"), Bytes.toBytes("a1"));
+ put.addColumn(family, Bytes.toBytes("B"), Bytes.toBytes("b1"));
KeyValue marker = new KeyValue(rowKey, family, Bytes.toBytes("C"),
HConstants.LATEST_TIMESTAMP, KeyValue.Type.DeleteColumn);
- put.add(family, Bytes.toBytes("D"), Bytes.toBytes("d1"));
+ put.addColumn(family, Bytes.toBytes("D"), Bytes.toBytes("d1"));
put.add(marker);
table.put(put);
// get row back and assert the values
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
index 723242b..b773b46 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
@@ -196,7 +196,7 @@ public class TestReplicaWithCluster {
bHdt.getColumnFamilies().length + 1, nHdt.getColumnFamilies().length);
p = new Put(row);
- p.add(row, row, row);
+ p.addColumn(row, row, row);
table.put(p);
g = new Get(row);
@@ -253,7 +253,7 @@ public class TestReplicaWithCluster {
admin.close();
Put p = new Put(row);
- p.add(row, row, row);
+ p.addColumn(row, row, row);
final Table table = HTU.getConnection().getTable(hdt.getTableName());
table.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
index a064bcc..d2e775d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
@@ -384,7 +384,7 @@ public class TestReplicasClient {
flushRegion(hriSecondary);
Put p = new Put(row);
- p.add(f, row, row);
+ p.addColumn(f, row, row);
table.put(p);
flushRegion(hriPrimary);
@@ -404,7 +404,7 @@ public class TestReplicasClient {
flushRegion(hriPrimary);
Put p = new Put(row);
- p.add(f, row, row);
+ p.addColumn(f, row, row);
table.put(p);
flushRegion(hriPrimary);
@@ -422,7 +422,7 @@ public class TestReplicasClient {
flushRegion(hriSecondary);
Put p = new Put(row);
- p.add(f, row, row);
+ p.addColumn(f, row, row);
table.put(p);
flushRegion(hriSecondary);
@@ -442,7 +442,7 @@ public class TestReplicasClient {
try {
// A simple put works, even if there here a second replica
Put p = new Put(b1);
- p.add(f, b1, b1);
+ p.addColumn(f, b1, b1);
table.put(p);
LOG.info("Put done");
@@ -534,12 +534,12 @@ public class TestReplicasClient {
List<Put> puts = new ArrayList<Put>(2);
byte[] b1 = Bytes.toBytes("testCancelOfMultiGet" + 0);
Put p = new Put(b1);
- p.add(f, b1, b1);
+ p.addColumn(f, b1, b1);
puts.add(p);
byte[] b2 = Bytes.toBytes("testCancelOfMultiGet" + 1);
p = new Put(b2);
- p.add(f, b2, b2);
+ p.addColumn(f, b2, b2);
puts.add(p);
table.put(puts);
LOG.debug("PUT done");
@@ -619,7 +619,7 @@ public class TestReplicasClient {
for (int i = 0; i < NUMROWS; i++) {
byte[] b1 = Bytes.toBytes("testUseRegionWithReplica" + i);
Put p = new Put(b1);
- p.add(f, b1, b1);
+ p.addColumn(f, b1, b1);
table.put(p);
}
LOG.debug("PUT done");
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
index fca34c1..c087135 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
@@ -135,7 +135,7 @@ public class TestRpcControllerFactory {
Table table = connection.getTable(name);
byte[] row = Bytes.toBytes("row");
Put p = new Put(row);
- p.add(fam1, fam1, Bytes.toBytes("val0"));
+ p.addColumn(fam1, fam1, Bytes.toBytes("val0"));
table.put(p);
Integer counter = 1;
@@ -147,7 +147,7 @@ public class TestRpcControllerFactory {
counter = verifyCount(counter);
Put p2 = new Put(row);
- p2.add(fam1, Bytes.toBytes("qual"), Bytes.toBytes("val1"));
+ p2.addColumn(fam1, Bytes.toBytes("qual"), Bytes.toBytes("val1"));
table.batch(Lists.newArrayList(p, p2), null);
// this only goes to a single server, so we don't need to change the count here
counter = verifyCount(counter);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
index f86f248..2fb3aff 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
@@ -68,9 +68,9 @@ public class TestScannerTimeout {
// We need more than one region server for this test
TEST_UTIL.startMiniCluster(2);
Table table = TEST_UTIL.createTable(TABLE_NAME, SOME_BYTES);
- for (int i = 0; i < NB_ROWS; i++) {
+ for (int i = 0; i < NB_ROWS; i++) {
Put put = new Put(Bytes.toBytes(i));
- put.add(SOME_BYTES, SOME_BYTES, SOME_BYTES);
+ put.addColumn(SOME_BYTES, SOME_BYTES, SOME_BYTES);
table.put(put);
}
table.close();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
index 7bff686..567e887 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
@@ -176,15 +176,15 @@ public class TestTimestampsFilter {
Table ht = TEST_UTIL.createTable(TableName.valueOf(TABLE), FAMILIES, Integer.MAX_VALUE);
Put p = new Put(Bytes.toBytes("row"));
- p.add(FAMILY, Bytes.toBytes("column0"), 3, Bytes.toBytes("value0-3"));
- p.add(FAMILY, Bytes.toBytes("column1"), 3, Bytes.toBytes("value1-3"));
- p.add(FAMILY, Bytes.toBytes("column2"), 1, Bytes.toBytes("value2-1"));
- p.add(FAMILY, Bytes.toBytes("column2"), 2, Bytes.toBytes("value2-2"));
- p.add(FAMILY, Bytes.toBytes("column2"), 3, Bytes.toBytes("value2-3"));
- p.add(FAMILY, Bytes.toBytes("column3"), 2, Bytes.toBytes("value3-2"));
- p.add(FAMILY, Bytes.toBytes("column4"), 1, Bytes.toBytes("value4-1"));
- p.add(FAMILY, Bytes.toBytes("column4"), 2, Bytes.toBytes("value4-2"));
- p.add(FAMILY, Bytes.toBytes("column4"), 3, Bytes.toBytes("value4-3"));
+ p.addColumn(FAMILY, Bytes.toBytes("column0"), (long) 3, Bytes.toBytes("value0-3"));
+ p.addColumn(FAMILY, Bytes.toBytes("column1"), (long) 3, Bytes.toBytes("value1-3"));
+ p.addColumn(FAMILY, Bytes.toBytes("column2"), (long) 1, Bytes.toBytes("value2-1"));
+ p.addColumn(FAMILY, Bytes.toBytes("column2"), (long) 2, Bytes.toBytes("value2-2"));
+ p.addColumn(FAMILY, Bytes.toBytes("column2"), (long) 3, Bytes.toBytes("value2-3"));
+ p.addColumn(FAMILY, Bytes.toBytes("column3"), (long) 2, Bytes.toBytes("value3-2"));
+ p.addColumn(FAMILY, Bytes.toBytes("column4"), (long) 1, Bytes.toBytes("value4-1"));
+ p.addColumn(FAMILY, Bytes.toBytes("column4"), (long) 2, Bytes.toBytes("value4-2"));
+ p.addColumn(FAMILY, Bytes.toBytes("column4"), (long) 3, Bytes.toBytes("value4-3"));
ht.put(p);
ArrayList<Long> timestamps = new ArrayList<Long>();
@@ -360,7 +360,7 @@ public class TestTimestampsFilter {
put.setDurability(Durability.SKIP_WAL);
for (long idx = versionStart; idx <= versionEnd; idx++) {
- put.add(cf, column, idx, Bytes.toBytes("value-version-" + idx));
+ put.addColumn(cf, column, idx, Bytes.toBytes("value-version-" + idx));
}
ht.put(put);
[2/8] hbase git commit: HBASE-14675 Exorcise deprecated Put#add(...)
and replace with Put#addColumn(...)
Posted by jm...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactionWithThroughputController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactionWithThroughputController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactionWithThroughputController.java
index 05e1693..9374fdb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactionWithThroughputController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestCompactionWithThroughputController.java
@@ -91,7 +91,7 @@ public class TestCompactionWithThroughputController {
for (int j = 0; j < 10; j++) {
byte[] value = new byte[128 * 1024];
rand.nextBytes(value);
- table.put(new Put(Bytes.toBytes(i * 10 + j)).add(family, qualifier, value));
+ table.put(new Put(Bytes.toBytes(i * 10 + j)).addColumn(family, qualifier, value));
}
admin.flush(tableName);
}
@@ -207,18 +207,21 @@ public class TestCompactionWithThroughputController {
assertEquals(10L * 1024 * 1024, throughputController.maxThroughput, EPSILON);
Table table = conn.getTable(tableName);
for (int i = 0; i < 5; i++) {
- table.put(new Put(Bytes.toBytes(i)).add(family, qualifier, new byte[0]));
+ byte[] value = new byte[0];
+ table.put(new Put(Bytes.toBytes(i)).addColumn(family, qualifier, value));
TEST_UTIL.flush(tableName);
}
Thread.sleep(2000);
assertEquals(15L * 1024 * 1024, throughputController.maxThroughput, EPSILON);
- table.put(new Put(Bytes.toBytes(5)).add(family, qualifier, new byte[0]));
+ byte[] value1 = new byte[0];
+ table.put(new Put(Bytes.toBytes(5)).addColumn(family, qualifier, value1));
TEST_UTIL.flush(tableName);
Thread.sleep(2000);
assertEquals(20L * 1024 * 1024, throughputController.maxThroughput, EPSILON);
- table.put(new Put(Bytes.toBytes(6)).add(family, qualifier, new byte[0]));
+ byte[] value = new byte[0];
+ table.put(new Put(Bytes.toBytes(6)).addColumn(family, qualifier, value));
TEST_UTIL.flush(tableName);
Thread.sleep(2000);
assertEquals(Double.MAX_VALUE, throughputController.maxThroughput, EPSILON);
@@ -259,27 +262,35 @@ public class TestCompactionWithThroughputController {
assertEquals(0.0, store.getCompactionPressure(), EPSILON);
Table table = conn.getTable(tableName);
for (int i = 0; i < 4; i++) {
- table.put(new Put(Bytes.toBytes(i)).add(family, qualifier, new byte[0]));
- table.put(new Put(Bytes.toBytes(100 + i)).add(family, qualifier, new byte[0]));
+ byte[] value1 = new byte[0];
+ table.put(new Put(Bytes.toBytes(i)).addColumn(family, qualifier, value1));
+ byte[] value = new byte[0];
+ table.put(new Put(Bytes.toBytes(100 + i)).addColumn(family, qualifier, value));
TEST_UTIL.flush(tableName);
}
assertEquals(8, store.getStorefilesCount());
assertEquals(0.0, store.getCompactionPressure(), EPSILON);
- table.put(new Put(Bytes.toBytes(4)).add(family, qualifier, new byte[0]));
- table.put(new Put(Bytes.toBytes(104)).add(family, qualifier, new byte[0]));
+ byte[] value5 = new byte[0];
+ table.put(new Put(Bytes.toBytes(4)).addColumn(family, qualifier, value5));
+ byte[] value4 = new byte[0];
+ table.put(new Put(Bytes.toBytes(104)).addColumn(family, qualifier, value4));
TEST_UTIL.flush(tableName);
assertEquals(10, store.getStorefilesCount());
assertEquals(0.5, store.getCompactionPressure(), EPSILON);
- table.put(new Put(Bytes.toBytes(5)).add(family, qualifier, new byte[0]));
- table.put(new Put(Bytes.toBytes(105)).add(family, qualifier, new byte[0]));
+ byte[] value3 = new byte[0];
+ table.put(new Put(Bytes.toBytes(5)).addColumn(family, qualifier, value3));
+ byte[] value2 = new byte[0];
+ table.put(new Put(Bytes.toBytes(105)).addColumn(family, qualifier, value2));
TEST_UTIL.flush(tableName);
assertEquals(12, store.getStorefilesCount());
assertEquals(1.0, store.getCompactionPressure(), EPSILON);
- table.put(new Put(Bytes.toBytes(6)).add(family, qualifier, new byte[0]));
- table.put(new Put(Bytes.toBytes(106)).add(family, qualifier, new byte[0]));
+ byte[] value1 = new byte[0];
+ table.put(new Put(Bytes.toBytes(6)).addColumn(family, qualifier, value1));
+ byte[] value = new byte[0];
+ table.put(new Put(Bytes.toBytes(106)).addColumn(family, qualifier, value));
TEST_UTIL.flush(tableName);
assertEquals(14, store.getStorefilesCount());
assertEquals(2.0, store.getCompactionPressure(), EPSILON);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
index 72e4330..0662716 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
@@ -218,7 +218,7 @@ public class TestDurability {
private Put newPut(Durability durability) {
Put p = new Put(ROW);
- p.add(FAMILY, COL, COL);
+ p.addColumn(FAMILY, COL, COL);
if (durability != null) {
p.setDurability(durability);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
index 8106acf..9dccffe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
@@ -145,13 +145,13 @@ public class TestLogRollAbort {
FSUtils.isAppendSupported(TEST_UTIL.getConfiguration()));
Put p = new Put(Bytes.toBytes("row2001"));
- p.add(HConstants.CATALOG_FAMILY, Bytes.toBytes("col"), Bytes.toBytes(2001));
+ p.addColumn(HConstants.CATALOG_FAMILY, Bytes.toBytes("col"), Bytes.toBytes(2001));
table.put(p);
log.sync();
p = new Put(Bytes.toBytes("row2002"));
- p.add(HConstants.CATALOG_FAMILY, Bytes.toBytes("col"), Bytes.toBytes(2002));
+ p.addColumn(HConstants.CATALOG_FAMILY, Bytes.toBytes("col"), Bytes.toBytes(2002));
table.put(p);
dfsCluster.restartDataNodes();
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java
index 56d0139..f7e5ff7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollPeriod.java
@@ -108,7 +108,7 @@ public class TestLogRollPeriod {
long row = 0;
while (!interrupted()) {
Put p = new Put(Bytes.toBytes(String.format("row%d", row)));
- p.add(Bytes.toBytes(family), Bytes.toBytes("col"), Bytes.toBytes(row));
+ p.addColumn(Bytes.toBytes(family), Bytes.toBytes("col"), Bytes.toBytes(row));
table.put(p);
row++;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
index a5d366b..3ab49c0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
@@ -268,11 +268,11 @@ public class TestLogRolling {
for (int i = 0; i < 10; i++) {
Put put = new Put(Bytes.toBytes("row"
+ String.format("%1$04d", (start + i))));
- put.add(HConstants.CATALOG_FAMILY, null, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, null, value);
table.put(put);
}
Put tmpPut = new Put(Bytes.toBytes("tmprow"));
- tmpPut.add(HConstants.CATALOG_FAMILY, null, value);
+ tmpPut.addColumn(HConstants.CATALOG_FAMILY, null, value);
long startTime = System.currentTimeMillis();
long remaining = timeout;
while (remaining > 0) {
@@ -631,7 +631,7 @@ public class TestLogRolling {
private void doPut(Table table, int i) throws IOException {
Put put = new Put(Bytes.toBytes("row" + String.format("%1$04d", i)));
- put.add(HConstants.CATALOG_FAMILY, null, value);
+ put.addColumn(HConstants.CATALOG_FAMILY, null, value);
table.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
index 1c97a2d..549a018 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
@@ -177,7 +177,7 @@ public class TestWALReplay {
TEST_UTIL.createTable(tableName, familys);
Table htable = TEST_UTIL.getConnection().getTable(tableName);
Put put = new Put(Bytes.toBytes("r1"));
- put.add(family1, qualifier, value);
+ put.addColumn(family1, qualifier, value);
htable.put(put);
ResultScanner resultScanner = htable.getScanner(new Scan());
int count = 0;
@@ -353,7 +353,7 @@ public class TestWALReplay {
// Add an edit so something in the WAL
byte [] row = tableName.getName();
- region.put((new Put(row)).add(family, family, family));
+ region.put((new Put(row)).addColumn(family, family, family));
wal.sync();
final int rowsInsertedCount = 11;
@@ -412,7 +412,7 @@ public class TestWALReplay {
// Add an edit so something in the WAL
byte [] row = tableName.getName();
byte [] family = htd.getFamilies().iterator().next().getName();
- region.put((new Put(row)).add(family, family, family));
+ region.put((new Put(row)).addColumn(family, family, family));
wal.sync();
List <Pair<byte[],String>> hfs= new ArrayList<Pair<byte[],String>>(1);
@@ -686,7 +686,7 @@ public class TestWALReplay {
htd.getFamilies());
for (int i = 0; i < writtenRowCount; i++) {
Put put = new Put(Bytes.toBytes(tableName + Integer.toString(i)));
- put.add(families.get(i % families.size()).getName(), Bytes.toBytes("q"),
+ put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"),
Bytes.toBytes("val"));
region.put(put);
}
@@ -712,7 +712,7 @@ public class TestWALReplay {
int moreRow = 10;
for (int i = writtenRowCount; i < writtenRowCount + moreRow; i++) {
Put put = new Put(Bytes.toBytes(tableName + Integer.toString(i)));
- put.add(families.get(i % families.size()).getName(), Bytes.toBytes("q"),
+ put.addColumn(families.get(i % families.size()).getName(), Bytes.toBytes("q"),
Bytes.toBytes("val"));
region.put(put);
}
@@ -1008,7 +1008,7 @@ public class TestWALReplay {
for (int j = 0; j < count; j++) {
byte[] qualifier = Bytes.toBytes(qualifierPrefix + Integer.toString(j));
Put p = new Put(rowName);
- p.add(family, qualifier, ee.currentTime(), rowName);
+ p.addColumn(family, qualifier, ee.currentTime(), rowName);
r.put(p);
puts.add(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
index c7ffe25..184fd14 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
@@ -195,7 +195,7 @@ public class TestMasterReplication {
// cluster 0
putAndWait(row3, famName, htables[0], htables[1]);
// now add a local edit to cluster 1
- htables[1].put(new Put(row4).add(famName, row4, row4));
+ htables[1].put(new Put(row4).addColumn(famName, row4, row4));
// re-enable replication from cluster 2 to cluster 0
enablePeer("1", 2);
// without HBASE-9158 the edit for row4 would have been marked with
@@ -373,7 +373,7 @@ public class TestMasterReplication {
private void putAndWait(byte[] row, byte[] fam, Table source, Table target)
throws Exception {
Put put = new Put(row);
- put.add(fam, row, row);
+ put.addColumn(fam, row, row);
source.put(put);
wait(row, target, false);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
index 9bd302e..82a0680 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
@@ -171,7 +171,7 @@ public class TestMultiSlaveReplication {
checkRow(row3,1,htable3);
Put p = new Put(row);
- p.add(famName, row, row);
+ p.addColumn(famName, row, row);
htable1.put(p);
// now roll the logs again
rollWALAndWait(utility1, htable1.getName(), row);
@@ -299,7 +299,7 @@ public class TestMultiSlaveReplication {
private void putAndWait(byte[] row, byte[] fam, Table source, Table... targets)
throws Exception {
Put put = new Put(row);
- put.add(fam, row, row);
+ put.addColumn(fam, row, row);
source.put(put);
Get get = new Get(row);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
index 169feba..c9b20d5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
@@ -439,7 +439,7 @@ public class TestPerTableCFReplication {
Table source, Table... targets)
throws Exception {
Put put = new Put(row);
- put.add(fam, row, val);
+ put.addColumn(fam, row, val);
source.put(put);
Get get = new Get(row);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java
index 4bb1842..ba2a7c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationChangingPeerRegionservers.java
@@ -115,7 +115,7 @@ public class TestReplicationChangingPeerRegionservers extends TestReplicationBas
private void doPutTest(byte[] row) throws IOException, InterruptedException {
Put put = new Put(row);
- put.add(famName, row, row);
+ put.addColumn(famName, row, row);
if (htable1 == null) {
htable1 = utility1.getConnection().getTable(tableName);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
index 3378c3f..06e1698 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
@@ -54,7 +54,7 @@ public class TestReplicationDisableInactivePeer extends TestReplicationBase {
byte[] rowkey = Bytes.toBytes("disable inactive peer");
Put put = new Put(rowkey);
- put.add(famName, row, row);
+ put.addColumn(famName, row, row);
htable1.put(put);
// wait for the sleep interval of the master cluster to become long
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
index e247349..66adf70 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
@@ -217,7 +217,7 @@ public class TestReplicationEndpoint extends TestReplicationBase {
private void doPut(final Connection connection, final byte [] row) throws IOException {
try (Table t = connection.getTable(tableName)) {
Put put = new Put(row);
- put.add(famName, row, row);
+ put.addColumn(famName, row, row);
t.put(put);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index 2dc3c89..7d51ef5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -123,15 +123,15 @@ public class TestReplicationSmallTests extends TestReplicationBase {
long t = EnvironmentEdgeManager.currentTime();
// create three versions for "row"
Put put = new Put(row);
- put.add(famName, row, t, v1);
+ put.addColumn(famName, row, t, v1);
htable1.put(put);
put = new Put(row);
- put.add(famName, row, t+1, v2);
+ put.addColumn(famName, row, t + 1, v2);
htable1.put(put);
put = new Put(row);
- put.add(famName, row, t+2, v3);
+ put.addColumn(famName, row, t + 2, v3);
htable1.put(put);
Get get = new Get(row);
@@ -203,7 +203,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
public void testSimplePutDelete() throws Exception {
LOG.info("testSimplePutDelete");
Put put = new Put(row);
- put.add(famName, row, row);
+ put.addColumn(famName, row, row);
htable1 = utility1.getConnection().getTable(tableName);
htable1.put(put);
@@ -252,7 +252,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
List<Put> puts = new ArrayList<>();
for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
Put put = new Put(Bytes.toBytes(i));
- put.add(famName, row, row);
+ put.addColumn(famName, row, row);
puts.add(put);
}
htable1.put(puts);
@@ -295,7 +295,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
byte[] rowkey = Bytes.toBytes("disable enable");
Put put = new Put(rowkey);
- put.add(famName, row, row);
+ put.addColumn(famName, row, row);
htable1.put(put);
Get get = new Get(rowkey);
@@ -338,7 +338,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
Thread.sleep(SLEEP_TIME);
byte[] rowKey = Bytes.toBytes("Won't be replicated");
Put put = new Put(rowKey);
- put.add(famName, row, row);
+ put.addColumn(famName, row, row);
htable1.put(put);
Get get = new Get(rowKey);
@@ -359,7 +359,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
Thread.sleep(SLEEP_TIME);
rowKey = Bytes.toBytes("do rep");
put = new Put(rowKey);
- put.add(famName, row, row);
+ put.addColumn(famName, row, row);
LOG.info("Adding new row");
htable1.put(put);
@@ -391,7 +391,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
List<Put> puts = new ArrayList<Put>();
for (int i = 0; i < NB_ROWS_IN_BIG_BATCH; i++) {
Put put = new Put(Bytes.toBytes(i));
- put.add(famName, row, row);
+ put.addColumn(famName, row, row);
puts.add(put);
}
htable1.setWriteBufferSize(1024);
@@ -472,8 +472,8 @@ public class TestReplicationSmallTests extends TestReplicationBase {
for (Result result : rs) {
put = new Put(result.getRow());
Cell firstVal = result.rawCells()[0];
- put.add(CellUtil.cloneFamily(firstVal),
- CellUtil.cloneQualifier(firstVal), Bytes.toBytes("diff data"));
+ put.addColumn(CellUtil.cloneFamily(firstVal), CellUtil.cloneQualifier(firstVal),
+ Bytes.toBytes("diff data"));
htable2.put(put);
}
Delete delete = new Delete(put.getRow());
@@ -579,7 +579,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
p = new Put(Bytes.toBytes("row" + i));
- p.add(famName, qualName, Bytes.toBytes("val" + i));
+ p.addColumn(famName, qualName, Bytes.toBytes("val" + i));
htable1.put(p);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
index 26acdab..13545b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSyncUpTool.java
@@ -225,21 +225,21 @@ public class TestReplicationSyncUpTool extends TestReplicationBase {
// 100 + 1 row to t1_syncup
for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
p = new Put(Bytes.toBytes("row" + i));
- p.add(famName, qualName, Bytes.toBytes("val" + i));
+ p.addColumn(famName, qualName, Bytes.toBytes("val" + i));
ht1Source.put(p);
}
p = new Put(Bytes.toBytes("row" + 9999));
- p.add(noRepfamName, qualName, Bytes.toBytes("val" + 9999));
+ p.addColumn(noRepfamName, qualName, Bytes.toBytes("val" + 9999));
ht1Source.put(p);
// 200 + 1 row to t2_syncup
for (int i = 0; i < NB_ROWS_IN_BATCH * 2; i++) {
p = new Put(Bytes.toBytes("row" + i));
- p.add(famName, qualName, Bytes.toBytes("val" + i));
+ p.addColumn(famName, qualName, Bytes.toBytes("val" + i));
ht2Source.put(p);
}
p = new Put(Bytes.toBytes("row" + 9999));
- p.add(noRepfamName, qualName, Bytes.toBytes("val" + 9999));
+ p.addColumn(noRepfamName, qualName, Bytes.toBytes("val" + 9999));
ht2Source.put(p);
// ensure replication completed
@@ -351,22 +351,22 @@ public class TestReplicationSyncUpTool extends TestReplicationBase {
// we should see 100 + 2 rows now
for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
p = new Put(Bytes.toBytes("row" + i));
- p.add(famName, qualName, Bytes.toBytes("val" + i));
+ p.addColumn(famName, qualName, Bytes.toBytes("val" + i));
ht1Source.put(p);
}
p = new Put(Bytes.toBytes("row" + 9998));
- p.add(noRepfamName, qualName, Bytes.toBytes("val" + 9998));
+ p.addColumn(noRepfamName, qualName, Bytes.toBytes("val" + 9998));
ht1Source.put(p);
// another 200 + 1 row to t1_syncup
// we should see 200 + 2 rows now
for (int i = 0; i < NB_ROWS_IN_BATCH * 2; i++) {
p = new Put(Bytes.toBytes("row" + i));
- p.add(famName, qualName, Bytes.toBytes("val" + i));
+ p.addColumn(famName, qualName, Bytes.toBytes("val" + i));
ht2Source.put(p);
}
p = new Put(Bytes.toBytes("row" + 9998));
- p.add(noRepfamName, qualName, Bytes.toBytes("val" + 9998));
+ p.addColumn(noRepfamName, qualName, Bytes.toBytes("val" + 9998));
ht2Source.put(p);
int rowCount_ht1Source = utility1.countRows(ht1Source);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
index 7f5b59c..988373f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
@@ -167,7 +167,7 @@ public class TestReplicationWithTags {
LOG.info("testSimplePutDelete");
Put put = new Put(ROW);
put.setAttribute("visibility", Bytes.toBytes("myTag3"));
- put.add(FAMILY, ROW, ROW);
+ put.addColumn(FAMILY, ROW, ROW);
htable1 = utility1.getConnection().getTable(TABLE_NAME);
htable1.put(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
index 6b0534e..cf01463 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
@@ -115,8 +115,8 @@ public class TestAccessControlFilter extends SecureTestUtil {
List<Put> puts = new ArrayList<Put>(100);
for (int i=0; i<100; i++) {
Put p = new Put(Bytes.toBytes(i));
- p.add(FAMILY, PRIVATE_COL, Bytes.toBytes("secret "+i));
- p.add(FAMILY, PUBLIC_COL, Bytes.toBytes("info "+i));
+ p.addColumn(FAMILY, PRIVATE_COL, Bytes.toBytes("secret " + i));
+ p.addColumn(FAMILY, PUBLIC_COL, Bytes.toBytes("info " + i));
puts.add(p);
}
table.put(puts);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 60d34ac..8d97915 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -920,7 +920,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Put p = new Put(TEST_ROW);
- p.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(1));
+ p.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(1));
try(Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(TEST_TABLE)) {
t.put(p);
@@ -984,7 +984,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Put p = new Put(TEST_ROW);
- p.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(1));
+ p.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(1));
try(Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(TEST_TABLE);) {
t.checkAndPut(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER,
@@ -1122,7 +1122,7 @@ public class TestAccessController extends SecureTestUtil {
byte[] row = TEST_ROW;
byte[] qualifier = TEST_QUALIFIER;
Put put = new Put(row);
- put.add(TEST_FAMILY, qualifier, Bytes.toBytes(1));
+ put.addColumn(TEST_FAMILY, qualifier, Bytes.toBytes(1));
Append append = new Append(row);
append.add(TEST_FAMILY, qualifier, Bytes.toBytes(2));
try(Connection conn = ConnectionFactory.createConnection(conf);
@@ -1251,8 +1251,8 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Put p = new Put(Bytes.toBytes("a"));
- p.add(family1, qualifier, Bytes.toBytes("v1"));
- p.add(family2, qualifier, Bytes.toBytes("v2"));
+ p.addColumn(family1, qualifier, Bytes.toBytes("v1"));
+ p.addColumn(family2, qualifier, Bytes.toBytes("v2"));
try (Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(tableName);) {
@@ -1266,7 +1266,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Put p = new Put(Bytes.toBytes("a"));
- p.add(family1, qualifier, Bytes.toBytes("v1"));
+ p.addColumn(family1, qualifier, Bytes.toBytes("v1"));
try (Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(tableName)) {
@@ -1280,7 +1280,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Put p = new Put(Bytes.toBytes("a"));
- p.add(family2, qualifier, Bytes.toBytes("v2"));
+ p.addColumn(family2, qualifier, Bytes.toBytes("v2"));
try (Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(tableName);) {
t.put(p);
@@ -1515,7 +1515,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Put p = new Put(TEST_ROW);
- p.add(family1, qualifier, Bytes.toBytes("v1"));
+ p.addColumn(family1, qualifier, Bytes.toBytes("v1"));
try (Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(tableName)) {
t.put(p);
@@ -2145,7 +2145,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Put put = new Put(Bytes.toBytes("test"));
- put.add(TEST_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value"));
+ put.addColumn(TEST_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value"));
table.put(put);
return null;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
index 7151c46..b939156 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController2.java
@@ -288,8 +288,8 @@ public class TestAccessController2 extends SecureTestUtil {
try (Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(AccessControlLists.ACL_TABLE_NAME)) {
- t.put(new Put(TEST_ROW).add(AccessControlLists.ACL_LIST_FAMILY, TEST_QUALIFIER,
- TEST_VALUE));
+ t.put(new Put(TEST_ROW).addColumn(AccessControlLists.ACL_LIST_FAMILY,
+ TEST_QUALIFIER, TEST_VALUE));
return null;
} finally {
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
index a98bfc3..549db3c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
@@ -169,20 +169,20 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
Table t = connection.getTable(TEST_TABLE.getTableName())) {
Put p;
// with ro ACL
- p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
p.setACL(writePerms);
t.put(p);
// with ro ACL
- p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
p.setACL(readPerms);
t.put(p);
- p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
p.setACL(writePerms);
t.put(p);
- p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
p.setACL(readPerms);
t.put(p);
- p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
p.setACL(writePerms);
t.put(p);
}
@@ -228,13 +228,13 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try(Connection connection = ConnectionFactory.createConnection(conf);
Table t = connection.getTable(TEST_TABLE.getTableName())) {
Put p;
- p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
p.setACL(writePerms);
t.put(p);
- p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
p.setACL(readPerms);
t.put(p);
- p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
p.setACL(writePerms);
t.put(p);
}
@@ -275,15 +275,15 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
// with rw ACL for "user1"
Put p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, ZERO);
- p.add(TEST_FAMILY1, TEST_Q2, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, ZERO);
p.setACL(user1.getShortName(), new Permission(Permission.Action.READ,
Permission.Action.WRITE));
t.put(p);
// with rw ACL for "user1"
p = new Put(TEST_ROW2);
- p.add(TEST_FAMILY1, TEST_Q1, ZERO);
- p.add(TEST_FAMILY1, TEST_Q2, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, ZERO);
p.setACL(user1.getShortName(), new Permission(Permission.Action.READ,
Permission.Action.WRITE));
t.put(p);
@@ -300,8 +300,8 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
// with rw ACL for "user1", "user2" and "@group"
Put p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, ZERO);
- p.add(TEST_FAMILY1, TEST_Q2, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, ZERO);
Map<String, Permission> perms =
prepareCellPermissions(new String[] { user1.getShortName(), user2.getShortName(),
AuthUtil.toGroupEntry(GROUP) }, Action.READ, Action.WRITE);
@@ -309,8 +309,8 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
t.put(p);
// with rw ACL for "user1", "user2" and "@group"
p = new Put(TEST_ROW2);
- p.add(TEST_FAMILY1, TEST_Q1, ZERO);
- p.add(TEST_FAMILY1, TEST_Q2, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, ZERO);
p.setACL(perms);
t.put(p);
}
@@ -391,17 +391,17 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
// Store a read write ACL without a timestamp, server will use current time
- Put p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q2, ONE);
+ Put p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q2, ONE);
Map<String, Permission> readAndWritePerms =
prepareCellPermissions(usersAndGroups, Action.READ, Action.WRITE);
p.setACL(readAndWritePerms);
t.put(p);
- p = new Put(TEST_ROW).add(TEST_FAMILY2, TEST_Q2, ONE);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY2, TEST_Q2, ONE);
p.setACL(readAndWritePerms);
t.put(p);
LOG.info("Stored at current time");
// Store read only ACL at a future time
- p = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1,
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1,
EnvironmentEdgeManager.currentTime() + 1000000, ZERO);
p.setACL(prepareCellPermissions(new String[]{ USER_OTHER.getShortName(),
AuthUtil.toGroupEntry(GROUP)}, Action.READ));
@@ -484,8 +484,8 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
// This version (TS = 123) with rw ACL for USER_OTHER and USER_OTHER2
Put p = new Put(TEST_ROW);
- p.add(TEST_FAMILY1, TEST_Q1, 123L, ZERO);
- p.add(TEST_FAMILY1, TEST_Q2, 123L, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 123L, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 123L, ZERO);
p.setACL(prepareCellPermissions(
new String[] { USER_OTHER.getShortName(), AuthUtil.toGroupEntry(GROUP),
USER_OTHER2.getShortName() }, Permission.Action.READ, Permission.Action.WRITE));
@@ -493,8 +493,8 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
// This version (TS = 125) with rw ACL for USER_OTHER
p = new Put(TEST_ROW);
- p.add(TEST_FAMILY1, TEST_Q1, 125L, ONE);
- p.add(TEST_FAMILY1, TEST_Q2, 125L, ONE);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 125L, ONE);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 125L, ONE);
p.setACL(prepareCellPermissions(
new String[] { USER_OTHER.getShortName(), AuthUtil.toGroupEntry(GROUP) },
Action.READ, Action.WRITE));
@@ -502,8 +502,8 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
// This version (TS = 127) with rw ACL for USER_OTHER
p = new Put(TEST_ROW);
- p.add(TEST_FAMILY1, TEST_Q1, 127L, TWO);
- p.add(TEST_FAMILY1, TEST_Q2, 127L, TWO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, 127L, TWO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, 127L, TWO);
p.setACL(prepareCellPermissions(
new String[] { USER_OTHER.getShortName(), AuthUtil.toGroupEntry(GROUP) },
Action.READ, Action.WRITE));
@@ -570,36 +570,36 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
new String[] { user2.getShortName(), AuthUtil.toGroupEntry(GROUP),
USER_OWNER.getShortName() }, Action.READ, Action.WRITE);
Put p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q2, 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY2, TEST_Q1, 123, ZERO);
- p.add(TEST_FAMILY2, TEST_Q2, 123, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q1, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q2, (long) 123, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY2, TEST_Q1, 125, ZERO);
- p.add(TEST_FAMILY2, TEST_Q2, 125, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q1, (long) 125, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q2, (long) 125, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q2, 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY2, TEST_Q1, 129, ZERO);
- p.add(TEST_FAMILY2, TEST_Q2, 129, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q1, (long) 129, ZERO);
+ p.addColumn(TEST_FAMILY2, TEST_Q2, (long) 129, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
}
@@ -676,20 +676,20 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
new String[] { user2.getShortName(), AuthUtil.toGroupEntry(GROUP),
USER_OWNER.getShortName() }, Action.READ, Action.WRITE);
Put p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q2, 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q2, 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
}
@@ -768,20 +768,20 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
permsU2andGUandOwner.put(USER_OWNER.getShortName(), new Permission(Permission.Action.READ,
Permission.Action.WRITE));
Put p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q2, 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO);
p.setACL(permsU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q2, 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
}
@@ -799,8 +799,8 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
Put p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 125, ZERO);
- p.add(TEST_FAMILY1, TEST_Q2, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 125, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, ZERO);
p.setACL(user2.getShortName(), new Permission(Permission.Action.READ,
Permission.Action.WRITE));
t.put(p);
@@ -864,26 +864,26 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
AuthUtil.toGroupEntry(GROUP) }, Action.READ, Action.WRITE);
Put p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 120, ZERO);
- p.add(TEST_FAMILY1, TEST_Q2, 120, ZERO);
- p.add(TEST_FAMILY1, TEST_Q3, 120, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 120, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 120, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q3, (long) 120, ZERO);
p.setACL(permsU1andU2andGUandOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 123, ZERO);
- p.add(TEST_FAMILY1, TEST_Q2, 123, ZERO);
- p.add(TEST_FAMILY1, TEST_Q3, 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 123, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q3, (long) 123, ZERO);
p.setACL(permsU1andOwner);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q1, 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q1, (long) 127, ZERO);
p.setACL(permsU1_U2andGU);
t.put(p);
p = new Put(TEST_ROW1);
- p.add(TEST_FAMILY1, TEST_Q2, 127, ZERO);
+ p.addColumn(TEST_FAMILY1, TEST_Q2, (long) 127, ZERO);
p.setACL(user2.getShortName(), new Permission(Permission.Action.READ));
t.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
index f73fcfd..7f1e720 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
@@ -161,17 +161,17 @@ public class TestCellACLs extends SecureTestUtil {
Table t = connection.getTable(TEST_TABLE.getTableName())) {
Put p;
// with ro ACL
- p = new Put(TEST_ROW).add(TEST_FAMILY, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q1, ZERO);
p.setACL(prepareCellPermissions(usersAndGroups, Action.READ));
t.put(p);
// with rw ACL
- p = new Put(TEST_ROW).add(TEST_FAMILY, TEST_Q2, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q2, ZERO);
p.setACL(prepareCellPermissions(usersAndGroups, Action.READ, Action.WRITE));
t.put(p);
// no ACL
p = new Put(TEST_ROW)
- .add(TEST_FAMILY, TEST_Q3, ZERO)
- .add(TEST_FAMILY, TEST_Q4, ZERO);
+ .addColumn(TEST_FAMILY, TEST_Q3, ZERO)
+ .addColumn(TEST_FAMILY, TEST_Q4, ZERO);
t.put(p);
}
return null;
@@ -401,7 +401,7 @@ public class TestCellACLs extends SecureTestUtil {
try(Connection connection = ConnectionFactory.createConnection(conf);
Table t = connection.getTable(TEST_TABLE.getTableName())) {
Put p;
- p = new Put(TEST_ROW).add(TEST_FAMILY, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q1, ZERO);
t.put(p);
}
return null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
index 06bc616..74105e1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestScanEarlyTermination.java
@@ -161,14 +161,14 @@ public class TestScanEarlyTermination extends SecureTestUtil {
Connection connection = ConnectionFactory.createConnection(conf);
Table t = connection.getTable(TEST_TABLE.getTableName());
try {
- Put put = new Put(TEST_ROW).add(TEST_FAMILY1, TEST_Q1, ZERO);
+ Put put = new Put(TEST_ROW).addColumn(TEST_FAMILY1, TEST_Q1, ZERO);
t.put(put);
// Set a READ cell ACL for USER_OTHER on this value in FAMILY2
- put = new Put(TEST_ROW).add(TEST_FAMILY2, TEST_Q1, ZERO);
+ put = new Put(TEST_ROW).addColumn(TEST_FAMILY2, TEST_Q1, ZERO);
put.setACL(USER_OTHER.getShortName(), new Permission(Action.READ));
t.put(put);
// Set an empty cell ACL for USER_OTHER on this other value in FAMILY2
- put = new Put(TEST_ROW).add(TEST_FAMILY2, TEST_Q2, ZERO);
+ put = new Put(TEST_ROW).addColumn(TEST_FAMILY2, TEST_Q2, ZERO);
put.setACL(USER_OTHER.getShortName(), new Permission());
t.put(put);
} finally {
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
index 2934ad1..f7def51 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
@@ -289,9 +289,9 @@ public class TestTablePermissions {
Table table = UTIL.getConnection().getTable(TEST_TABLE);
table.put(new Put(Bytes.toBytes("row1"))
- .add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes("v1")));
+ .addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes("v1")));
table.put(new Put(Bytes.toBytes("row2"))
- .add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes("v2")));
+ .addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes("v2")));
Admin admin = UTIL.getHBaseAdmin();
admin.split(TEST_TABLE);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
index 880056f..4c6df38 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
@@ -1010,17 +1010,17 @@ public class TestWithDisabledAuthorization extends SecureTestUtil {
Table t = connection.getTable(TEST_TABLE.getTableName())) {
Put p;
// with ro ACL
- p = new Put(TEST_ROW).add(TEST_FAMILY, TEST_Q1, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q1, ZERO);
p.setACL(USER_NONE.getShortName(), new Permission(Action.READ));
t.put(p);
// with rw ACL
- p = new Put(TEST_ROW).add(TEST_FAMILY, TEST_Q2, ZERO);
+ p = new Put(TEST_ROW).addColumn(TEST_FAMILY, TEST_Q2, ZERO);
p.setACL(USER_NONE.getShortName(), new Permission(Action.READ, Action.WRITE));
t.put(p);
// no ACL
p = new Put(TEST_ROW)
- .add(TEST_FAMILY, TEST_Q3, ZERO)
- .add(TEST_FAMILY, TEST_Q4, ZERO);
+ .addColumn(TEST_FAMILY, TEST_Q3, ZERO)
+ .addColumn(TEST_FAMILY, TEST_Q4, ZERO);
t.put(p);
}
return null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
index 39b7f1b..9da2531 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
@@ -106,15 +106,15 @@ public class TestDefaultScanLabelGeneratorStack {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = TEST_UTIL.createTable(tableName, CF)) {
Put put = new Put(ROW_1);
- put.add(CF, Q1, HConstants.LATEST_TIMESTAMP, value1);
+ put.addColumn(CF, Q1, HConstants.LATEST_TIMESTAMP, value1);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
put = new Put(ROW_1);
- put.add(CF, Q2, HConstants.LATEST_TIMESTAMP, value2);
+ put.addColumn(CF, Q2, HConstants.LATEST_TIMESTAMP, value2);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(ROW_1);
- put.add(CF, Q3, HConstants.LATEST_TIMESTAMP, value3);
+ put.addColumn(CF, Q3, HConstants.LATEST_TIMESTAMP, value3);
table.put(put);
return null;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
index 45c8ef5..a0703fc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestEnforcingScanLabelGenerator.java
@@ -103,15 +103,15 @@ public class TestEnforcingScanLabelGenerator {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = TEST_UTIL.createTable(tableName, CF)) {
Put put = new Put(ROW_1);
- put.add(CF, Q1, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(CF, Q1, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
put = new Put(ROW_1);
- put.add(CF, Q2, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(CF, Q2, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(ROW_1);
- put.add(CF, Q3, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(CF, Q3, HConstants.LATEST_TIMESTAMP, value);
table.put(put);
return null;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
index e885983..ab2bacc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
@@ -575,12 +575,12 @@ public abstract class TestVisibilityLabels {
try (Table table = TEST_UTIL.createTable(tableName, fam)) {
byte[] row1 = Bytes.toBytes("row1");
Put put = new Put(row1);
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(SECRET + " & " + CONFIDENTIAL));
table.checkAndPut(row1, fam, qual, null, put);
byte[] row2 = Bytes.toBytes("row2");
put = new Put(row2);
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.checkAndPut(row2, fam, qual, null, put);
@@ -602,7 +602,7 @@ public abstract class TestVisibilityLabels {
byte[] row1 = Bytes.toBytes("row1");
byte[] val = Bytes.toBytes(1L);
Put put = new Put(row1);
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, val);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, val);
put.setCellVisibility(new CellVisibility(SECRET + " & " + CONFIDENTIAL));
table.put(put);
Get get = new Get(row1);
@@ -628,7 +628,7 @@ public abstract class TestVisibilityLabels {
byte[] row1 = Bytes.toBytes("row1");
byte[] val = Bytes.toBytes("a");
Put put = new Put(row1);
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, val);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, val);
put.setCellVisibility(new CellVisibility(SECRET + " & " + CONFIDENTIAL));
table.put(put);
Get get = new Get(row1);
@@ -708,32 +708,32 @@ public abstract class TestVisibilityLabels {
TEST_UTIL.getHBaseAdmin().createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(r1);
- put.add(fam, qual, 3l, v1);
- put.add(fam, qual2, 3l, v1);
- put.add(fam2, qual, 3l, v1);
- put.add(fam2, qual2, 3l, v1);
+ put.addColumn(fam, qual, 3l, v1);
+ put.addColumn(fam, qual2, 3l, v1);
+ put.addColumn(fam2, qual, 3l, v1);
+ put.addColumn(fam2, qual2, 3l, v1);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
put = new Put(r1);
- put.add(fam, qual, 4l, v2);
- put.add(fam, qual2, 4l, v2);
- put.add(fam2, qual, 4l, v2);
- put.add(fam2, qual2, 4l, v2);
+ put.addColumn(fam, qual, 4l, v2);
+ put.addColumn(fam, qual2, 4l, v2);
+ put.addColumn(fam2, qual, 4l, v2);
+ put.addColumn(fam2, qual2, 4l, v2);
put.setCellVisibility(new CellVisibility(PRIVATE));
table.put(put);
put = new Put(r2);
- put.add(fam, qual, 3l, v1);
- put.add(fam, qual2, 3l, v1);
- put.add(fam2, qual, 3l, v1);
- put.add(fam2, qual2, 3l, v1);
+ put.addColumn(fam, qual, 3l, v1);
+ put.addColumn(fam, qual2, 3l, v1);
+ put.addColumn(fam2, qual, 3l, v1);
+ put.addColumn(fam2, qual2, 3l, v1);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
put = new Put(r2);
- put.add(fam, qual, 4l, v2);
- put.add(fam, qual2, 4l, v2);
- put.add(fam2, qual, 4l, v2);
- put.add(fam2, qual2, 4l, v2);
+ put.addColumn(fam, qual, 4l, v2);
+ put.addColumn(fam, qual2, 4l, v2);
+ put.addColumn(fam2, qual, 4l, v2);
+ put.addColumn(fam2, qual2, 4l, v2);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
@@ -792,11 +792,11 @@ public abstract class TestVisibilityLabels {
TEST_UTIL.getHBaseAdmin().createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)){
Put p1 = new Put(row1);
- p1.add(fam, qual, value);
+ p1.addColumn(fam, qual, value);
p1.setCellVisibility(new CellVisibility(CONFIDENTIAL));
Put p2 = new Put(row1);
- p2.add(fam, qual2, value);
+ p2.addColumn(fam, qual2, value);
p2.setCellVisibility(new CellVisibility(SECRET));
RowMutations rm = new RowMutations(row1);
@@ -828,11 +828,11 @@ public abstract class TestVisibilityLabels {
TEST_UTIL.getHBaseAdmin().createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put p1 = new Put(row1);
- p1.add(fam, qual, value);
+ p1.addColumn(fam, qual, value);
p1.setCellVisibility(new CellVisibility(CONFIDENTIAL));
Put p2 = new Put(row1);
- p2.add(fam, qual2, value);
+ p2.addColumn(fam, qual2, value);
p2.setCellVisibility(new CellVisibility(SECRET));
RowMutations rm = new RowMutations(row1);
@@ -856,7 +856,7 @@ public abstract class TestVisibilityLabels {
List<Put> puts = new ArrayList<Put>();
for (int i = 0; i < labelExps.length; i++) {
Put put = new Put(Bytes.toBytes("row" + (i+1)));
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(labelExps[i]));
puts.add(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index 7fa240e..00186c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -381,7 +381,7 @@ public class TestVisibilityLabelsReplication {
List<Put> puts = new ArrayList<Put>();
for (String labelExp : labelExps) {
Put put = new Put(Bytes.toBytes("row" + i));
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(labelExp));
put.setAttribute(NON_VISIBILITY, Bytes.toBytes(TEMP));
puts.add(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
index a6192fc..f67296d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
@@ -318,7 +318,7 @@ public class TestVisibilityLabelsWithACL {
List<Put> puts = new ArrayList<Put>();
for (String labelExp : labelExps) {
Put put = new Put(Bytes.toBytes("row" + i));
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(labelExp));
puts.add(put);
i++;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
index 2e549b2..c67d869 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
@@ -480,11 +480,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, value);
+ put.addColumn(fam, qual, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, value);
+ put.addColumn(fam, qual, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
TEST_UTIL.getHBaseAdmin().flush(tableName);
@@ -511,7 +511,7 @@ public class TestVisibilityLabelsWithDeletes {
Result[] next = scanner.next(3);
assertEquals(next.length, 1);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, value1);
+ put.addColumn(fam, qual, value1);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
actiona = new PrivilegedExceptionAction<Void>() {
@@ -556,11 +556,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, value);
+ put.addColumn(fam, qual, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, value);
+ put.addColumn(fam, qual, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
TEST_UTIL.getHBaseAdmin().flush(tableName);
@@ -587,7 +587,7 @@ public class TestVisibilityLabelsWithDeletes {
Result[] next = scanner.next(3);
assertEquals(next.length, 1);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, value1);
+ put.addColumn(fam, qual, value1);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
actiona = new PrivilegedExceptionAction<Void>() {
@@ -632,11 +632,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123l, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 124l, value1);
+ put.addColumn(fam, qual, 124l, value1);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
@@ -686,11 +686,11 @@ public class TestVisibilityLabelsWithDeletes {
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put1 = new Put(Bytes.toBytes("row1"));
- put1.add(fam, qual, 123l, value);
+ put1.addColumn(fam, qual, 123l, value);
put1.setCellVisibility(new CellVisibility(CONFIDENTIAL));
Put put2 = new Put(Bytes.toBytes("row1"));
- put2.add(fam, qual, 123l, value1);
+ put2.addColumn(fam, qual, 123l, value1);
put2.setCellVisibility(new CellVisibility(SECRET));
table.put(createList(put1, put2));
@@ -861,36 +861,36 @@ public class TestVisibilityLabelsWithDeletes {
List<Put> puts = new ArrayList<Put>();
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123l, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 124l, value);
+ put.addColumn(fam, qual, 124l, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 125l, value);
+ put.addColumn(fam, qual, 125l, value);
put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 126l, value);
+ put.addColumn(fam, qual, 126l, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127l, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
TEST_UTIL.getHBaseAdmin().flush(tableName);
put = new Put(Bytes.toBytes("row2"));
- put.add(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127l, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|(" + TOPSECRET
+ "&" + SECRET + ")"));
puts.add(put);
@@ -911,28 +911,28 @@ public class TestVisibilityLabelsWithDeletes {
List<Put> puts = new ArrayList<>();
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123l, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 124l, value);
+ put.addColumn(fam, qual, 124l, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 125l, value);
+ put.addColumn(fam, qual, 125l, value);
put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual1, 126l, value);
+ put.addColumn(fam, qual1, 126l, value);
put.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual2, 127l, value);
+ put.addColumn(fam, qual2, 127l, value);
put.setCellVisibility(new CellVisibility("(" + CONFIDENTIAL + "&" + PRIVATE + ")|("
+ TOPSECRET + "&" + SECRET+")"));
puts.add(put);
@@ -952,23 +952,23 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
List<Put> puts = new ArrayList<>();
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123l, value);
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 124l, value);
+ put.addColumn(fam, qual, 124l, value);
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 125l, value);
+ put.addColumn(fam, qual, 125l, value);
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 126l, value);
+ put.addColumn(fam, qual, 126l, value);
puts.add(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127l, value);
puts.add(put);
Table table = TEST_UTIL.getConnection().getTable(tableName);
@@ -977,7 +977,7 @@ public class TestVisibilityLabelsWithDeletes {
TEST_UTIL.getHBaseAdmin().flush(tableName);
put = new Put(Bytes.toBytes("row2"));
- put.add(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127l, value);
table.put(put);
return table;
@@ -1116,7 +1116,7 @@ public class TestVisibilityLabelsWithDeletes {
try (Table table = doPuts(tableName)) {
TEST_UTIL.getHBaseAdmin().flush(tableName);
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 128l, value);
+ put.addColumn(fam, qual, 128l, value);
put.setCellVisibility(new CellVisibility(TOPSECRET));
table.put(put);
PrivilegedExceptionAction<Void> actiona = new PrivilegedExceptionAction<Void>() {
@@ -1176,7 +1176,7 @@ public class TestVisibilityLabelsWithDeletes {
current.getRowLength(), row2, 0, row2.length));
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 129l, value);
+ put.addColumn(fam, qual, 129l, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
@@ -1220,7 +1220,7 @@ public class TestVisibilityLabelsWithDeletes {
SUPERUSER.runAs(actiona);
TEST_UTIL.getHBaseAdmin().flush(tableName);
Put put = new Put(Bytes.toBytes("row3"));
- put.add(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127l, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL + "&" + PRIVATE));
table.put(put);
TEST_UTIL.getHBaseAdmin().flush(tableName);
@@ -1380,11 +1380,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual1, 125l, value);
+ put.addColumn(fam, qual1, 125l, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual1, 126l, value);
+ put.addColumn(fam, qual1, 126l, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
TEST_UTIL.getHBaseAdmin().flush(tableName);
@@ -1428,11 +1428,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual1, 125l, value);
+ put.addColumn(fam, qual1, 125l, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual1, 126l, value);
+ put.addColumn(fam, qual1, 126l, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
TEST_UTIL.getHBaseAdmin().flush(tableName);
@@ -1647,7 +1647,7 @@ public class TestVisibilityLabelsWithDeletes {
TEST_UTIL.getHBaseAdmin().flush(tableName);
Put put = new Put(Bytes.toBytes("row3"));
- put.add(fam, qual, 127l, value);
+ put.addColumn(fam, qual, 127l, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL + "&" + PRIVATE));
table.put(put);
TEST_UTIL.getHBaseAdmin().flush(tableName);
@@ -2782,11 +2782,11 @@ public class TestVisibilityLabelsWithDeletes {
hBaseAdmin.createTable(desc);
try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
Put put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 123l, value);
+ put.addColumn(fam, qual, 123l, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(Bytes.toBytes("row1"));
- put.add(fam, qual, 124l, value);
+ put.addColumn(fam, qual, 124l, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL + "|" + PRIVATE));
table.put(put);
TEST_UTIL.getHBaseAdmin().flush(tableName);
@@ -2941,7 +2941,7 @@ public class TestVisibilityLabelsWithDeletes {
List<Put> puts = new ArrayList<Put>();
for (String labelExp : labelExps) {
Put put = new Put(Bytes.toBytes("row" + i));
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(labelExp));
puts.add(put);
table.put(put);
@@ -2959,7 +2959,7 @@ public class TestVisibilityLabelsWithDeletes {
List<Put> puts = new ArrayList<Put>();
for (String labelExp : labelExps) {
Put put = new Put(Bytes.toBytes("row" + i));
- put.add(fam, qual, timestamp[i - 1], value);
+ put.addColumn(fam, qual, timestamp[i - 1], value);
put.setCellVisibility(new CellVisibility(labelExp));
puts.add(put);
table.put(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
index c48b4c4..b6a1c6d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithSLGStack.java
@@ -86,11 +86,11 @@ public class TestVisibilityLabelsWithSLGStack {
TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
try (Table table = TEST_UTIL.createTable(tableName, CF)) {
Put put = new Put(ROW_1);
- put.add(CF, Q1, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(CF, Q1, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
put = new Put(ROW_1);
- put.add(CF, Q2, HConstants.LATEST_TIMESTAMP, value);
+ put.addColumn(CF, Q2, HConstants.LATEST_TIMESTAMP, value);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
index 6c1a47b..1410c78 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
@@ -120,15 +120,15 @@ public class TestVisibilityLablesWithGroups {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
Put put = new Put(ROW_1);
- put.add(CF, Q1, HConstants.LATEST_TIMESTAMP, value1);
+ put.addColumn(CF, Q1, HConstants.LATEST_TIMESTAMP, value1);
put.setCellVisibility(new CellVisibility(SECRET));
table.put(put);
put = new Put(ROW_1);
- put.add(CF, Q2, HConstants.LATEST_TIMESTAMP, value2);
+ put.addColumn(CF, Q2, HConstants.LATEST_TIMESTAMP, value2);
put.setCellVisibility(new CellVisibility(CONFIDENTIAL));
table.put(put);
put = new Put(ROW_1);
- put.add(CF, Q3, HConstants.LATEST_TIMESTAMP, value3);
+ put.addColumn(CF, Q3, HConstants.LATEST_TIMESTAMP, value3);
table.put(put);
}
return null;
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
index 0b0d9d0..96203b5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityWithCheckAuths.java
@@ -137,7 +137,7 @@ public class TestVisibilityWithCheckAuths {
Table table = connection.getTable(tableName)) {
Put p = new Put(row1);
p.setCellVisibility(new CellVisibility(PUBLIC + "&" + TOPSECRET));
- p.add(fam, qual, 125l, value);
+ p.addColumn(fam, qual, 125l, value);
table.put(p);
Assert.fail("Testcase should fail with AccesDeniedException");
} catch (Throwable t) {
@@ -177,7 +177,7 @@ public class TestVisibilityWithCheckAuths {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
Put put = new Put(row1);
- put.add(fam, qual, HConstants.LATEST_TIMESTAMP, val);
+ put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, val);
put.setCellVisibility(new CellVisibility(TOPSECRET));
table.put(put);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index 8a22f37..39c8f21 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -710,7 +710,7 @@ public class SnapshotTestingUtils {
Put put = new Put(key);
put.setDurability(Durability.SKIP_WAL);
for (byte[] family: families) {
- put.add(family, q, value);
+ put.addColumn(family, q, value);
}
return put;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java
index 9167c7d..205f1d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java
@@ -42,7 +42,6 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-
@Category({MiscTests.class, MediumTests.class})
public class TestHTraceHooks {
@@ -117,7 +116,7 @@ public class TestHTraceHooks {
assertTrue(spans.size() > 5);
Put put = new Put("row".getBytes());
- put.add(FAMILY_BYTES, "col".getBytes(), "value".getBytes());
+ put.addColumn(FAMILY_BYTES, "col".getBytes(), "value".getBytes());
TraceScope putSpan = Trace.startSpan("doing put", Sampler.ALWAYS);
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
index 8e8bb41..e91d0e1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/BaseTestHBaseFsck.java
@@ -90,7 +90,6 @@ import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
import org.apache.hadoop.hbase.util.HBaseFsck.HbckInfo;
import org.apache.hadoop.hbase.util.HBaseFsck.TableInfo;
import org.apache.hadoop.hbase.util.hbck.HFileCorruptionChecker;
-import org.apache.hadoop.hbase.util.hbck.HbckTestingUtil;
import org.apache.zookeeper.KeeperException;
import org.junit.Assert;
import org.junit.Before;
@@ -98,7 +97,6 @@ import org.junit.Ignore;
import org.junit.Test;
import org.junit.rules.TestName;
-
/**
* This is the base class for HBaseFsck's ability to detect reasons for inconsistent tables.
*
@@ -313,7 +311,7 @@ public class BaseTestHBaseFsck {
List<Put> puts = new ArrayList<Put>();
for (byte[] row : ROWKEYS) {
Put p = new Put(row);
- p.add(FAM, Bytes.toBytes("val"), row);
+ p.addColumn(FAM, Bytes.toBytes("val"), row);
puts.add(p);
}
tbl.put(puts);
@@ -468,7 +466,7 @@ public class BaseTestHBaseFsck {
tbl = connection.getTable(desc.getTableName());
for (int i = 0; i < 5; i++) {
Put p1 = new Put(("r" + i).getBytes());
- p1.add(Bytes.toBytes("f"), "q1".getBytes(), "v".getBytes());
+ p1.addColumn(Bytes.toBytes("f"), "q1".getBytes(), "v".getBytes());
tbl.put(p1);
}
admin.flush(desc.getTableName());
[8/8] hbase git commit: HBASE-14673 Exorcise deprecated
Delete#delete* api
Posted by jm...@apache.org.
HBASE-14673 Exorcise deprecated Delete#delete* api
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/094d65e6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/094d65e6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/094d65e6
Branch: refs/heads/master
Commit: 094d65e6f52f5b3cb1210c4abbea2fb14bcbdf15
Parents: de9555c
Author: Jonathan M Hsieh <jm...@apache.org>
Authored: Wed Oct 21 16:35:50 2015 -0700
Committer: Jonathan M Hsieh <jm...@apache.org>
Committed: Thu Oct 29 11:15:34 2015 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/client/Delete.java | 100 +------------------
.../hadoop/hbase/protobuf/ProtobufUtil.java | 8 +-
.../hbase/client/TestDeleteTimeStamp.java | 2 +-
.../hadoop/hbase/client/TestOperation.java | 2 +-
.../coprocessor/example/BulkDeleteEndpoint.java | 8 +-
...egrationTestBigLinkedListWithVisibility.java | 2 +-
.../apache/hadoop/hbase/rest/RowResource.java | 14 +--
.../hbase/rest/TestScannersWithFilters.java | 16 +--
.../apache/hadoop/hbase/quotas/QuotaUtil.java | 2 +-
.../org/apache/hadoop/hbase/util/HBaseFsck.java | 4 +-
.../hadoop/hbase/HBaseTestingUtility.java | 6 +-
.../hadoop/hbase/client/TestFromClientSide.java | 98 +++++++++---------
.../hadoop/hbase/client/TestMultiParallel.java | 4 +-
.../hbase/client/TestMultipleTimestamps.java | 8 +-
.../client/TestPutDeleteEtcCellIteration.java | 2 +-
.../client/TestScannersFromClientSide.java | 2 +-
.../hbase/client/TestTimestampsFilter.java | 2 +-
.../TestRegionObserverInterface.java | 12 +--
.../apache/hadoop/hbase/filter/TestFilter.java | 16 +--
.../hbase/mapreduce/TestImportExport.java | 2 +-
.../TestImportTSVWithVisibilityLabels.java | 2 +-
.../hadoop/hbase/mapreduce/TestWALPlayer.java | 2 +-
.../TestMasterOperationsForRegionReplicas.java | 6 +-
.../hbase/regionserver/TestAtomicOperation.java | 8 +-
.../hbase/regionserver/TestBlocksRead.java | 6 +-
.../hbase/regionserver/TestCompaction.java | 2 +-
.../hadoop/hbase/regionserver/TestHRegion.java | 34 +++----
.../hbase/regionserver/TestKeepDeletes.java | 48 ++++-----
.../hbase/regionserver/TestMajorCompaction.java | 2 +-
.../hbase/regionserver/TestMinorCompaction.java | 10 +-
.../regionserver/TestMultiColumnScanner.java | 2 +-
.../hadoop/hbase/regionserver/TestScanner.java | 2 +-
.../regionserver/TestSeekOptimizations.java | 4 +-
.../replication/TestPerTableCFReplication.java | 2 +-
.../replication/TestReplicationSmallTests.java | 4 +-
.../security/access/TestAccessController.java | 14 +--
.../access/TestCellACLWithMultipleVersions.java | 18 ++--
.../hbase/security/access/TestCellACLs.java | 2 +-
.../ExpAsStringVisibilityLabelServiceImpl.java | 2 +-
.../TestVisibilityLabelsWithDeletes.java | 12 +--
.../hadoop/hbase/util/MultiThreadedUpdater.java | 2 +-
hbase-shell/src/main/ruby/hbase/table.rb | 2 +-
.../hadoop/hbase/thrift/ThriftServerRunner.java | 12 +--
.../hadoop/hbase/thrift2/ThriftUtilities.java | 12 +--
44 files changed, 212 insertions(+), 308 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
index 86c2462..1e4f79f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
@@ -178,19 +178,6 @@ public class Delete extends Mutation implements Comparable<Row> {
return this;
}
- /**
- * Delete all versions of all columns of the specified family.
- * <p>
- * Overrides previous calls to deleteColumn and deleteColumns for the
- * specified family.
- * @param family family name
- * @return this for invocation chaining
- * @deprecated Since 1.0.0. Use {@link #addFamily(byte[])}
- */
- @Deprecated
- public Delete deleteFamily(byte [] family) {
- return addFamily(family);
- }
/**
* Delete all versions of all columns of the specified family.
@@ -201,7 +188,7 @@ public class Delete extends Mutation implements Comparable<Row> {
* @return this for invocation chaining
*/
public Delete addFamily(final byte [] family) {
- this.deleteFamily(family, this.ts);
+ this.addFamily(family, this.ts);
return this;
}
@@ -214,22 +201,6 @@ public class Delete extends Mutation implements Comparable<Row> {
* @param family family name
* @param timestamp maximum version timestamp
* @return this for invocation chaining
- * @deprecated Since 1.0.0. Use {@link #addFamily(byte[], long)}
- */
- @Deprecated
- public Delete deleteFamily(byte [] family, long timestamp) {
- return addFamily(family, timestamp);
- }
-
- /**
- * Delete all columns of the specified family with a timestamp less than
- * or equal to the specified timestamp.
- * <p>
- * Overrides previous calls to deleteColumn and deleteColumns for the
- * specified family.
- * @param family family name
- * @param timestamp maximum version timestamp
- * @return this for invocation chaining
*/
public Delete addFamily(final byte [] family, final long timestamp) {
if (timestamp < 0) {
@@ -253,19 +224,6 @@ public class Delete extends Mutation implements Comparable<Row> {
* @param family family name
* @param timestamp version timestamp
* @return this for invocation chaining
- * @deprecated Since hbase-1.0.0. Use {@link #addFamilyVersion(byte[], long)}
- */
- @Deprecated
- public Delete deleteFamilyVersion(byte [] family, long timestamp) {
- return addFamilyVersion(family, timestamp);
- }
-
- /**
- * Delete all columns of the specified family with a timestamp equal to
- * the specified timestamp.
- * @param family family name
- * @param timestamp version timestamp
- * @return this for invocation chaining
*/
public Delete addFamilyVersion(final byte [] family, final long timestamp) {
List<Cell> list = familyMap.get(family);
@@ -283,18 +241,6 @@ public class Delete extends Mutation implements Comparable<Row> {
* @param family family name
* @param qualifier column qualifier
* @return this for invocation chaining
- * @deprecated Since hbase-1.0.0. Use {@link #addColumns(byte[], byte[])}
- */
- @Deprecated
- public Delete deleteColumns(byte [] family, byte [] qualifier) {
- return addColumns(family, qualifier);
- }
-
- /**
- * Delete all versions of the specified column.
- * @param family family name
- * @param qualifier column qualifier
- * @return this for invocation chaining
*/
public Delete addColumns(final byte [] family, final byte [] qualifier) {
addColumns(family, qualifier, this.ts);
@@ -308,20 +254,6 @@ public class Delete extends Mutation implements Comparable<Row> {
* @param qualifier column qualifier
* @param timestamp maximum version timestamp
* @return this for invocation chaining
- * @deprecated Since hbase-1.0.0. Use {@link #addColumns(byte[], byte[], long)}
- */
- @Deprecated
- public Delete deleteColumns(byte [] family, byte [] qualifier, long timestamp) {
- return addColumns(family, qualifier, timestamp);
- }
-
- /**
- * Delete all versions of the specified column with a timestamp less than
- * or equal to the specified timestamp.
- * @param family family name
- * @param qualifier column qualifier
- * @param timestamp maximum version timestamp
- * @return this for invocation chaining
*/
public Delete addColumns(final byte [] family, final byte [] qualifier, final long timestamp) {
if (timestamp < 0) {
@@ -345,24 +277,9 @@ public class Delete extends Mutation implements Comparable<Row> {
* @param family family name
* @param qualifier column qualifier
* @return this for invocation chaining
- * @deprecated Since hbase-1.0.0. Use {@link #addColumn(byte[], byte[])}
- */
- @Deprecated
- public Delete deleteColumn(byte [] family, byte [] qualifier) {
- return addColumn(family, qualifier);
- }
-
- /**
- * Delete the latest version of the specified column.
- * This is an expensive call in that on the server-side, it first does a
- * get to find the latest versions timestamp. Then it adds a delete using
- * the fetched cells timestamp.
- * @param family family name
- * @param qualifier column qualifier
- * @return this for invocation chaining
*/
public Delete addColumn(final byte [] family, final byte [] qualifier) {
- this.deleteColumn(family, qualifier, this.ts);
+ this.addColumn(family, qualifier, this.ts);
return this;
}
@@ -372,19 +289,6 @@ public class Delete extends Mutation implements Comparable<Row> {
* @param qualifier column qualifier
* @param timestamp version timestamp
* @return this for invocation chaining
- * @deprecated Since hbase-1.0.0. Use {@link #addColumn(byte[], byte[], long)}
- */
- @Deprecated
- public Delete deleteColumn(byte [] family, byte [] qualifier, long timestamp) {
- return addColumn(family, qualifier, timestamp);
- }
-
- /**
- * Delete the specified version of the specified column.
- * @param family family name
- * @param qualifier column qualifier
- * @param timestamp version timestamp
- * @return this for invocation chaining
*/
public Delete addColumn(byte [] family, byte [] qualifier, long timestamp) {
if (timestamp < 0) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 15c5675..0d9c73b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -681,13 +681,13 @@ public final class ProtobufUtil {
ts = qv.getTimestamp();
}
if (deleteType == DeleteType.DELETE_ONE_VERSION) {
- delete.deleteColumn(family, qualifier, ts);
+ delete.addColumn(family, qualifier, ts);
} else if (deleteType == DeleteType.DELETE_MULTIPLE_VERSIONS) {
- delete.deleteColumns(family, qualifier, ts);
+ delete.addColumns(family, qualifier, ts);
} else if (deleteType == DeleteType.DELETE_FAMILY_VERSION) {
- delete.deleteFamilyVersion(family, ts);
+ delete.addFamilyVersion(family, ts);
} else {
- delete.deleteFamily(family, ts);
+ delete.addFamily(family, ts);
}
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDeleteTimeStamp.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDeleteTimeStamp.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDeleteTimeStamp.java
index e3582c1..debf2bd 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDeleteTimeStamp.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestDeleteTimeStamp.java
@@ -38,7 +38,7 @@ public class TestDeleteTimeStamp {
long ts = 2014L;
Delete delete = new Delete(ROW);
delete.setTimestamp(ts);
- delete.deleteColumn(FAMILY, QUALIFIER);
+ delete.addColumn(FAMILY, QUALIFIER);
NavigableMap<byte[], List<Cell>> familyCellmap = delete.getFamilyCellMap();
for (Entry<byte[], List<Cell>> entry : familyCellmap.entrySet()) {
for (Cell cell : entry.getValue()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
index 13a2567..fa9c4ad 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
@@ -358,7 +358,7 @@ public class TestOperation {
// produce a Delete operation
Delete delete = new Delete(ROW);
- delete.deleteColumn(FAMILY, QUALIFIER);
+ delete.addColumn(FAMILY, QUALIFIER);
// get its JSON representation, and parse it
json = delete.toJSON();
parsedJSON = mapper.readValue(json, HashMap.class);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
index 1515dc1..c9ab23c 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
@@ -205,7 +205,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
Set<byte[]> families = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
for (Cell kv : deleteRow) {
if (families.add(CellUtil.cloneFamily(kv))) {
- delete.deleteFamily(CellUtil.cloneFamily(kv), ts);
+ delete.addFamily(CellUtil.cloneFamily(kv), ts);
}
}
} else if (deleteType == DeleteType.COLUMN) {
@@ -216,7 +216,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
// Making deleteColumns() calls more than once for the same cf:qualifier is not correct
// Every call to deleteColumns() will add a new KV to the familymap which will finally
// get written to the memstore as part of delete().
- delete.deleteColumns(column.family, column.qualifier, ts);
+ delete.addColumns(column.family, column.qualifier, ts);
}
}
} else if (deleteType == DeleteType.VERSION) {
@@ -227,7 +227,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
int noOfVersionsToDelete = 0;
if (timestamp == null) {
for (Cell kv : deleteRow) {
- delete.deleteColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv.getTimestamp());
+ delete.addColumn(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv), kv.getTimestamp());
noOfVersionsToDelete++;
}
} else {
@@ -236,7 +236,7 @@ public class BulkDeleteEndpoint extends BulkDeleteService implements Coprocessor
Column column = new Column(CellUtil.cloneFamily(kv), CellUtil.cloneQualifier(kv));
// Only one version of particular column getting deleted.
if (columns.add(column)) {
- delete.deleteColumn(column.family, column.qualifier, ts);
+ delete.addColumn(column.family, column.qualifier, ts);
noOfVersionsToDelete++;
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
index c908474..2a146b3 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
@@ -345,7 +345,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
delete = new Delete(key.get());
}
delete.setCellVisibility(new CellVisibility(visibilityExps));
- delete.deleteFamily(CellUtil.cloneFamily(kv));
+ delete.addFamily(CellUtil.cloneFamily(kv));
}
if (delete != null) {
context.write(key, delete);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index ff1345c..f922343 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -372,9 +372,9 @@ public class RowResource extends ResourceBase {
byte[][] split = KeyValue.parseColumn(column);
if (rowspec.hasTimestamp()) {
if (split.length == 1) {
- delete.deleteFamily(split[0], rowspec.getTimestamp());
+ delete.addFamily(split[0], rowspec.getTimestamp());
} else if (split.length == 2) {
- delete.deleteColumns(split[0], split[1], rowspec.getTimestamp());
+ delete.addColumns(split[0], split[1], rowspec.getTimestamp());
} else {
return Response.status(Response.Status.BAD_REQUEST)
.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
@@ -382,9 +382,9 @@ public class RowResource extends ResourceBase {
}
} else {
if (split.length == 1) {
- delete.deleteFamily(split[0]);
+ delete.addFamily(split[0]);
} else if (split.length == 2) {
- delete.deleteColumns(split[0], split[1]);
+ delete.addColumns(split[0], split[1]);
} else {
return Response.status(Response.Status.BAD_REQUEST)
.type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
@@ -550,12 +550,12 @@ public class RowResource extends ResourceBase {
byte[][] parts = KeyValue.parseColumn(valueToDeleteColumn);
if (parts.length == 2) {
if (parts[1].length != 0) {
- delete.deleteColumns(parts[0], parts[1]);
+ delete.addColumns(parts[0], parts[1]);
retValue = table.checkAndDelete(key, parts[0], parts[1],
valueToDeleteCell.getValue(), delete);
} else {
// The case of empty qualifier.
- delete.deleteColumns(parts[0], Bytes.toBytes(StringUtils.EMPTY));
+ delete.addColumns(parts[0], Bytes.toBytes(StringUtils.EMPTY));
retValue = table.checkAndDelete(key, parts[0], Bytes.toBytes(StringUtils.EMPTY),
valueToDeleteCell.getValue(), delete);
}
@@ -565,7 +565,7 @@ public class RowResource extends ResourceBase {
.type(MIMETYPE_TEXT).entity("Bad request: Column incorrectly specified." + CRLF)
.build();
}
- delete.deleteColumns(parts[0], parts[1]);
+ delete.addColumns(parts[0], parts[1]);
if (LOG.isDebugEnabled()) {
LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
index 9b68806..eed4f1a 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
@@ -183,14 +183,14 @@ public class TestScannersWithFilters {
// Delete the second qualifier from all rows and families
for(byte [] ROW : ROWS_ONE) {
Delete d = new Delete(ROW);
- d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
- d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
+ d.addColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
+ d.addColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
table.delete(d);
}
for(byte [] ROW : ROWS_TWO) {
Delete d = new Delete(ROW);
- d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
- d.deleteColumns(FAMILIES[1], QUALIFIERS_TWO[1]);
+ d.addColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
+ d.addColumns(FAMILIES[1], QUALIFIERS_TWO[1]);
table.delete(d);
}
colsPerRow -= 2;
@@ -198,14 +198,14 @@ public class TestScannersWithFilters {
// Delete the second rows from both groups, one column at a time
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
Delete d = new Delete(ROWS_ONE[1]);
- d.deleteColumns(FAMILIES[0], QUALIFIER);
- d.deleteColumns(FAMILIES[1], QUALIFIER);
+ d.addColumns(FAMILIES[0], QUALIFIER);
+ d.addColumns(FAMILIES[1], QUALIFIER);
table.delete(d);
}
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
Delete d = new Delete(ROWS_TWO[1]);
- d.deleteColumns(FAMILIES[0], QUALIFIER);
- d.deleteColumns(FAMILIES[1], QUALIFIER);
+ d.addColumns(FAMILIES[0], QUALIFIER);
+ d.addColumns(FAMILIES[1], QUALIFIER);
table.delete(d);
}
numRows -= 2;
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
index 6d219e1..af64c66 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaUtil.java
@@ -157,7 +157,7 @@ public class QuotaUtil extends QuotaTableUtil {
final byte[] qualifier) throws IOException {
Delete delete = new Delete(rowKey);
if (qualifier != null) {
- delete.deleteColumns(QUOTA_FAMILY_INFO, qualifier);
+ delete.addColumns(QUOTA_FAMILY_INFO, qualifier);
}
doDelete(connection, delete);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 88c5427..bc3da96 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -2012,8 +2012,8 @@ public class HBaseFsck extends Configured implements Closeable {
private void resetSplitParent(HbckInfo hi) throws IOException {
RowMutations mutations = new RowMutations(hi.metaEntry.getRegionName());
Delete d = new Delete(hi.metaEntry.getRegionName());
- d.deleteColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
- d.deleteColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
+ d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
+ d.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
mutations.add(d);
HRegionInfo hri = new HRegionInfo(hi.metaEntry);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 6f02ab0..ff17065 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -2133,7 +2133,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
for (int i = startRow; i < endRow; i++) {
byte[] data = Bytes.toBytes(String.valueOf(i));
Delete delete = new Delete(data);
- delete.deleteFamily(f);
+ delete.addFamily(f);
t.delete(delete);
}
}
@@ -3418,9 +3418,9 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
ts + "_random_" + rand.nextLong());
put.addColumn(cf, qual, ts, value);
} else if (rand.nextDouble() < 0.8) {
- del.deleteColumn(cf, qual, ts);
+ del.addColumn(cf, qual, ts);
} else {
- del.deleteColumns(cf, qual, ts);
+ del.addColumns(cf, qual, ts);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 7901b81..9c9ec87 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -206,7 +206,7 @@ public class TestFromClientSide {
h.delete(d);
d = new Delete(T1, ts+3);
- d.deleteColumns(FAMILY, C0, ts+3);
+ d.addColumns(FAMILY, C0, ts+3);
h.delete(d);
Get g = new Get(T1);
@@ -262,7 +262,7 @@ public class TestFromClientSide {
assertArrayEquals(VALUE, result.getValue(FAMILY, COLUMN));
Delete del = new Delete(ROW);
- del.deleteColumn(FAMILY, COLUMN, ts);
+ del.addColumn(FAMILY, COLUMN, ts);
table.delete(del);
get = new Get(ROW);
@@ -360,7 +360,7 @@ public class TestFromClientSide {
while (it.hasNext()) {
Result result = it.next();
Delete delete = new Delete(result.getRow());
- delete.deleteColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
+ delete.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
ht.delete(delete);
count++;
}
@@ -933,7 +933,7 @@ public class TestFromClientSide {
// Delete a storefile column
////////////////////////////////////////////////////////////////////////////
delete = new Delete(ROWS[0]);
- delete.deleteColumns(FAMILIES[6], QUALIFIERS[7]);
+ delete.addColumns(FAMILIES[6], QUALIFIERS[7]);
ht.delete(delete);
// Try to get deleted column
@@ -974,7 +974,7 @@ public class TestFromClientSide {
// Delete a memstore column
////////////////////////////////////////////////////////////////////////////
delete = new Delete(ROWS[0]);
- delete.deleteColumns(FAMILIES[6], QUALIFIERS[8]);
+ delete.addColumns(FAMILIES[6], QUALIFIERS[8]);
ht.delete(delete);
// Try to get deleted column
@@ -1016,7 +1016,7 @@ public class TestFromClientSide {
////////////////////////////////////////////////////////////////////////////
delete = new Delete(ROWS[0]);
- delete.deleteFamily(FAMILIES[4]);
+ delete.addFamily(FAMILIES[4]);
ht.delete(delete);
// Try to get storefile column in deleted family
@@ -1180,7 +1180,7 @@ public class TestFromClientSide {
scanTestNull(ht, ROW, FAMILY, VALUE);
Delete delete = new Delete(ROW);
- delete.deleteColumns(FAMILY, null);
+ delete.addColumns(FAMILY, null);
ht.delete(delete);
Get get = new Get(ROW);
@@ -1211,7 +1211,7 @@ public class TestFromClientSide {
scanTestNull(ht, ROW, FAMILY, VALUE);
Delete delete = new Delete(ROW);
- delete.deleteColumns(FAMILY, HConstants.EMPTY_BYTE_ARRAY);
+ delete.addColumns(FAMILY, HConstants.EMPTY_BYTE_ARRAY);
ht.delete(delete);
Get get = new Get(ROW);
@@ -1239,7 +1239,7 @@ public class TestFromClientSide {
assertSingleResult(result, ROW, FAMILY, QUALIFIER, null);
Delete delete = new Delete(ROW);
- delete.deleteColumns(FAMILY, QUALIFIER);
+ delete.addColumns(FAMILY, QUALIFIER);
ht.delete(delete);
get = new Get(ROW);
@@ -1440,8 +1440,8 @@ public class TestFromClientSide {
// Delete a version in the memstore and a version in a storefile
Delete delete = new Delete(ROW);
- delete.deleteColumn(FAMILY, QUALIFIER, STAMPS[11]);
- delete.deleteColumn(FAMILY, QUALIFIER, STAMPS[7]);
+ delete.addColumn(FAMILY, QUALIFIER, STAMPS[11]);
+ delete.addColumn(FAMILY, QUALIFIER, STAMPS[7]);
ht.delete(delete);
// Test that it's gone
@@ -1678,8 +1678,8 @@ public class TestFromClientSide {
admin.flush(TABLE);
Delete delete = new Delete(ROW);
- delete.deleteFamilyVersion(FAMILY, ts[1]); // delete version '2000'
- delete.deleteFamilyVersion(FAMILY, ts[3]); // delete version '4000'
+ delete.addFamilyVersion(FAMILY, ts[1]); // delete version '2000'
+ delete.addFamilyVersion(FAMILY, ts[3]); // delete version '4000'
ht.delete(delete);
admin.flush(TABLE);
@@ -1733,24 +1733,24 @@ public class TestFromClientSide {
// 3. delete on ROW
delete = new Delete(ROW);
// delete version <= 2000 of all columns
- // note: deleteFamily must be the first since it will mask
+ // note: addFamily must be the first since it will mask
// the subsequent other type deletes!
- delete.deleteFamily(FAMILY, ts[1]);
+ delete.addFamily(FAMILY, ts[1]);
// delete version '4000' of all columns
- delete.deleteFamilyVersion(FAMILY, ts[3]);
+ delete.addFamilyVersion(FAMILY, ts[3]);
// delete version <= 3000 of column 0
- delete.deleteColumns(FAMILY, QUALIFIERS[0], ts[2]);
+ delete.addColumns(FAMILY, QUALIFIERS[0], ts[2]);
// delete version <= 5000 of column 2
- delete.deleteColumns(FAMILY, QUALIFIERS[2], ts[4]);
+ delete.addColumns(FAMILY, QUALIFIERS[2], ts[4]);
// delete version 5000 of column 4
- delete.deleteColumn(FAMILY, QUALIFIERS[4], ts[4]);
+ delete.addColumn(FAMILY, QUALIFIERS[4], ts[4]);
ht.delete(delete);
admin.flush(TABLE);
// 4. delete on ROWS[0]
delete = new Delete(ROW2);
- delete.deleteFamilyVersion(FAMILY, ts[1]); // delete version '2000'
- delete.deleteFamilyVersion(FAMILY, ts[3]); // delete version '4000'
+ delete.addFamilyVersion(FAMILY, ts[1]); // delete version '2000'
+ delete.addFamilyVersion(FAMILY, ts[3]); // delete version '4000'
ht.delete(delete);
admin.flush(TABLE);
@@ -1830,7 +1830,7 @@ public class TestFromClientSide {
ht.put(put);
Delete delete = new Delete(ROW);
- delete.deleteFamily(FAMILIES[0], ts[0]);
+ delete.addFamily(FAMILIES[0], ts[0]);
ht.delete(delete);
Get get = new Get(ROW);
@@ -1862,7 +1862,7 @@ public class TestFromClientSide {
ht.put(put);
delete = new Delete(ROW);
- delete.deleteColumn(FAMILIES[0], QUALIFIER); // ts[4]
+ delete.addColumn(FAMILIES[0], QUALIFIER); // ts[4]
ht.delete(delete);
get = new Get(ROW);
@@ -1885,12 +1885,12 @@ public class TestFromClientSide {
// Test for HBASE-1847
delete = new Delete(ROW);
- delete.deleteColumn(FAMILIES[0], null);
+ delete.addColumn(FAMILIES[0], null);
ht.delete(delete);
// Cleanup null qualifier
delete = new Delete(ROW);
- delete.deleteColumns(FAMILIES[0], null);
+ delete.addColumns(FAMILIES[0], null);
ht.delete(delete);
// Expected client behavior might be that you can re-put deleted values
@@ -1958,17 +1958,17 @@ public class TestFromClientSide {
result.size() == 4);
delete = new Delete(ROWS[0]);
- delete.deleteFamily(FAMILIES[2]);
+ delete.addFamily(FAMILIES[2]);
ht.delete(delete);
delete = new Delete(ROWS[1]);
- delete.deleteColumns(FAMILIES[1], QUALIFIER);
+ delete.addColumns(FAMILIES[1], QUALIFIER);
ht.delete(delete);
delete = new Delete(ROWS[2]);
- delete.deleteColumn(FAMILIES[1], QUALIFIER);
- delete.deleteColumn(FAMILIES[1], QUALIFIER);
- delete.deleteColumn(FAMILIES[2], QUALIFIER);
+ delete.addColumn(FAMILIES[1], QUALIFIER);
+ delete.addColumn(FAMILIES[1], QUALIFIER);
+ delete.addColumn(FAMILIES[2], QUALIFIER);
ht.delete(delete);
get = new Get(ROWS[0]);
@@ -2036,7 +2036,7 @@ public class TestFromClientSide {
// Test if we delete the family first in one row (HBASE-1541)
delete = new Delete(ROWS[3]);
- delete.deleteFamily(FAMILIES[1]);
+ delete.addFamily(FAMILIES[1]);
ht.delete(delete);
put = new Put(ROWS[3]);
@@ -2102,7 +2102,7 @@ public class TestFromClientSide {
for (int i = 0; i < 10; i++) {
byte [] bytes = Bytes.toBytes(i);
delete = new Delete(bytes);
- delete.deleteFamily(FAMILIES[0]);
+ delete.addFamily(FAMILIES[0]);
deletes.add(delete);
}
ht.delete(deletes);
@@ -3467,8 +3467,8 @@ public class TestFromClientSide {
// Delete a version in the memstore and a version in a storefile
Delete delete = new Delete(ROW);
- delete.deleteColumn(FAMILY, QUALIFIER, STAMPS[11]);
- delete.deleteColumn(FAMILY, QUALIFIER, STAMPS[7]);
+ delete.addColumn(FAMILY, QUALIFIER, STAMPS[11]);
+ delete.addColumn(FAMILY, QUALIFIER, STAMPS[7]);
ht.delete(delete);
// Test that it's gone
@@ -4350,7 +4350,7 @@ public class TestFromClientSide {
p.addColumn(FAMILY, QUALIFIERS[1], VALUE);
arm.add(p);
Delete d = new Delete(ROW);
- d.deleteColumns(FAMILY, QUALIFIERS[0]);
+ d.addColumns(FAMILY, QUALIFIERS[0]);
arm.add(d);
// TODO: Trying mutateRow again. The batch was failing with a one try only.
t.mutateRow(arm);
@@ -4897,7 +4897,7 @@ public class TestFromClientSide {
put3.addColumn(FAMILY, QUALIFIER, value3);
Delete delete = new Delete(ROW);
- delete.deleteColumns(FAMILY, QUALIFIER);
+ delete.addColumns(FAMILY, QUALIFIER);
// cell = "bbbb", using "aaaa" to compare only LESS/LESS_OR_EQUAL/NOT_EQUAL
// turns out "match"
@@ -5427,7 +5427,7 @@ public class TestFromClientSide {
try {
Delete delete = new Delete(ROW);
- delete.deleteFamily(FAMILY, -1);
+ delete.addFamily(FAMILY, -1);
table.delete(delete);
fail("Negative timestamps should not have been allowed");
} catch (IllegalArgumentException ex) {
@@ -5909,7 +5909,7 @@ public class TestFromClientSide {
ht.put(put);
scanTestNull(ht, ROW, FAMILY, VALUE, true);
Delete delete = new Delete(ROW);
- delete.deleteColumns(FAMILY, null);
+ delete.addColumns(FAMILY, null);
ht.delete(delete);
// Use a new table
byte[] TABLE2 = Bytes.toBytes("testNull2WithReverseScan");
@@ -5922,7 +5922,7 @@ public class TestFromClientSide {
TEST_UTIL.flush();
scanTestNull(ht, ROW, FAMILY, VALUE, true);
delete = new Delete(ROW);
- delete.deleteColumns(FAMILY, HConstants.EMPTY_BYTE_ARRAY);
+ delete.addColumns(FAMILY, HConstants.EMPTY_BYTE_ARRAY);
ht.delete(delete);
// Null value
put = new Put(ROW);
@@ -5951,7 +5951,7 @@ public class TestFromClientSide {
ht.put(put);
Delete delete = new Delete(ROW);
- delete.deleteFamily(FAMILIES[0], ts[0]);
+ delete.addFamily(FAMILIES[0], ts[0]);
ht.delete(delete);
Scan scan = new Scan(ROW);
@@ -5973,7 +5973,7 @@ public class TestFromClientSide {
ht.put(put);
delete = new Delete(ROW);
- delete.deleteColumn(FAMILIES[0], QUALIFIER); // ts[4]
+ delete.addColumn(FAMILIES[0], QUALIFIER); // ts[4]
ht.delete(delete);
scan = new Scan(ROW);
@@ -5986,12 +5986,12 @@ public class TestFromClientSide {
// Test for HBASE-1847
delete = new Delete(ROW);
- delete.deleteColumn(FAMILIES[0], null);
+ delete.addColumn(FAMILIES[0], null);
ht.delete(delete);
// Cleanup null qualifier
delete = new Delete(ROW);
- delete.deleteColumns(FAMILIES[0], null);
+ delete.addColumns(FAMILIES[0], null);
ht.delete(delete);
// Expected client behavior might be that you can re-put deleted values
@@ -6038,17 +6038,17 @@ public class TestFromClientSide {
ht.put(put);
delete = new Delete(ROWS[0]);
- delete.deleteFamily(FAMILIES[2]);
+ delete.addFamily(FAMILIES[2]);
ht.delete(delete);
delete = new Delete(ROWS[1]);
- delete.deleteColumns(FAMILIES[1], QUALIFIER);
+ delete.addColumns(FAMILIES[1], QUALIFIER);
ht.delete(delete);
delete = new Delete(ROWS[2]);
- delete.deleteColumn(FAMILIES[1], QUALIFIER);
- delete.deleteColumn(FAMILIES[1], QUALIFIER);
- delete.deleteColumn(FAMILIES[2], QUALIFIER);
+ delete.addColumn(FAMILIES[1], QUALIFIER);
+ delete.addColumn(FAMILIES[1], QUALIFIER);
+ delete.addColumn(FAMILIES[2], QUALIFIER);
ht.delete(delete);
scan = new Scan(ROWS[0]);
@@ -6084,7 +6084,7 @@ public class TestFromClientSide {
// Test if we delete the family first in one row (HBASE-1541)
delete = new Delete(ROWS[3]);
- delete.deleteFamily(FAMILIES[1]);
+ delete.addFamily(FAMILIES[1]);
ht.delete(delete);
put = new Put(ROWS[3]);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index c83b709..59c863e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -399,7 +399,7 @@ public class TestMultiParallel {
ArrayList<Delete> deletes = new ArrayList<Delete>();
for (int i = 0; i < KEYS.length; i++) {
Delete delete = new Delete(KEYS[i]);
- delete.deleteFamily(BYTES_FAMILY);
+ delete.addFamily(BYTES_FAMILY);
deletes.add(delete);
}
table.delete(deletes);
@@ -615,7 +615,7 @@ public class TestMultiParallel {
// 3 delete
Delete delete = new Delete(KEYS[20]);
- delete.deleteFamily(BYTES_FAMILY);
+ delete.addFamily(BYTES_FAMILY);
actions.add(delete);
// 4 get
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
index 174b430..56f01c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
@@ -512,7 +512,7 @@ public class TestMultipleTimestamps {
byte row[] = Bytes.toBytes("row:" + rowIdx);
byte column[] = Bytes.toBytes("column:" + colIdx);
Delete del = new Delete(row);
- del.deleteColumn(cf, column, version);
+ del.addColumn(cf, column, version);
ht.delete(del);
}
@@ -526,7 +526,7 @@ public class TestMultipleTimestamps {
byte row[] = Bytes.toBytes("row:" + rowIdx);
byte column[] = Bytes.toBytes("column:" + colIdx);
Delete del = new Delete(row);
- del.deleteColumns(cf, column, version);
+ del.addColumns(cf, column, version);
ht.delete(del);
}
@@ -534,14 +534,14 @@ public class TestMultipleTimestamps {
byte row[] = Bytes.toBytes("row:" + rowIdx);
byte column[] = Bytes.toBytes("column:" + colIdx);
Delete del = new Delete(row);
- del.deleteColumns(cf, column);
+ del.addColumns(cf, column);
ht.delete(del);
}
private void deleteFamily(Table ht, byte[] cf, int rowIdx) throws IOException {
byte row[] = Bytes.toBytes("row:" + rowIdx);
Delete del = new Delete(row);
- del.deleteFamily(cf);
+ del.addFamily(cf);
ht.delete(del);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index 50efed9..60eb92f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -84,7 +84,7 @@ public class TestPutDeleteEtcCellIteration {
Delete d = new Delete(ROW);
for (int i = 0; i < COUNT; i++) {
byte [] bytes = Bytes.toBytes(i);
- d.deleteColumn(bytes, bytes, TIMESTAMP);
+ d.addColumn(bytes, bytes, TIMESTAMP);
}
int index = 0;
for (CellScanner cellScanner = d.cellScanner(); cellScanner.advance();) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
index 54963ae..77771ba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
@@ -135,7 +135,7 @@ public class TestScannersFromClientSide {
// delete upto ts: 3
delete = new Delete(ROW);
- delete.deleteFamily(FAMILY, 3);
+ delete.addFamily(FAMILY, 3);
ht.delete(delete);
// without batch
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
index 567e887..c803752 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
@@ -376,7 +376,7 @@ public class TestTimestampsFilter {
byte row[] = Bytes.toBytes("row:" + rowIdx);
byte column[] = Bytes.toBytes("column:" + colIdx);
Delete del = new Delete(row);
- del.deleteColumn(cf, column, version);
+ del.addColumn(cf, column, version);
ht.delete(del);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index 45ba04b..abfadec 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -157,9 +157,9 @@ public class TestRegionObserverInterface {
new Boolean[] { true, true, true, true, false, false });
Delete delete = new Delete(ROW);
- delete.deleteColumn(A, A);
- delete.deleteColumn(B, B);
- delete.deleteColumn(C, C);
+ delete.addColumn(A, A);
+ delete.addColumn(B, B);
+ delete.addColumn(C, C);
table.delete(delete);
verifyMethodResult(SimpleRegionObserver.class,
@@ -194,9 +194,9 @@ public class TestRegionObserverInterface {
put.addColumn(C, C, C);
Delete delete = new Delete(ROW);
- delete.deleteColumn(A, A);
- delete.deleteColumn(B, B);
- delete.deleteColumn(C, C);
+ delete.addColumn(A, A);
+ delete.addColumn(B, B);
+ delete.addColumn(C, C);
RowMutations arm = new RowMutations(ROW);
arm.add(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index e0a486e..1e89685 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -189,14 +189,14 @@ public class TestFilter {
// Delete the second qualifier from all rows and families
for(byte [] ROW : ROWS_ONE) {
Delete d = new Delete(ROW);
- d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
- d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
+ d.addColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
+ d.addColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
this.region.delete(d);
}
for(byte [] ROW : ROWS_TWO) {
Delete d = new Delete(ROW);
- d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
- d.deleteColumns(FAMILIES[1], QUALIFIERS_TWO[1]);
+ d.addColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
+ d.addColumns(FAMILIES[1], QUALIFIERS_TWO[1]);
this.region.delete(d);
}
colsPerRow -= 2;
@@ -204,14 +204,14 @@ public class TestFilter {
// Delete the second rows from both groups, one column at a time
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
Delete d = new Delete(ROWS_ONE[1]);
- d.deleteColumns(FAMILIES[0], QUALIFIER);
- d.deleteColumns(FAMILIES[1], QUALIFIER);
+ d.addColumns(FAMILIES[0], QUALIFIER);
+ d.addColumns(FAMILIES[1], QUALIFIER);
this.region.delete(d);
}
for(byte [] QUALIFIER : QUALIFIERS_TWO) {
Delete d = new Delete(ROWS_TWO[1]);
- d.deleteColumns(FAMILIES[0], QUALIFIER);
- d.deleteColumns(FAMILIES[1], QUALIFIER);
+ d.addColumns(FAMILIES[0], QUALIFIER);
+ d.addColumns(FAMILIES[1], QUALIFIER);
this.region.delete(d);
}
numRows -= 2;
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 5cf2281..cb8b06f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -312,7 +312,7 @@ public class TestImportExport {
Delete d = new Delete(ROW1, now+3);
t.delete(d);
d = new Delete(ROW1);
- d.deleteColumns(FAMILYA, QUAL, now+2);
+ d.addColumns(FAMILYA, QUAL, now+2);
t.delete(d);
String[] args = new String[] {
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
index 2b96d8c..9830d64 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTSVWithVisibilityLabels.java
@@ -191,7 +191,7 @@ public class TestImportTSVWithVisibilityLabels implements Configurable {
for (int i = 0; i < numRetries; i++) {
try {
Delete d = new Delete(Bytes.toBytes("KEY"));
- d.deleteFamily(Bytes.toBytes(FAMILY));
+ d.addFamily(Bytes.toBytes(FAMILY));
d.setCellVisibility(new CellVisibility("private&secret"));
table.delete(d);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index a9841db..c17d408 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -102,7 +102,7 @@ public class TestWALPlayer {
t1.put(p);
// delete one column
Delete d = new Delete(ROW);
- d.deleteColumns(FAMILY, COLUMN1);
+ d.addColumns(FAMILY, COLUMN1);
t1.delete(d);
// replay the WAL, map table 1 to table 2
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index 67e1801..c825be4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -254,11 +254,11 @@ public class TestMasterOperationsForRegionReplicas {
Table metaTable = ADMIN.getConnection().getTable(TableName.META_TABLE_NAME);
for (byte[] row : tableRows) {
Delete deleteOneReplicaLocation = new Delete(row);
- deleteOneReplicaLocation.deleteColumns(HConstants.CATALOG_FAMILY,
+ deleteOneReplicaLocation.addColumns(HConstants.CATALOG_FAMILY,
MetaTableAccessor.getServerColumn(1));
- deleteOneReplicaLocation.deleteColumns(HConstants.CATALOG_FAMILY,
+ deleteOneReplicaLocation.addColumns(HConstants.CATALOG_FAMILY,
MetaTableAccessor.getSeqNumColumn(1));
- deleteOneReplicaLocation.deleteColumns(HConstants.CATALOG_FAMILY,
+ deleteOneReplicaLocation.addColumns(HConstants.CATALOG_FAMILY,
MetaTableAccessor.getStartCodeColumn(1));
metaTable.delete(deleteOneReplicaLocation);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
index 7b48783..5ea219a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
@@ -390,12 +390,12 @@ public class TestAtomicOperation {
p.setDurability(Durability.ASYNC_WAL);
rm.add(p);
Delete d = new Delete(row);
- d.deleteColumns(fam1, qual2, ts);
+ d.addColumns(fam1, qual2, ts);
d.setDurability(Durability.ASYNC_WAL);
rm.add(d);
} else {
Delete d = new Delete(row);
- d.deleteColumns(fam1, qual1, ts);
+ d.addColumns(fam1, qual1, ts);
d.setDurability(Durability.ASYNC_WAL);
rm.add(d);
Put p = new Put(row, ts);
@@ -483,12 +483,12 @@ public class TestAtomicOperation {
p.setDurability(Durability.ASYNC_WAL);
mrm.add(p);
Delete d = new Delete(row);
- d.deleteColumns(fam1, qual1, ts);
+ d.addColumns(fam1, qual1, ts);
d.setDurability(Durability.ASYNC_WAL);
mrm.add(d);
} else {
Delete d = new Delete(row2);
- d.deleteColumns(fam1, qual1, ts);
+ d.addColumns(fam1, qual1, ts);
d.setDurability(Durability.ASYNC_WAL);
mrm.add(d);
Put p = new Put(row, ts);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
index b0a43b7..2dfbee6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
@@ -181,9 +181,9 @@ public class TestBlocksRead {
private void deleteFamily(String family, String row, long version)
throws IOException {
Delete del = new Delete(Bytes.toBytes(row));
- del.deleteFamily(Bytes.toBytes(family + "_ROWCOL"), version);
- del.deleteFamily(Bytes.toBytes(family + "_ROW"), version);
- del.deleteFamily(Bytes.toBytes(family + "_NONE"), version);
+ del.addFamily(Bytes.toBytes(family + "_ROWCOL"), version);
+ del.addFamily(Bytes.toBytes(family + "_ROW"), version);
+ del.addFamily(Bytes.toBytes(family + "_NONE"), version);
region.delete(del);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
index 6bdda43..b374bdc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
@@ -184,7 +184,7 @@ public class TestCompaction {
for (int i = 0; i < compactionThreshold; i++) {
Delete delete = new Delete(Bytes.add(STARTROW, Bytes.toBytes(i)));
byte [][] famAndQf = {COLUMN_FAMILY, null};
- delete.deleteFamily(famAndQf[0]);
+ delete.addFamily(famAndQf[0]);
r.delete(delete);
}
r.flush(true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 9b82cc5..abd6d3f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -1388,7 +1388,7 @@ public class TestHRegion {
else
break;
Delete delete = new Delete(CellUtil.cloneRow(results.get(0)));
- delete.deleteColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
+ delete.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
r.delete(delete);
results.clear();
} while (more);
@@ -1690,7 +1690,7 @@ public class TestHRegion {
assertFalse(res);
Delete delete = new Delete(row1);
- delete.deleteColumn(fam1, qf1);
+ delete.addColumn(fam1, qf1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal),
delete, true);
assertFalse(res);
@@ -1704,8 +1704,8 @@ public class TestHRegion {
// checkAndDelete with correct value
delete = new Delete(row1);
- delete.deleteColumn(fam1, qf1);
- delete.deleteColumn(fam1, qf1);
+ delete.addColumn(fam1, qf1);
+ delete.addColumn(fam1, qf1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val2),
delete, true);
assertTrue(res);
@@ -1752,7 +1752,7 @@ public class TestHRegion {
// checkAndDelete with wrong value
Delete delete = new Delete(row1);
- delete.deleteFamily(fam1);
+ delete.addFamily(fam1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val2),
put, true);
assertEquals(false, res);
@@ -1785,7 +1785,7 @@ public class TestHRegion {
// checkAndDelete with correct value
Delete delete = new Delete(row1);
- delete.deleteColumn(fam1, qf1);
+ delete.addColumn(fam1, qf1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val1),
delete, true);
assertEquals(true, res);
@@ -1992,9 +1992,9 @@ public class TestHRegion {
// Multi-column delete
Delete delete = new Delete(row1);
- delete.deleteColumn(fam1, qf1);
- delete.deleteColumn(fam2, qf1);
- delete.deleteColumn(fam1, qf3);
+ delete.addColumn(fam1, qf1);
+ delete.addColumn(fam2, qf1);
+ delete.addColumn(fam1, qf3);
boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(
val2), delete, true);
assertEquals(true, res);
@@ -2010,7 +2010,7 @@ public class TestHRegion {
// Family delete
delete = new Delete(row1);
- delete.deleteFamily(fam2);
+ delete.addFamily(fam2);
res = region.checkAndMutate(row1, fam2, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal),
delete, true);
assertEquals(true, res);
@@ -2055,8 +2055,8 @@ public class TestHRegion {
// We do support deleting more than 1 'latest' version
Delete delete = new Delete(row1);
- delete.deleteColumn(fam1, qual);
- delete.deleteColumn(fam1, qual);
+ delete.addColumn(fam1, qual);
+ delete.addColumn(fam1, qual);
region.delete(delete);
Get get = new Get(row1);
@@ -2143,7 +2143,7 @@ public class TestHRegion {
// ok now delete a split:
Delete delete = new Delete(row);
- delete.deleteColumns(fam, splitA);
+ delete.addColumns(fam, splitA);
region.delete(delete);
// assert some things:
@@ -2317,7 +2317,7 @@ public class TestHRegion {
byte[] value = Bytes.toBytes("value");
Delete delete = new Delete(rowA);
- delete.deleteFamily(fam1);
+ delete.addFamily(fam1);
region.delete(delete);
@@ -2350,14 +2350,14 @@ public class TestHRegion {
@Test
public void testDeleteColumns_PostInsert() throws IOException, InterruptedException {
Delete delete = new Delete(row);
- delete.deleteColumns(fam1, qual1);
+ delete.addColumns(fam1, qual1);
doTestDelete_AndPostInsert(delete);
}
@Test
- public void testDeleteFamily_PostInsert() throws IOException, InterruptedException {
+ public void testAddFamily_PostInsert() throws IOException, InterruptedException {
Delete delete = new Delete(row);
- delete.deleteFamily(fam1);
+ delete.addFamily(fam1);
doTestDelete_AndPostInsert(delete);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
index bb72b1d..3e32772 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
@@ -204,7 +204,7 @@ public class TestKeepDeletes {
region.put(p);
Delete d = new Delete(T1, ts);
- d.deleteColumn(c0, c0, ts);
+ d.addColumn(c0, c0, ts);
region.delete(d);
// scan still returns delete markers and deletes rows
@@ -256,7 +256,7 @@ public class TestKeepDeletes {
Delete d = new Delete(T1, ts+2);
- d.deleteColumn(c0, c0, ts);
+ d.addColumn(c0, c0, ts);
region.delete(d);
// "past" get does not see rows behind delete marker
@@ -335,11 +335,11 @@ public class TestKeepDeletes {
region.delete(d);
d = new Delete(T1, ts+2);
- d.deleteColumn(c0, c0, ts+2);
+ d.addColumn(c0, c0, ts+2);
region.delete(d);
d = new Delete(T1, ts+3);
- d.deleteColumns(c0, c0, ts+3);
+ d.addColumns(c0, c0, ts+3);
region.delete(d);
Scan s = new Scan();
@@ -413,19 +413,19 @@ public class TestKeepDeletes {
long ts = EnvironmentEdgeManager.currentTime();
Delete d = new Delete(T1, ts);
- d.deleteColumns(c0, c0, ts);
+ d.addColumns(c0, c0, ts);
region.delete(d);
d = new Delete(T1, ts);
- d.deleteFamily(c0);
+ d.addFamily(c0);
region.delete(d);
d = new Delete(T1, ts);
- d.deleteColumn(c0, c0, ts+1);
+ d.addColumn(c0, c0, ts+1);
region.delete(d);
d = new Delete(T1, ts);
- d.deleteColumn(c0, c0, ts+2);
+ d.addColumn(c0, c0, ts+2);
region.delete(d);
// 1 family marker, 1 column marker, 2 version markers
@@ -466,19 +466,19 @@ public class TestKeepDeletes {
// all the following deletes affect the put
Delete d = new Delete(T1, ts);
- d.deleteColumns(c0, c0, ts);
+ d.addColumns(c0, c0, ts);
region.delete(d);
d = new Delete(T1, ts);
- d.deleteFamily(c0, ts);
+ d.addFamily(c0, ts);
region.delete(d);
d = new Delete(T1, ts);
- d.deleteColumn(c0, c0, ts+1);
+ d.addColumn(c0, c0, ts+1);
region.delete(d);
d = new Delete(T1, ts);
- d.deleteColumn(c0, c0, ts+2);
+ d.addColumn(c0, c0, ts+2);
region.delete(d);
// 1 family marker, 1 column marker, 2 version markers
@@ -529,19 +529,19 @@ public class TestKeepDeletes {
// all the following deletes affect the put
Delete d = new Delete(T1, ts);
- d.deleteColumns(c0, c0, ts);
+ d.addColumns(c0, c0, ts);
region.delete(d);
d = new Delete(T1, ts);
- d.deleteFamily(c0, ts);
+ d.addFamily(c0, ts);
region.delete(d);
d = new Delete(T1, ts);
- d.deleteColumn(c0, c0, ts+1);
+ d.addColumn(c0, c0, ts+1);
region.delete(d);
d = new Delete(T1, ts);
- d.deleteColumn(c0, c0, ts+2);
+ d.addColumn(c0, c0, ts+2);
region.delete(d);
// 1 family marker, 1 column marker, 2 version markers
@@ -624,20 +624,20 @@ public class TestKeepDeletes {
region.put(p);
Delete d = new Delete(T1, ts+2);
- d.deleteColumns(c0, c0, ts+2);
+ d.addColumns(c0, c0, ts+2);
region.delete(d);
d = new Delete(T1, ts+2);
- d.deleteFamily(c1, ts+2);
+ d.addFamily(c1, ts+2);
region.delete(d);
d = new Delete(T2, ts+2);
- d.deleteFamily(c0, ts+2);
+ d.addFamily(c0, ts+2);
region.delete(d);
// add an older delete, to make sure it is filtered
d = new Delete(T1, ts-10);
- d.deleteFamily(c1, ts-10);
+ d.addFamily(c1, ts-10);
region.delete(d);
// ts + 2 does NOT include the delete at ts+2
@@ -689,15 +689,15 @@ public class TestKeepDeletes {
Delete d = new Delete(T1, ts);
// test corner case (Put and Delete have same TS)
- d.deleteColumns(c0, c0, ts);
+ d.addColumns(c0, c0, ts);
region.delete(d);
d = new Delete(T1, ts+1);
- d.deleteColumn(c0, c0, ts+1);
+ d.addColumn(c0, c0, ts+1);
region.delete(d);
d = new Delete(T1, ts+3);
- d.deleteColumn(c0, c0, ts+3);
+ d.addColumn(c0, c0, ts+3);
region.delete(d);
region.flush(true);
@@ -842,7 +842,7 @@ public class TestKeepDeletes {
region.delete(d);
// and a column delete marker
d = new Delete(T1, ts-2);
- d.deleteColumns(c0, c0, ts-1);
+ d.addColumns(c0, c0, ts-1);
region.delete(d);
Get g = new Get(T1);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
index ede0793..3ef89ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java
@@ -249,7 +249,7 @@ public class TestMajorCompaction {
LOG.debug("Adding deletes to memstore and flushing");
Delete delete = new Delete(secondRowBytes, System.currentTimeMillis());
byte [][] famAndQf = {COLUMN_FAMILY, null};
- delete.deleteFamily(famAndQf[0]);
+ delete.addFamily(famAndQf[0]);
r.delete(delete);
// Assert deleted.
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
index 261c007..1bd20c6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
@@ -105,14 +105,14 @@ public class TestMinorCompaction {
public void testMinorCompactionWithDeleteColumn1() throws Exception {
Delete dc = new Delete(secondRowBytes);
/* delete all timestamps in the column */
- dc.deleteColumns(fam2, col2);
+ dc.addColumns(fam2, col2);
testMinorCompactionWithDelete(dc);
}
@Test
public void testMinorCompactionWithDeleteColumn2() throws Exception {
Delete dc = new Delete(secondRowBytes);
- dc.deleteColumn(fam2, col2);
+ dc.addColumn(fam2, col2);
/* compactionThreshold is 3. The table has 4 versions: 0, 1, 2, and 3.
* we only delete the latest version. One might expect to see only
* versions 1 and 2. HBase differs, and gives us 0, 1 and 2.
@@ -125,14 +125,14 @@ public class TestMinorCompaction {
@Test
public void testMinorCompactionWithDeleteColumnFamily() throws Exception {
Delete deleteCF = new Delete(secondRowBytes);
- deleteCF.deleteFamily(fam2);
+ deleteCF.addFamily(fam2);
testMinorCompactionWithDelete(deleteCF);
}
@Test
public void testMinorCompactionWithDeleteVersion1() throws Exception {
Delete deleteVersion = new Delete(secondRowBytes);
- deleteVersion.deleteColumns(fam2, col2, 2);
+ deleteVersion.addColumns(fam2, col2, 2);
/* compactionThreshold is 3. The table has 4 versions: 0, 1, 2, and 3.
* We delete versions 0 ... 2. So, we still have one remaining.
*/
@@ -142,7 +142,7 @@ public class TestMinorCompaction {
@Test
public void testMinorCompactionWithDeleteVersion2() throws Exception {
Delete deleteVersion = new Delete(secondRowBytes);
- deleteVersion.deleteColumn(fam2, col2, 1);
+ deleteVersion.addColumn(fam2, col2, 1);
/*
* the table has 4 versions: 0, 1, 2, and 3.
* We delete 1.
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
index 5224647..ff6f09b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
@@ -209,7 +209,7 @@ public class TestMultiColumnScanner {
boolean deletedSomething = false;
for (long ts : TIMESTAMPS)
if (rand.nextDouble() < DELETE_PROBABILITY) {
- d.deleteColumns(FAMILY_BYTES, qualBytes, ts);
+ d.addColumns(FAMILY_BYTES, qualBytes, ts);
String rowAndQual = row + "_" + qual;
Long whenDeleted = lastDelTimeMap.get(rowAndQual);
lastDelTimeMap.put(rowAndQual, whenDeleted == null ? ts
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
index 8f0cd4c..1b42754 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -526,7 +526,7 @@ public class TestScanner {
Delete dc = new Delete(firstRowBytes);
/* delete column1 of firstRow */
- dc.deleteColumns(fam1, col1);
+ dc.addColumns(fam1, col1);
region.delete(dc);
region.flush(true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index fc3735a..b31be9d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -323,7 +323,7 @@ public class TestSeekOptimizations {
}
public void delAtTimestamp(String qual, long ts) {
- del.deleteColumn(FAMILY_BYTES, Bytes.toBytes(qual), ts);
+ del.addColumn(FAMILY_BYTES, Bytes.toBytes(qual), ts);
logDelete(qual, ts, "at");
}
@@ -336,7 +336,7 @@ public class TestSeekOptimizations {
}
private void delUpToTimestamp(String qual, long upToTS) {
- del.deleteColumns(FAMILY_BYTES, Bytes.toBytes(qual), upToTS);
+ del.addColumns(FAMILY_BYTES, Bytes.toBytes(qual), upToTS);
logDelete(qual, upToTS, "up to and including");
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
index c9b20d5..29a052b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java
@@ -409,7 +409,7 @@ public class TestPerTableCFReplication {
Table source, Table... targets)
throws Exception {
Delete del = new Delete(row);
- del.deleteFamily(fam);
+ del.addFamily(fam);
source.delete(del);
Get get = new Get(row);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index 7d51ef5..30249fd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -153,7 +153,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
}
// place a version delete marker (delete last version)
Delete d = new Delete(row);
- d.deleteColumn(famName, row, t);
+ d.addColumn(famName, row, t);
htable1.delete(d);
get = new Get(row);
@@ -175,7 +175,7 @@ public class TestReplicationSmallTests extends TestReplicationBase {
// place a column delete marker
d = new Delete(row);
- d.deleteColumns(famName, row, t+2);
+ d.addColumns(famName, row, t+2);
htable1.delete(d);
// now *both* of the remaining version should be deleted
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 8d97915..8ecc6e3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -935,7 +935,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Delete d = new Delete(TEST_ROW);
- d.deleteFamily(TEST_FAMILY);
+ d.addFamily(TEST_FAMILY);
try(Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(TEST_TABLE)) {
t.delete(d);
@@ -968,7 +968,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Delete d = new Delete(TEST_ROW);
- d.deleteFamily(TEST_FAMILY);
+ d.addFamily(TEST_FAMILY);
try(Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(TEST_TABLE);) {
t.checkAndDelete(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER,
@@ -1333,8 +1333,8 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Delete d = new Delete(TEST_ROW);
- d.deleteFamily(family1);
- d.deleteFamily(family2);
+ d.addFamily(family1);
+ d.addFamily(family2);
try (Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(tableName)) {
t.delete(d);
@@ -1347,7 +1347,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Delete d = new Delete(TEST_ROW);
- d.deleteFamily(family1);
+ d.addFamily(family1);
try (Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(tableName)) {
t.delete(d);
@@ -1360,7 +1360,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Delete d = new Delete(TEST_ROW);
- d.deleteFamily(family2);
+ d.addFamily(family2);
try (Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(tableName)) {
t.delete(d);
@@ -1528,7 +1528,7 @@ public class TestAccessController extends SecureTestUtil {
@Override
public Object run() throws Exception {
Delete d = new Delete(TEST_ROW);
- d.deleteColumn(family1, qualifier);
+ d.addColumn(family1, qualifier);
// d.deleteFamily(family1);
try (Connection conn = ConnectionFactory.createConnection(conf);
Table t = conn.getTable(tableName)) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
index 549db3c..bbc6ad0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLWithMultipleVersions.java
@@ -327,8 +327,8 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
Delete d = new Delete(TEST_ROW1);
- d.deleteColumns(TEST_FAMILY1, TEST_Q1);
- d.deleteColumns(TEST_FAMILY1, TEST_Q2);
+ d.addColumns(TEST_FAMILY1, TEST_Q1);
+ d.addColumns(TEST_FAMILY1, TEST_Q2);
t.delete(d);
}
}
@@ -350,7 +350,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
Delete d = new Delete(TEST_ROW2);
- d.deleteFamily(TEST_FAMILY1);
+ d.addFamily(TEST_FAMILY1);
t.delete(d);
}
}
@@ -522,7 +522,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
Delete d = new Delete(TEST_ROW, 124L);
- d.deleteColumns(TEST_FAMILY1, TEST_Q1);
+ d.addColumns(TEST_FAMILY1, TEST_Q1);
t.delete(d);
}
}
@@ -537,7 +537,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
Delete d = new Delete(TEST_ROW);
- d.deleteColumns(TEST_FAMILY1, TEST_Q2, 124L);
+ d.addColumns(TEST_FAMILY1, TEST_Q2, 124L);
t.delete(d);
}
}
@@ -616,9 +616,9 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
Delete d = new Delete(TEST_ROW1);
- d.deleteColumn(TEST_FAMILY1, TEST_Q1, 123);
- d.deleteColumn(TEST_FAMILY1, TEST_Q2);
- d.deleteFamilyVersion(TEST_FAMILY2, 125);
+ d.addColumn(TEST_FAMILY1, TEST_Q1, 123);
+ d.addColumn(TEST_FAMILY1, TEST_Q2);
+ d.addFamilyVersion(TEST_FAMILY2, 125);
t.delete(d);
}
}
@@ -905,7 +905,7 @@ public class TestCellACLWithMultipleVersions extends SecureTestUtil {
try (Connection connection = ConnectionFactory.createConnection(conf)) {
try (Table t = connection.getTable(TEST_TABLE.getTableName())) {
Delete d = new Delete(TEST_ROW1);
- d.deleteColumns(TEST_FAMILY1, TEST_Q1, 120);
+ d.addColumns(TEST_FAMILY1, TEST_Q1, 120);
t.checkAndDelete(TEST_ROW1, TEST_FAMILY1, TEST_Q1, ZERO, d);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
index 7f1e720..5ddfb9c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
@@ -348,7 +348,7 @@ public class TestCellACLs extends SecureTestUtil {
AccessTestAction deleteFamily = new AccessTestAction() {
@Override
public Object run() throws Exception {
- Delete delete = new Delete(TEST_ROW).deleteFamily(TEST_FAMILY);
+ Delete delete = new Delete(TEST_ROW).addFamily(TEST_FAMILY);
try(Connection connection = ConnectionFactory.createConnection(conf);
Table t = connection.getTable(TEST_TABLE.getTableName())) {
t.delete(delete);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
index d538498..104cb5b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java
@@ -127,7 +127,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
for (byte[] authLabel : authLabels) {
String authLabelStr = Bytes.toString(authLabel);
if (currentAuths.contains(authLabelStr)) {
- d.deleteColumns(LABELS_TABLE_FAMILY, authLabel);
+ d.addColumns(LABELS_TABLE_FAMILY, authLabel);
} else {
// This label is not set for the user.
finalOpStatus[i] = new OperationStatus(OperationStatusCode.FAILURE,
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
index c67d869..52c43b2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDeletes.java
@@ -201,7 +201,7 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(TOPSECRET + "|" + CONFIDENTIAL));
- d.deleteFamilyVersion(fam, 123l);
+ d.addFamilyVersion(fam, 123l);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -1799,11 +1799,11 @@ public class TestVisibilityLabelsWithDeletes {
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
d.setCellVisibility(new CellVisibility(CONFIDENTIAL));
- d.deleteFamilyVersion(fam, 123l);
+ d.addFamilyVersion(fam, 123l);
table.delete(d);
d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.deleteFamilyVersion(fam, 125l);
+ d.addFamilyVersion(fam, 125l);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -1856,7 +1856,7 @@ public class TestVisibilityLabelsWithDeletes {
table.delete(d);
d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.deleteFamilyVersion(fam, 125l);
+ d.addFamilyVersion(fam, 125l);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -1957,7 +1957,7 @@ public class TestVisibilityLabelsWithDeletes {
d = new Delete(row1);
d.setCellVisibility(new CellVisibility(SECRET + "&" + TOPSECRET));
- d.deleteFamilyVersion(fam, 125l);
+ d.addFamilyVersion(fam, 125l);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
@@ -2715,7 +2715,7 @@ public class TestVisibilityLabelsWithDeletes {
try (Connection connection = ConnectionFactory.createConnection(conf);
Table table = connection.getTable(tableName)) {
Delete d = new Delete(row1);
- d.deleteFamilyVersion(fam, 126l);
+ d.addFamilyVersion(fam, 126l);
table.delete(d);
} catch (Throwable t) {
throw new IOException(t);
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
index a467071..e28acc6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
@@ -216,7 +216,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase {
Delete delete = new Delete(rowKey);
// Delete all versions since a put
// could be called multiple times if CM is used
- delete.deleteColumns(cf, column);
+ delete.addColumns(cf, column);
mutate(table, delete, rowKeyBase, rowKey, cf, column, checkedValue);
buf.append(MutationType.DELETE.getNumber());
break;
http://git-wip-us.apache.org/repos/asf/hbase/blob/094d65e6/hbase-shell/src/main/ruby/hbase/table.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb b/hbase-shell/src/main/ruby/hbase/table.rb
index 153f07e..2535a68 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -193,7 +193,7 @@ EOF
end
if column
family, qualifier = parse_column_name(column)
- d.deleteColumns(family, qualifier, timestamp)
+ d.addColumns(family, qualifier, timestamp)
end
@table.delete(d)
end