You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2017/12/18 23:20:51 UTC
[2/2] hbase git commit: HBASE-19122 Suspect methods on Cell to be
deprecated
HBASE-19122 Suspect methods on Cell to be deprecated
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b4056d26
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b4056d26
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b4056d26
Branch: refs/heads/master
Commit: b4056d267a2f13dc31182f17cb8eaf275d703663
Parents: 9d0c7c6
Author: Michael Stack <st...@apache.org>
Authored: Mon Dec 18 15:20:15 2017 -0800
Committer: Michael Stack <st...@apache.org>
Committed: Mon Dec 18 15:20:44 2017 -0800
----------------------------------------------------------------------
.../apache/hadoop/hbase/MetaTableAccessor.java | 36 ++---
.../hadoop/hbase/filter/KeyOnlyFilter.java | 10 ++
.../hadoop/hbase/protobuf/ProtobufUtil.java | 8 +-
.../hbase/shaded/protobuf/ProtobufUtil.java | 7 +-
.../org/apache/hadoop/hbase/client/TestPut.java | 7 +-
.../hbase/ipc/TestHBaseRpcControllerImpl.java | 6 +
.../hadoop/hbase/ByteBufferKeyOnlyKeyValue.java | 4 +
.../apache/hadoop/hbase/ByteBufferKeyValue.java | 33 ++++
.../main/java/org/apache/hadoop/hbase/Cell.java | 42 ++++-
.../org/apache/hadoop/hbase/CellBuilder.java | 14 +-
.../java/org/apache/hadoop/hbase/CellUtil.java | 16 +-
.../org/apache/hadoop/hbase/ExtendedCell.java | 46 ++++++
.../hadoop/hbase/ExtendedCellBuilder.java | 18 ++-
.../hbase/ExtendedCellBuilderFactory.java | 12 +-
.../hadoop/hbase/ExtendedCellBuilderImpl.java | 36 ++---
.../hadoop/hbase/IndividualBytesFieldCell.java | 33 ++++
.../hbase/IndividualBytesFieldCellBuilder.java | 8 -
.../java/org/apache/hadoop/hbase/KeyValue.java | 42 +++--
.../apache/hadoop/hbase/KeyValueBuilder.java | 8 -
.../apache/hadoop/hbase/PrivateCellUtil.java | 162 ++++++++++++++++++-
.../java/org/apache/hadoop/hbase/RawCell.java | 9 +-
.../main/java/org/apache/hadoop/hbase/Tag.java | 33 ----
.../java/org/apache/hadoop/hbase/TagUtil.java | 35 ++++
.../io/encoding/BufferedDataBlockEncoder.java | 62 +++++++
.../apache/hadoop/hbase/TestCellBuilder.java | 4 +-
.../org/apache/hadoop/hbase/TestCellUtil.java | 10 ++
.../org/apache/hadoop/hbase/TestKeyValue.java | 5 +
.../example/MultiThreadedClientExample.java | 30 ++--
.../example/ValueRewritingObserver.java | 3 +-
.../example/WriteHeavyIncrementObserver.java | 5 +-
.../apache/hadoop/hbase/types/TestPBCell.java | 5 +-
.../apache/hadoop/hbase/util/MapReduceCell.java | 34 ++++
.../apache/hadoop/hbase/rest/RowResource.java | 12 +-
.../RegionCoprocessorEnvironment.java | 6 +-
.../favored/FavoredNodeAssignmentHelper.java | 5 +-
.../hbase/master/TableNamespaceManager.java | 12 +-
.../master/assignment/RegionStateStore.java | 8 +-
.../org/apache/hadoop/hbase/mob/MobUtils.java | 4 +-
.../compactions/PartitionedMobCompactor.java | 3 +-
.../hadoop/hbase/regionserver/HMobStore.java | 3 +-
.../hadoop/hbase/regionserver/HRegion.java | 2 +-
.../regionserver/RegionCoprocessorHost.java | 10 +-
.../security/access/AccessControlLists.java | 4 +-
.../DefaultVisibilityLabelServiceImpl.java | 18 +--
.../VisibilityReplicationEndpoint.java | 7 +-
.../hbase/client/TestMultiRespectsLimits.java | 16 +-
.../hbase/regionserver/MockHStoreFile.java | 8 +-
.../TestCompactionLifeCycleTracker.java | 6 +-
.../regionserver/TestFlushLifeCycleTracker.java | 6 +-
.../hadoop/hbase/regionserver/TestHRegion.java | 21 ++-
.../hadoop/hbase/regionserver/TestHStore.java | 11 +-
.../security/token/TestTokenAuthentication.java | 4 +-
.../ExpAsStringVisibilityLabelServiceImpl.java | 3 +-
.../hadoop/hbase/thrift/ThriftServerRunner.java | 9 +-
.../hadoop/hbase/thrift2/ThriftUtilities.java | 6 +-
55 files changed, 683 insertions(+), 284 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index df2102a..96899d0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -37,6 +37,7 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Consistency;
@@ -72,7 +73,6 @@ import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
@@ -1361,7 +1361,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_BARRIER_FAMILY)
.setQualifier(seqBytes)
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(seqBytes)
.build())
.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
@@ -1369,7 +1369,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_META_FAMILY)
.setQualifier(tableNameCq)
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(tableName)
.build());
return put;
@@ -1383,7 +1383,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_META_FAMILY)
.setQualifier(daughterNameCq)
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(value)
.build());
return put;
@@ -1396,7 +1396,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_META_FAMILY)
.setQualifier(parentNameCq)
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(value)
.build());
return put;
@@ -1413,7 +1413,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.SPLITA_QUALIFIER)
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(RegionInfo.toByteArray(splitA))
.build());
}
@@ -1423,7 +1423,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.SPLITB_QUALIFIER)
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(RegionInfo.toByteArray(splitB))
.build());
}
@@ -1732,7 +1732,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.MERGEA_QUALIFIER)
.setTimestamp(putOfMerged.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(RegionInfo.toByteArray(regionA))
.build())
.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
@@ -1740,7 +1740,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.MERGEB_QUALIFIER)
.setTimestamp(putOfMerged.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(RegionInfo.toByteArray(regionB))
.build());
@@ -1985,7 +1985,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_POSITION_FAMILY)
.setQualifier(Bytes.toBytes(peerId))
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(Bytes.toBytes(Math.abs(entry.getValue())))
.build());
puts.add(put);
@@ -2153,7 +2153,7 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(HConstants.REGIONINFO_QUALIFIER)
.setTimestamp(p.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(RegionInfo.toByteArray(hri))
.build());
return p;
@@ -2170,7 +2170,7 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(getServerColumn(replicaId))
.setTimestamp(time)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(Bytes.toBytes(sn.getAddress().toString()))
.build())
.add(builder.clear()
@@ -2178,7 +2178,7 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(getStartCodeColumn(replicaId))
.setTimestamp(time)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(Bytes.toBytes(sn.getStartcode()))
.build())
.add(builder.clear()
@@ -2186,7 +2186,7 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(getSeqNumColumn(replicaId))
.setTimestamp(time)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(Bytes.toBytes(openSeqNum))
.build());
}
@@ -2199,21 +2199,21 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(getServerColumn(replicaId))
.setTimestamp(now)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.build())
.add(builder.clear()
.setRow(p.getRow())
.setFamily(getCatalogFamily())
.setQualifier(getStartCodeColumn(replicaId))
.setTimestamp(now)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.build())
.add(builder.clear()
.setRow(p.getRow())
.setFamily(getCatalogFamily())
.setQualifier(getSeqNumColumn(replicaId))
.setTimestamp(now)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.build());
}
@@ -2241,7 +2241,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(getSeqNumColumn(replicaId))
.setTimestamp(time)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(Bytes.toBytes(openSeqNum))
.build());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index 606728e..644d1e8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
@@ -195,6 +195,11 @@ public class KeyOnlyFilter extends FilterBase {
}
@Override
+ public DataType getType() {
+ return cell.getType();
+ }
+
+ @Override
public long getSequenceId() {
return 0;
}
@@ -308,6 +313,11 @@ public class KeyOnlyFilter extends FilterBase {
}
@Override
+ public DataType getType() {
+ return cell.getType();
+ }
+
+ @Override
public byte[] getValueArray() {
if (lenAsVal) {
return Bytes.toBytes(cell.getValueLength());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index f334536..267dc7a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -27,6 +27,7 @@ import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
import com.google.protobuf.TextFormat;
+
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
@@ -37,9 +38,10 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableSet;
import java.util.function.Function;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
@@ -534,7 +536,7 @@ public final class ProtobufUtil {
.setFamily(family)
.setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
.setTimestamp(ts)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
.setTags(allTagsBytes)
.build());
@@ -554,7 +556,7 @@ public final class ProtobufUtil {
.setFamily(family)
.setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
.setTimestamp(ts)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
.build());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index d9c699b..c9ea5a5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -38,13 +38,14 @@ import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.CacheEvictionStats;
import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
@@ -676,7 +677,7 @@ public final class ProtobufUtil {
.setFamily(family)
.setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
.setTimestamp(ts)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
.setTags(allTagsBytes)
.build());
@@ -696,7 +697,7 @@ public final class ProtobufUtil {
.setFamily(family)
.setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
.setTimestamp(ts)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
.build());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPut.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPut.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPut.java
index edc8a5a..0ae2dfa 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPut.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPut.java
@@ -24,8 +24,9 @@ import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
+
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -87,7 +88,7 @@ public class TestPut {
.setFamily(family)
.setQualifier(qualifier0)
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(value0)
.build())
.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
@@ -95,7 +96,7 @@ public class TestPut {
.setFamily(family)
.setQualifier(qualifier1)
.setTimestamp(ts1)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(value1)
.build());
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java
index 0ec78ad..bfd1eb9 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java
@@ -177,6 +177,12 @@ public class TestHBaseRpcControllerImpl {
// unused
return null;
}
+
+ @Override
+ public DataType getType() {
+ // unused
+ return null;
+ }
};
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java
index 3522e2d..713314e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java
@@ -147,6 +147,10 @@ public class ByteBufferKeyOnlyKeyValue extends ByteBufferCell {
return ByteBufferUtils.toByte(this.buf, this.offset + this.length - 1);
}
+ public DataType getType() {
+ return PrivateCellUtil.toDataType(getTypeByte());
+ }
+
@Override
public long getSequenceId() {
return 0;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
index beadaf6..870d872 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
@@ -17,9 +17,15 @@
*/
package org.apache.hadoop.hbase;
+import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Optional;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@@ -344,4 +350,31 @@ public class ByteBufferKeyValue extends ByteBufferCell implements ExtendedCell {
hash = 31 * hash + cell.getTypeByte();
return hash;
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsPosition();
+ int pos = offset;
+ int tagLen;
+ while (pos < offset + length) {
+ ByteBuffer tagsBuffer = getTagsByteBuffer();
+ tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
+ if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
+ return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE));
+ }
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
index 2b99823..40f0a1c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
@@ -133,8 +133,7 @@ public interface Cell {
/**
* @return The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc
- * @deprecated since 2.0.0, use appropriate {@link CellUtil#isDelete} or
- * {@link CellUtil#isPut(Cell)} methods instead. This will be removed in 3.0.0.
+ * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Use {@link #getType()}.
*/
@Deprecated
byte getTypeByte();
@@ -148,7 +147,9 @@ public interface Cell {
* {@link HConstants#KEEP_SEQID_PERIOD} days, but generally becomes irrelevant after the cell's
* row is no longer involved in any operations that require strict consistency.
* @return seqId (always > 0 if exists), or 0 if it no longer exists
+ * @deprecated As of HBase-2.0. Will be removed in HBase-3.0.
*/
+ @Deprecated
long getSequenceId();
//7) Value
@@ -173,12 +174,16 @@ public interface Cell {
/**
* Contiguous raw bytes representing tags that may start at any index in the containing array.
* @return the tags byte array
+ * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are now internal.
*/
+ @Deprecated
byte[] getTagsArray();
/**
* @return the first offset where the tags start in the Cell
+ * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are now internal.
*/
+ @Deprecated
int getTagsOffset();
/**
@@ -190,6 +195,39 @@ public interface Cell {
* less than Integer.MAX_VALUE.
*
* @return the total length of the tags in the Cell.
+ * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are now internal.
*/
+ @Deprecated
int getTagsLength();
+
+ /**
+ * Returns the type of cell in a human readable format using {@link DataType}
+ * @return The data type this cell: one of Put, Delete, etc
+ */
+ DataType getType();
+
+ /**
+ * The valid types for user to build the cell. Currently, this is a subset of {@link KeyValue.Type}.
+ */
+ public enum DataType {
+ Put((byte) 4),
+
+ Delete((byte) 8),
+
+ DeleteFamilyVersion((byte) 10),
+
+ DeleteColumn((byte) 12),
+
+ DeleteFamily((byte) 14);
+
+ private final byte code;
+
+ DataType(final byte c) {
+ this.code = c;
+ }
+
+ public byte getCode() {
+ return this.code;
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java
index aeff15a..e89ac37 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java
@@ -26,18 +26,6 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public interface CellBuilder {
- /**
- * The valid types for user to build the cell.
- * Currently, This is subset of {@link KeyValue.Type}.
- */
- enum DataType {
- Put,
- Delete,
- DeleteFamilyVersion,
- DeleteColumn,
- DeleteFamily
- }
-
CellBuilder setRow(final byte[] row);
CellBuilder setRow(final byte[] row, final int rOffset, final int rLength);
@@ -49,7 +37,7 @@ public interface CellBuilder {
CellBuilder setTimestamp(final long timestamp);
- CellBuilder setType(final DataType type);
+ CellBuilder setType(final Cell.DataType type);
CellBuilder setValue(final byte[] value);
CellBuilder setValue(final byte[] value, final int vOffset, final int vLength);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 3a8307c..f320083 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -564,40 +564,30 @@ public final class CellUtil {
* Note : Now only CPs can create cell with tags using the CP environment
* @return A new cell which is having the extra tags also added to it.
* @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
- * Use CP environment to build Cell using {@link ExtendedCellBuilder}
*
*/
@Deprecated
public static Cell createCell(Cell cell, List<Tag> tags) {
- return createCell(cell, Tag.fromList(tags));
+ return PrivateCellUtil.createCell(cell, tags);
}
/**
* Now only CPs can create cell with tags using the CP environment
* @return A new cell which is having the extra tags also added to it.
* @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
- * Use CP environment to build Cell using {@link ExtendedCellBuilder}
*/
@Deprecated
public static Cell createCell(Cell cell, byte[] tags) {
- if (cell instanceof ByteBufferCell) {
- return new PrivateCellUtil.TagRewriteByteBufferCell((ByteBufferCell) cell, tags);
- }
- return new PrivateCellUtil.TagRewriteCell(cell, tags);
+ return PrivateCellUtil.createCell(cell, tags);
}
/**
* Now only CPs can create cell with tags using the CP environment
* @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
- * Use CP environment to build Cell using {@link ExtendedCellBuilder}
*/
@Deprecated
public static Cell createCell(Cell cell, byte[] value, byte[] tags) {
- if (cell instanceof ByteBufferCell) {
- return new PrivateCellUtil.ValueAndTagRewriteByteBufferCell((ByteBufferCell) cell, value,
- tags);
- }
- return new PrivateCellUtil.ValueAndTagRewriteCell(cell, value, tags);
+ return PrivateCellUtil.createCell(cell, value, tags);
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java
index 81ca018..31df296 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java
@@ -131,4 +131,50 @@ public interface ExtendedCell extends RawCell, HeapSize, Cloneable {
* @param ts buffer containing the timestamp value
*/
void setTimestamp(byte[] ts) throws IOException;
+
+ /**
+ * A region-specific unique monotonically increasing sequence ID given to each Cell. It always
+ * exists for cells in the memstore but is not retained forever. It will be kept for
+ * {@link HConstants#KEEP_SEQID_PERIOD} days, but generally becomes irrelevant after the cell's
+ * row is no longer involved in any operations that require strict consistency.
+ * @return seqId (always > 0 if exists), or 0 if it no longer exists
+ */
+ long getSequenceId();
+
+ /**
+ * Contiguous raw bytes representing tags that may start at any index in the containing array.
+ * @return the tags byte array
+ */
+ byte[] getTagsArray();
+
+ /**
+ * @return the first offset where the tags start in the Cell
+ */
+ int getTagsOffset();
+
+ /**
+ * HBase internally uses 2 bytes to store tags length in Cell. As the tags length is always a
+ * non-negative number, to make good use of the sign bit, the max of tags length is defined 2 *
+ * Short.MAX_VALUE + 1 = 65535. As a result, the return type is int, because a short is not
+ * capable of handling that. Please note that even if the return type is int, the max tags length
+ * is far less than Integer.MAX_VALUE.
+ * @return the total length of the tags in the Cell.
+ */
+ int getTagsLength();
+
+ /**
+ * {@inheritDoc}
+ * <p>
+ * Note : This does not expose the internal types of Cells like {@link KeyValue.Type#Maximum} and
+ * {@link KeyValue.Type#Minimum}
+ */
+ @Override
+ default DataType getType() {
+ return PrivateCellUtil.toDataType(getTypeByte());
+ }
+
+ /**
+ * @return The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc
+ */
+ byte getTypeByte();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java
index 57fa44e..b964d67 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase;
+import java.util.List;
+
import org.apache.yetus.audience.InterfaceAudience;
/**
@@ -26,8 +28,8 @@ import org.apache.yetus.audience.InterfaceAudience;
* Use {@link ExtendedCellBuilderFactory} to get ExtendedCellBuilder instance.
* TODO: ditto for ByteBufferCell?
*/
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
-public interface ExtendedCellBuilder extends CellBuilder {
+@InterfaceAudience.Private
+public interface ExtendedCellBuilder extends RawCellBuilder {
@Override
ExtendedCellBuilder setRow(final byte[] row);
@Override
@@ -47,7 +49,7 @@ public interface ExtendedCellBuilder extends CellBuilder {
ExtendedCellBuilder setTimestamp(final long timestamp);
@Override
- ExtendedCellBuilder setType(final DataType type);
+ ExtendedCellBuilder setType(final Cell.DataType type);
ExtendedCellBuilder setType(final byte type);
@@ -62,11 +64,17 @@ public interface ExtendedCellBuilder extends CellBuilder {
@Override
ExtendedCellBuilder clear();
- // TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[]
+ // we have this method for performance reasons so that one can create a cell directly from
+ // the tag byte[] of the cell without having to convert it to a list of Tag(s) and add it
+ // back.
ExtendedCellBuilder setTags(final byte[] tags);
- // TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[]
+ // we have this method for performance reasons so that one can create a cell directly from
+ // the tag byte[] of the cell without having to convert it to a list of Tag(s) and add it
+ // back.
ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength);
+ @Override
+ ExtendedCellBuilder setTags(List<Tag> tags);
/**
* Internal usage. Be careful before you use this while building a cell
* @param seqId set the seqId
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java
index 38778fb..f3acdf4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java
@@ -24,25 +24,17 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public final class ExtendedCellBuilderFactory {
- public static ExtendedCellBuilder create(CellBuilderType type) {
- return create(type, true);
- }
-
/**
* Allows creating a cell with the given CellBuilderType.
* @param type the type of CellBuilder(DEEP_COPY or SHALLOW_COPY).
- * @param allowSeqIdUpdate if seqId can be updated. CPs are not allowed to update
- * the seqId
* @return the cell that is created
*/
- public static ExtendedCellBuilder create(CellBuilderType type, boolean allowSeqIdUpdate) {
+ public static ExtendedCellBuilder create(CellBuilderType type) {
switch (type) {
case SHALLOW_COPY:
- // CPs are not allowed to update seqID and they always use DEEP_COPY. So we have not
- // passing 'allowSeqIdUpdate' to IndividualBytesFieldCellBuilder
return new IndividualBytesFieldCellBuilder();
case DEEP_COPY:
- return new KeyValueBuilder(allowSeqIdUpdate);
+ return new KeyValueBuilder();
default:
throw new UnsupportedOperationException("The type:" + type + " is unsupported");
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java
index 536dbdc..770b61d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase;
+import java.util.List;
+
import org.apache.commons.lang3.ArrayUtils;
import org.apache.yetus.audience.InterfaceAudience;
@@ -40,12 +42,6 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
protected byte[] tags = null;
protected int tagsOffset = 0;
protected int tagsLength = 0;
- // Will go away once we do with RawCellBuilder
- protected boolean allowSeqIdUpdate = false;
-
- public ExtendedCellBuilderImpl(boolean allowSeqIdUpdate) {
- this.allowSeqIdUpdate = allowSeqIdUpdate;
- }
@Override
public ExtendedCellBuilder setRow(final byte[] row) {
@@ -93,8 +89,8 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
}
@Override
- public ExtendedCellBuilder setType(final DataType type) {
- this.type = toKeyValueType(type);
+ public ExtendedCellBuilder setType(final Cell.DataType type) {
+ this.type = PrivateCellUtil.toTypeByte(type);
return this;
}
@@ -131,12 +127,15 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
}
@Override
+ public ExtendedCellBuilder setTags(List<Tag> tags) {
+ byte[] tagBytes = TagUtil.fromList(tags);
+ return setTags(tagBytes);
+ }
+
+ @Override
public ExtendedCellBuilder setSequenceId(final long seqId) {
- if (allowSeqIdUpdate) {
- this.seqId = seqId;
- return this;
- }
- throw new UnsupportedOperationException("SeqId cannot be set on this cell");
+ this.seqId = seqId;
+ return this;
}
private void checkBeforeBuild() {
@@ -175,15 +174,4 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
tagsLength = 0;
return this;
}
-
- private static KeyValue.Type toKeyValueType(DataType type) {
- switch (type) {
- case Put: return KeyValue.Type.Put;
- case Delete: return KeyValue.Type.Delete;
- case DeleteColumn: return KeyValue.Type.DeleteColumn;
- case DeleteFamilyVersion: return KeyValue.Type.DeleteFamilyVersion;
- case DeleteFamily: return KeyValue.Type.DeleteFamily;
- default: throw new UnsupportedOperationException("Unsupported data type:" + type);
- }
- }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java
index 7093b4b..a25bd19 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java
@@ -18,6 +18,13 @@
package org.apache.hadoop.hbase;
+import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Optional;
+
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@@ -297,4 +304,30 @@ public class IndividualBytesFieldCell implements ExtendedCell {
public String toString() {
return CellUtil.toString(this, true);
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsOffset();
+ int pos = offset;
+ while (pos < offset + length) {
+ int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
+ if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
+ return Optional
+ .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
+ }
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java
index 62febf8..8a0168e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java
@@ -22,14 +22,6 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
class IndividualBytesFieldCellBuilder extends ExtendedCellBuilderImpl {
- public IndividualBytesFieldCellBuilder() {
- this(true);
- }
-
- public IndividualBytesFieldCellBuilder(boolean allowSeqIdUpdate) {
- super(allowSeqIdUpdate);
- }
-
@Override
public ExtendedCell innerBuild() {
return new IndividualBytesFieldCell(row, rOffset, rLength,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 7093650..88e7d88 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -19,6 +19,7 @@
*/
package org.apache.hadoop.hbase;
+import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import static org.apache.hadoop.hbase.util.Bytes.len;
import java.io.DataInput;
@@ -29,8 +30,10 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -1522,19 +1525,6 @@ public class KeyValue implements ExtendedCell {
}
/**
- * Returns any tags embedded in the KeyValue. Used in testcases.
- * @return The tags
- */
- @Override
- public List<Tag> getTags() {
- int tagsLength = getTagsLength();
- if (tagsLength == 0) {
- return EMPTY_ARRAY_LIST;
- }
- return TagUtil.asList(getTagsArray(), getTagsOffset(), tagsLength);
- }
-
- /**
* @return the backing array of the entire KeyValue (all KeyValue fields are in a single array)
*/
@Override
@@ -2564,4 +2554,30 @@ public class KeyValue implements ExtendedCell {
kv.setSequenceId(this.getSequenceId());
return kv;
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsOffset();
+ int pos = offset;
+ while (pos < offset + length) {
+ int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
+ if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
+ return Optional
+ .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
+ }
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java
index 4f01992..9480b71 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java
@@ -22,14 +22,6 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
class KeyValueBuilder extends ExtendedCellBuilderImpl {
- KeyValueBuilder() {
- this(true);
- }
-
- KeyValueBuilder(boolean allowSeqIdUpdate) {
- super(allowSeqIdUpdate);
- }
-
@Override
protected ExtendedCell innerBuild() {
KeyValue kv = new KeyValue(row, rOffset, rLength,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
index df080f3..e52ed84 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
@@ -21,6 +21,7 @@ import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import com.google.common.annotations.VisibleForTesting;
+
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
@@ -31,6 +32,7 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
+
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.io.HeapSize;
@@ -43,6 +45,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.yetus.audience.InterfaceAudience;
+
/**
* Utility methods helpful slinging {@link Cell} instances. It has more powerful and
* rich set of APIs than those in {@link CellUtil} for internal usage.
@@ -107,7 +110,7 @@ public final class PrivateCellUtil {
* @return A new cell which is having the extra tags also added to it.
*/
public static Cell createCell(Cell cell, List<Tag> tags) {
- return createCell(cell, Tag.fromList(tags));
+ return createCell(cell, TagUtil.fromList(tags));
}
/**
@@ -311,6 +314,32 @@ public final class PrivateCellUtil {
Cell clonedBaseCell = ((ExtendedCell) this.cell).deepClone();
return new TagRewriteCell(clonedBaseCell, this.tags);
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsOffset();
+ int pos = offset;
+ while (pos < offset + length) {
+ int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
+ if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
+ return Optional
+ .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
+ }
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
}
static class TagRewriteByteBufferCell extends ByteBufferCell implements ExtendedCell {
@@ -544,6 +573,33 @@ public final class PrivateCellUtil {
public int getTagsPosition() {
return 0;
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsPosition();
+ int pos = offset;
+ int tagLen;
+ while (pos < offset + length) {
+ ByteBuffer tagsBuffer = getTagsByteBuffer();
+ tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
+ if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
+ return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE));
+ }
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
}
static class ValueAndTagRewriteCell extends TagRewriteCell {
@@ -928,7 +984,7 @@ public final class PrivateCellUtil {
return CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
}
- private static Iterator<Tag> tagsIterator(final ByteBuffer tags, final int offset,
+ public static Iterator<Tag> tagsIterator(final ByteBuffer tags, final int offset,
final int length) {
return new Iterator<Tag>() {
private int pos = offset;
@@ -1231,6 +1287,29 @@ public final class PrivateCellUtil {
cell.getQualifierLength());
}
+ public static Cell.DataType toDataType(byte type) {
+ Type codeToType = KeyValue.Type.codeToType(type);
+ switch (codeToType) {
+ case Put: return Cell.DataType.Put;
+ case Delete: return Cell.DataType.Delete;
+ case DeleteColumn: return Cell.DataType.DeleteColumn;
+ case DeleteFamily: return Cell.DataType.DeleteFamily;
+ case DeleteFamilyVersion: return Cell.DataType.DeleteFamilyVersion;
+ default: throw new UnsupportedOperationException("Invalid type of cell "+type);
+ }
+ }
+
+ public static KeyValue.Type toTypeByte(Cell.DataType type) {
+ switch (type) {
+ case Put: return KeyValue.Type.Put;
+ case Delete: return KeyValue.Type.Delete;
+ case DeleteColumn: return KeyValue.Type.DeleteColumn;
+ case DeleteFamilyVersion: return KeyValue.Type.DeleteFamilyVersion;
+ case DeleteFamily: return KeyValue.Type.DeleteFamily;
+ default: throw new UnsupportedOperationException("Unsupported data type:" + type);
+ }
+ }
+
/**
* Compare cell's value against given comparator
* @param cell
@@ -1345,6 +1424,32 @@ public final class PrivateCellUtil {
public int getTagsLength() {
return 0;
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsOffset();
+ int pos = offset;
+ while (pos < offset + length) {
+ int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
+ if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
+ return Optional
+ .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
+ }
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
}
/**
@@ -1498,6 +1603,33 @@ public final class PrivateCellUtil {
public int getValuePosition() {
return 0;
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsPosition();
+ int pos = offset;
+ int tagLen;
+ while (pos < offset + length) {
+ ByteBuffer tagsBuffer = getTagsByteBuffer();
+ tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
+ if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
+ return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE));
+ }
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
}
private static class FirstOnRowCell extends EmptyCell {
@@ -1547,6 +1679,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.Maximum.getCode();
}
+
+ @Override
+ public DataType getType() {
+ throw new UnsupportedOperationException();
+ }
}
private static class FirstOnRowByteBufferCell extends EmptyByteBufferCell {
@@ -1597,6 +1734,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.Maximum.getCode();
}
+
+ @Override
+ public DataType getType() {
+ throw new UnsupportedOperationException();
+ }
}
private static class LastOnRowByteBufferCell extends EmptyByteBufferCell {
@@ -1647,6 +1789,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.Minimum.getCode();
}
+
+ @Override
+ public DataType getType() {
+ throw new UnsupportedOperationException();
+ }
}
private static class FirstOnRowColByteBufferCell extends FirstOnRowByteBufferCell {
@@ -1875,6 +2022,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.Minimum.getCode();
}
+
+ @Override
+ public DataType getType() {
+ throw new UnsupportedOperationException();
+ }
}
private static class LastOnRowColCell extends LastOnRowCell {
@@ -2060,6 +2212,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.DeleteFamily.getCode();
}
+
+ @Override
+ public DataType getType() {
+ return DataType.DeleteFamily;
+ }
}
/**
@@ -2890,5 +3047,4 @@ public final class PrivateCellUtil {
public static Cell createFirstDeleteFamilyCellOnRow(final byte[] row, final byte[] fam) {
return new FirstOnRowDeleteFamilyCell(row, fam);
}
-
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java
index 9e25a9a..4cda7d5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java
@@ -41,19 +41,14 @@ public interface RawCell extends Cell {
* Creates a list of tags in the current cell
* @return a list of tags
*/
- default List<Tag> getTags() {
- return PrivateCellUtil.getTags(this);
- }
+ List<Tag> getTags();
/**
* Returns the specific tag of the given type
* @param type the type of the tag
* @return the specific tag if available or null
*/
- // TODO : Move to individual cell impl
- default Optional<Tag> getTag(byte type) {
- return PrivateCellUtil.getTag(this, type);
- }
+ Optional<Tag> getTag(byte type);
/**
* Check the length of tags. If it is invalid, throw IllegalArgumentException
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
index 8709814..6f9bfdc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
@@ -20,7 +20,6 @@
package org.apache.hadoop.hbase;
import java.nio.ByteBuffer;
-import java.util.List;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@@ -153,38 +152,6 @@ public interface Tag {
}
/**
- * Write a list of tags into a byte array
- * @param tags The list of tags
- * @return the serialized tag data as bytes
- */
- // TODO : Remove this when we move to RawCellBuilder
- public static byte[] fromList(List<Tag> tags) {
- if (tags == null || tags.isEmpty()) {
- return HConstants.EMPTY_BYTE_ARRAY;
- }
- int length = 0;
- for (Tag tag : tags) {
- length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
- }
- byte[] b = new byte[length];
- int pos = 0;
- int tlen;
- for (Tag tag : tags) {
- tlen = tag.getValueLength();
- pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE);
- pos = Bytes.putByte(b, pos, tag.getType());
- if (tag.hasArray()) {
- pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen);
- } else {
- ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(),
- pos, tlen);
- pos += tlen;
- }
- }
- return b;
- }
-
- /**
* Converts the value bytes of the given tag into a long value
* @param tag The Tag
* @return value as long
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
index 6ad66ba..34c78a5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
@@ -137,6 +137,41 @@ public final class TagUtil {
}
/**
+ * Write a list of tags into a byte array
+ * Note: this is a purely internal API. It helps in
+ * cases where we have a set of tags and want to create a cell out of them. For example, in MOBs
+ * we create a reference tag to indicate the presence of mob data. Also note that this is not
+ * exposed to CPs.
+ * @param tags The list of tags
+ * @return the serialized tag data as bytes
+ */
+ public static byte[] fromList(List<Tag> tags) {
+ if (tags == null || tags.isEmpty()) {
+ return HConstants.EMPTY_BYTE_ARRAY;
+ }
+ int length = 0;
+ for (Tag tag : tags) {
+ length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
+ }
+ byte[] b = new byte[length];
+ int pos = 0;
+ int tlen;
+ for (Tag tag : tags) {
+ tlen = tag.getValueLength();
+ pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE);
+ pos = Bytes.putByte(b, pos, tag.getType());
+ if (tag.hasArray()) {
+ pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen);
+ } else {
+ ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(),
+ pos, tlen);
+ pos += tlen;
+ }
+ }
+ return b;
+ }
+
+ /**
* Iterator returned when no Tags. Used by CellUtil too.
*/
static final Iterator<Tag> EMPTY_TAGS_ITR = new Iterator<Tag>() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
index 9bcda01..f4d3c40 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
@@ -21,8 +21,14 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Optional;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.ByteBufferCell;
+import org.apache.hadoop.hbase.ByteBufferTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
@@ -32,6 +38,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.TagCompressionContext;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
import org.apache.hadoop.hbase.io.util.StreamUtils;
@@ -475,6 +482,32 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
// This is not used in actual flow. Throwing UnsupportedOperationException
throw new UnsupportedOperationException();
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsOffset();
+ int pos = offset;
+ while (pos < offset + length) {
+ int tagLen = Bytes.readAsInt(getTagsArray(), pos, Tag.TAG_LENGTH_SIZE);
+ if (getTagsArray()[pos + Tag.TAG_LENGTH_SIZE] == type) {
+ return Optional
+ .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + Tag.TAG_LENGTH_SIZE));
+ }
+ pos += Tag.TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
}
protected static class OffheapDecodedCell extends ByteBufferCell implements ExtendedCell {
@@ -720,6 +753,35 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
// This is not used in actual flow. Throwing UnsupportedOperationException
throw new UnsupportedOperationException();
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsPosition();
+ int pos = offset;
+ int tagLen;
+ while (pos < offset + length) {
+ ByteBuffer tagsBuffer = getTagsByteBuffer();
+ tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, Tag.TAG_LENGTH_SIZE);
+ if (ByteBufferUtils.toByte(tagsBuffer, pos + Tag.TAG_LENGTH_SIZE) == type) {
+ return Optional
+ .ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + Tag.TAG_LENGTH_SIZE));
+ }
+ pos += Tag.TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
+
}
protected abstract static class BufferedEncodedSeeker<STATE extends SeekerState>
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java
index ad18547..5c6c65a 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java
@@ -41,7 +41,7 @@ public class TestCellBuilder {
.setRow(row)
.setFamily(family)
.setQualifier(qualifier)
- .setType(CellBuilder.DataType.Put)
+ .setType(Cell.DataType.Put)
.setValue(value)
.build();
row[0] = NEW_DATA;
@@ -64,7 +64,7 @@ public class TestCellBuilder {
.setRow(row)
.setFamily(family)
.setQualifier(qualifier)
- .setType(CellBuilder.DataType.Put)
+ .setType(Cell.DataType.Put)
.setValue(value)
.build();
row[0] = NEW_DATA;
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
index 0395c09..4ab6bce 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
@@ -198,6 +198,11 @@ public class TestCellUtil {
// TODO Auto-generated method stub
return 0;
}
+
+ @Override
+ public DataType getType() {
+ return PrivateCellUtil.toDataType(getTypeByte());
+ }
};
/**
@@ -613,5 +618,10 @@ public class TestCellUtil {
public int getTagsLength() {
return this.kv.getTagsLength();
}
+
+ @Override
+ public DataType getType() {
+ return PrivateCellUtil.toDataType(getTypeByte());
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index 86891ae..c6b7265 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -737,5 +737,10 @@ public class TestKeyValue extends TestCase {
public byte[] getTagsArray() {
return this.kv.getTagsArray();
}
+
+ @Override
+ public DataType getType() {
+ return PrivateCellUtil.toDataType(getTypeByte());
+ }
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
index 5d95fde..e460316 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java
@@ -19,10 +19,22 @@
package org.apache.hadoop.hbase.client.example;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ForkJoinPool;
+import java.util.concurrent.Future;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.concurrent.TimeUnit;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.TableName;
@@ -39,18 +51,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ForkJoinPool;
-import java.util.concurrent.Future;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.ThreadLocalRandom;
-import java.util.concurrent.TimeUnit;
-
/**
* Example on how to use HBase's {@link Connection} and {@link Table} in a
@@ -226,7 +226,7 @@ public class MultiThreadedClientExample extends Configured implements Tool {
.setFamily(FAMILY)
.setQualifier(QUAL)
.setTimestamp(p.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(value)
.build());
puts.add(p);
@@ -263,7 +263,7 @@ public class MultiThreadedClientExample extends Configured implements Tool {
.setFamily(FAMILY)
.setQualifier(QUAL)
.setTimestamp(p.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(value)
.build());
t.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java
index 863ea89..cf7796b 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java
@@ -22,7 +22,6 @@ import java.util.Optional;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
-import org.apache.hadoop.hbase.CellBuilder.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
@@ -89,7 +88,7 @@ public class ValueRewritingObserver implements RegionObserver, RegionCoprocessor
cellBuilder.setFamily(CellUtil.cloneFamily(c));
cellBuilder.setQualifier(CellUtil.cloneQualifier(c));
cellBuilder.setTimestamp(c.getTimestamp());
- cellBuilder.setType(DataType.Put);
+ cellBuilder.setType(Cell.DataType.Put);
// Make sure each cell gets a unique value
byte[] clonedValue = new byte[replacedValue.length];
System.arraycopy(replacedValue, 0, clonedValue, 0, replacedValue.length);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java
index 55d9ac3..63637b5 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java
@@ -29,7 +29,6 @@ import java.util.stream.IntStream;
import org.apache.commons.lang3.mutable.MutableLong;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
@@ -80,7 +79,7 @@ public class WriteHeavyIncrementObserver implements RegionCoprocessor, RegionObs
private Cell createCell(byte[] row, byte[] family, byte[] qualifier, long ts, long value) {
return CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(row)
- .setType(CellBuilder.DataType.Put).setFamily(family).setQualifier(qualifier)
+ .setType(Cell.DataType.Put).setFamily(family).setQualifier(qualifier)
.setTimestamp(ts).setValue(Bytes.toBytes(value)).build();
}
@@ -250,7 +249,7 @@ public class WriteHeavyIncrementObserver implements RegionCoprocessor, RegionObs
.setQualifier(cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength())
.setValue(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())
- .setType(CellBuilder.DataType.Put).setTimestamp(ts).build());
+ .setType(Cell.DataType.Put).setTimestamp(ts).build());
}
}
c.getEnvironment().getRegion().put(put);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java
index 7f94f93..77c9e22 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java
@@ -46,7 +46,7 @@ public class TestPBCell {
@Test
public void testRoundTrip() {
final Cell cell = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"),
- Bytes.toBytes("qual"), Bytes.toBytes("val"));
+ Bytes.toBytes("qual"), Bytes.toBytes("val"));
CellProtos.Cell c = ProtobufUtil.toCell(cell), decoded;
PositionedByteRange pbr = new SimplePositionedByteRange(c.getSerializedSize());
pbr.setPosition(0);
@@ -54,6 +54,7 @@ public class TestPBCell {
pbr.setPosition(0);
decoded = CODEC.decode(pbr);
assertEquals(encodedLength, pbr.getPosition());
- assertTrue(CellUtil.equals(cell, ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), decoded)));
+ assertTrue(CellUtil.equals(cell, ProtobufUtil
+ .toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), decoded)));
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceCell.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceCell.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceCell.java
index 38ff59b..ae47e7a 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceCell.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceCell.java
@@ -17,15 +17,23 @@
*/
package org.apache.hadoop.hbase.util;
+import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Optional;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.Tag;
import org.apache.yetus.audience.InterfaceAudience;
/**
@@ -268,4 +276,30 @@ public class MapReduceCell extends ByteBufferCell implements ExtendedCell {
throw new RuntimeException(e);
}
}
+
+ @Override
+ public Optional<Tag> getTag(byte type) {
+ int length = getTagsLength();
+ int offset = getTagsOffset();
+ int pos = offset;
+ while (pos < offset + length) {
+ int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
+ if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
+ return Optional
+ .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
+ }
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return Optional.ofNullable(null);
+ }
+
+ @Override
+ public List<Tag> getTags() {
+ List<Tag> tags = new ArrayList<>();
+ Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index dead804..8c1cb5b 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -39,11 +39,10 @@ import javax.ws.rs.core.UriInfo;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Append;
@@ -56,6 +55,7 @@ import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class RowResource extends ResourceBase {
@@ -246,7 +246,7 @@ public class RowResource extends ResourceBase {
.setFamily(parts[0])
.setQualifier(parts[1])
.setTimestamp(cell.getTimestamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(cell.getValue())
.build());
}
@@ -321,7 +321,7 @@ public class RowResource extends ResourceBase {
.setFamily(parts[0])
.setQualifier(parts[1])
.setTimestamp(timestamp)
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(message)
.build());
table = servlet.getTable(tableResource.getName());
@@ -518,7 +518,7 @@ public class RowResource extends ResourceBase {
.setFamily(parts[0])
.setQualifier(parts[1])
.setTimestamp(cell.getTimestamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(cell.getValue())
.build());
if(Bytes.equals(col,
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
index 3380639..84e6d25 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
@@ -24,8 +24,8 @@ import java.util.concurrent.ConcurrentMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.RawCellBuilder;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -127,7 +127,7 @@ public interface RegionCoprocessorEnvironment extends CoprocessorEnvironment<Reg
/**
* Returns a CellBuilder so that coprocessors can build cells. These cells can also include tags.
* Note that this builder does not support updating seqId of the cells
- * @return the ExtendedCellBuilder
+ * @return the RawCellBuilder
*/
- ExtendedCellBuilder getCellBuilder();
+ RawCellBuilder getCellBuilder();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
index 14b2466..136453a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java
@@ -35,7 +35,7 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.HBaseIOException;
@@ -52,7 +52,6 @@ import org.apache.hadoop.hbase.master.RackManager;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
-
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -184,7 +183,7 @@ public class FavoredNodeAssignmentHelper {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(FAVOREDNODES_QUALIFIER)
.setTimestamp(EnvironmentEdgeManager.currentTime())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(favoredNodes)
.build());
LOG.debug("Create the region " + regionInfo.getRegionNameAsString() +
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
index 6a138ff..174272e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java
@@ -26,37 +26,35 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZKNamespaceManager;
-import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
+import org.apache.yetus.audience.InterfaceAudience;
/**
* This is a helper class used internally to manage the namespace metadata that is stored in
@@ -160,7 +158,7 @@ public class TableNamespaceManager {
.setFamily(TableDescriptorBuilder.NAMESPACE_FAMILY_INFO_BYTES)
.setQualifier(TableDescriptorBuilder.NAMESPACE_COL_DESC_BYTES)
.setTimestamp(p.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(ProtobufUtil.toProtoNamespaceDescriptor(ns).toByteArray())
.build());
nsTable.put(p);
http://git-wip-us.apache.org/repos/asf/hbase/blob/b4056d26/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
index 0b49b36..079dbd5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java
@@ -23,10 +23,11 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.HConstants;
@@ -51,7 +52,6 @@ import org.apache.zookeeper.KeeperException;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-
/**
* Store Region State to hbase:meta table.
*/
@@ -185,7 +185,7 @@ public class RegionStateStore {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(getServerNameColumn(replicaId))
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(Bytes.toBytes(regionLocation.getServerName()))
.build());
info.append(", regionLocation=").append(regionLocation);
@@ -195,7 +195,7 @@ public class RegionStateStore {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(getStateColumn(replicaId))
.setTimestamp(put.getTimeStamp())
- .setType(CellBuilder.DataType.Put)
+ .setType(DataType.Put)
.setValue(Bytes.toBytes(state.name()))
.build());
LOG.info(info);