You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by an...@apache.org on 2016/01/06 16:58:35 UTC
[3/3] hbase git commit: HBASE-12593 Tags to work with ByteBuffer.
HBASE-12593 Tags to work with ByteBuffer.
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a9b671b3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a9b671b3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a9b671b3
Branch: refs/heads/master
Commit: a9b671b31f07ade8968b42956aa60c722032dcc8
Parents: 893a54c
Author: anoopsjohn <an...@gmail.com>
Authored: Wed Jan 6 21:28:06 2016 +0530
Committer: anoopsjohn <an...@gmail.com>
Committed: Wed Jan 6 21:28:06 2016 +0530
----------------------------------------------------------------------
.../apache/hadoop/hbase/client/Mutation.java | 9 +-
.../hadoop/hbase/protobuf/ProtobufUtil.java | 16 +-
.../org/apache/hadoop/hbase/ArrayBackedTag.java | 143 ++++++++++++
.../java/org/apache/hadoop/hbase/CellUtil.java | 120 +++++++++-
.../java/org/apache/hadoop/hbase/KeyValue.java | 22 +-
.../org/apache/hadoop/hbase/OffheapTag.java | 83 +++++++
.../main/java/org/apache/hadoop/hbase/Tag.java | 191 +++-------------
.../java/org/apache/hadoop/hbase/TagUtil.java | 219 +++++++++++++++++++
.../hadoop/hbase/io/util/StreamUtils.java | 47 +++-
.../hadoop/hbase/util/ByteBufferUtils.java | 23 ++
.../hbase/util/test/RedundantKVGenerator.java | 7 +-
.../org/apache/hadoop/hbase/TestKeyValue.java | 28 ++-
.../hadoop/hbase/TestOffheapKeyValue.java | 25 +--
.../hbase/codec/TestCellCodecWithTags.java | 32 +--
.../hbase/codec/TestKeyValueCodecWithTags.java | 32 +--
.../hbase/io/TestTagCompressionContext.java | 3 +-
.../util/TestByteRangeWithKVSerialization.java | 3 +-
.../row/data/TestRowDataTrivialWithTags.java | 5 +-
.../hbase/rest/PerformanceEvaluation.java | 5 +-
.../hbase/io/hfile/HFilePrettyPrinter.java | 10 +-
.../hadoop/hbase/mapreduce/TextSortReducer.java | 3 +-
.../hbase/mapreduce/TsvImporterMapper.java | 3 +-
.../hbase/mob/DefaultMobStoreCompactor.java | 4 +-
.../hbase/mob/DefaultMobStoreFlusher.java | 5 +-
.../apache/hadoop/hbase/mob/MobConstants.java | 3 +-
.../org/apache/hadoop/hbase/mob/MobUtils.java | 10 +-
.../compactions/PartitionedMobCompactor.java | 3 +-
.../hbase/mob/mapreduce/MemStoreWrapper.java | 5 +-
.../hadoop/hbase/regionserver/HMobStore.java | 6 +-
.../hadoop/hbase/regionserver/HRegion.java | 25 ++-
.../hadoop/hbase/regionserver/HStore.java | 39 ++--
.../security/access/AccessControlLists.java | 11 +-
.../hbase/security/access/AccessController.java | 71 +++---
.../DefaultVisibilityLabelServiceImpl.java | 82 ++++---
.../visibility/VisibilityController.java | 66 +++---
.../VisibilityReplicationEndpoint.java | 7 +-
.../security/visibility/VisibilityUtils.java | 56 ++---
.../hadoop/hbase/wal/WALPrettyPrinter.java | 7 +-
.../hadoop/hbase/PerformanceEvaluation.java | 4 +-
.../hbase/client/TestResultSizeEstimation.java | 5 +-
.../io/encoding/TestDataBlockEncoders.java | 25 ++-
.../hbase/io/encoding/TestEncodedSeekers.java | 3 +-
.../io/encoding/TestPrefixTreeEncoding.java | 5 +-
.../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 5 +-
.../apache/hadoop/hbase/io/hfile/TestHFile.java | 3 +-
.../hadoop/hbase/io/hfile/TestHFileBlock.java | 5 +-
.../hbase/io/hfile/TestHFileWriterV3.java | 3 +-
.../hadoop/hbase/io/hfile/TestReseekTo.java | 5 +-
.../hadoop/hbase/io/hfile/TestSeekTo.java | 11 +-
.../hbase/regionserver/TestHMobStore.java | 5 +-
.../hadoop/hbase/regionserver/TestHRegion.java | 10 +-
.../TestStoreFileScannerWithTagCompression.java | 12 +-
.../hadoop/hbase/regionserver/TestTags.java | 22 +-
.../wal/TestKeyValueCompression.java | 5 +-
.../wal/TestWALCellCodecWithCompression.java | 10 +-
.../replication/TestReplicationWithTags.java | 6 +-
.../security/access/TestAccessController.java | 3 +-
.../ExpAsStringVisibilityLabelServiceImpl.java | 49 +++--
...sibilityLabelReplicationWithExpAsString.java | 3 +-
.../TestVisibilityLabelsReplication.java | 16 +-
.../apache/hadoop/hbase/util/HFileTestUtil.java | 17 +-
.../util/LoadTestDataGeneratorWithTags.java | 3 +-
62 files changed, 1083 insertions(+), 581 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
index 665c59c..9a550f9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -124,7 +125,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
* @param qualifier
* @param ts
* @param value
- * @param tags - Specify the Tags as an Array {@link KeyValue.Tag}
+ * @param tags - Specify the Tags as an Array
* @return a KeyValue with this objects row key and the Put identifier.
*/
KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value, Tag[] tags) {
@@ -138,7 +139,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
* @return a KeyValue with this objects row key and the Put identifier.
*/
KeyValue createPutKeyValue(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value,
- Tag[] tags) {
+ Tag[] tags) {
return new KeyValue(this.row, 0, this.row == null ? 0 : this.row.length,
family, 0, family == null ? 0 : family.length,
qualifier, ts, KeyValue.Type.Put, value, tags != null ? Arrays.asList(tags) : null);
@@ -219,11 +220,11 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
c.getQualifierLength()));
stringMap.put("timestamp", c.getTimestamp());
stringMap.put("vlen", c.getValueLength());
- List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+ List<Tag> tags = CellUtil.getTags(c);
if (tags != null) {
List<String> tagsString = new ArrayList<String>();
for (Tag t : tags) {
- tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(t.getValue()));
+ tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(t)));
}
stringMap.put("tag", tagsString);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index c02309b..f5e4305 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Consistency;
@@ -580,20 +581,17 @@ public final class ProtobufUtil {
if (qv.hasTimestamp()) {
ts = qv.getTimestamp();
}
- byte[] tags;
+ byte[] allTagsBytes;
if (qv.hasTags()) {
- tags = qv.getTags().toByteArray();
- Object[] array = Tag.asList(tags, 0, (short)tags.length).toArray();
- Tag[] tagArray = new Tag[array.length];
- for(int i = 0; i< array.length; i++) {
- tagArray[i] = (Tag)array[i];
- }
+ allTagsBytes = qv.getTags().toByteArray();
if(qv.hasDeleteType()) {
byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null;
put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts,
- fromDeleteType(qv.getDeleteType()), null, tags));
+ fromDeleteType(qv.getDeleteType()), null, allTagsBytes));
} else {
- put.addImmutable(family, qualifier, ts, value, tagArray);
+ List<Tag> tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length);
+ Tag[] tagsArray = new Tag[tags.size()];
+ put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray));
}
} else {
if(qv.hasDeleteType()) {
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java
new file mode 100644
index 0000000..2f4bb75
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ArrayBackedTag.java
@@ -0,0 +1,143 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * This is a {@link Tag} implementation in which value is backed by an on heap byte array.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class ArrayBackedTag implements Tag {
+ private final byte type;// TODO extra type state needed?
+ private final byte[] bytes;
+ private int offset = 0;
+ private int length = 0;
+
+ /**
+ * The special tag will write the length of each tag and that will be
+ * followed by the type and then the actual tag.
+ * So every time the length part is parsed we need to add + 1 byte to it to
+ * get the type and then get the actual tag.
+ */
+ public ArrayBackedTag(byte tagType, String tag) {
+ this(tagType, Bytes.toBytes(tag));
+ }
+
+ /**
+ * Format for a tag :
+ * {@code <length of tag - 2 bytes><type code - 1 byte><tag>} tag length is serialized
+ * using 2 bytes only but as this will be unsigned, we can have max tag length of
+ * (Short.MAX_VALUE * 2) + 1. It includes 1 byte type length and actual tag bytes length.
+ */
+ public ArrayBackedTag(byte tagType, byte[] tag) {
+ int tagLength = tag.length + TYPE_LENGTH_SIZE;
+ if (tagLength > MAX_TAG_LENGTH) {
+ throw new IllegalArgumentException(
+ "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
+ }
+ length = TAG_LENGTH_SIZE + tagLength;
+ bytes = new byte[length];
+ int pos = Bytes.putAsShort(bytes, 0, tagLength);
+ pos = Bytes.putByte(bytes, pos, tagType);
+ Bytes.putBytes(bytes, pos, tag, 0, tag.length);
+ this.type = tagType;
+ }
+
+ /**
+ * Creates a Tag from the specified byte array and offset. Presumes
+ * <code>bytes</code> content starting at <code>offset</code> is formatted as
+ * a Tag blob.
+ * The byte array should include the tag type, tag length and actual tag bytes.
+ * @param offset offset to start of Tag
+ */
+ public ArrayBackedTag(byte[] bytes, int offset) {
+ this(bytes, offset, getLength(bytes, offset));
+ }
+
+ private static int getLength(byte[] bytes, int offset) {
+ return TAG_LENGTH_SIZE + Bytes.readAsInt(bytes, offset, TAG_LENGTH_SIZE);
+ }
+
+ /**
+ * Creates a Tag from the specified byte array, starting at offset, and for length
+ * <code>length</code>. Presumes <code>bytes</code> content starting at <code>offset</code> is
+ * formatted as a Tag blob.
+ */
+ public ArrayBackedTag(byte[] bytes, int offset, int length) {
+ if (length > MAX_TAG_LENGTH) {
+ throw new IllegalArgumentException(
+ "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
+ }
+ this.bytes = bytes;
+ this.offset = offset;
+ this.length = length;
+ this.type = bytes[offset + TAG_LENGTH_SIZE];
+ }
+
+ /**
+ * @return The byte array backing this Tag.
+ */
+ public byte[] getValueArray() {
+ return this.bytes;
+ }
+
+ /**
+ * @return the tag type
+ */
+ public byte getType() {
+ return this.type;
+ }
+
+ /**
+ * @return Length of actual tag bytes within the backed buffer
+ */
+ public int getValueLength() {
+ return this.length - INFRASTRUCTURE_SIZE;
+ }
+
+ /**
+ * @return Offset of actual tag bytes within the backed buffer
+ */
+ public int getValueOffset() {
+ return this.offset + INFRASTRUCTURE_SIZE;
+ }
+
+ @Override
+ public boolean hasArray() {
+ return true;
+ }
+
+ @Override
+ public ByteBuffer getValueByteBuffer() {
+ return ByteBuffer.wrap(bytes);
+ }
+
+ @Override
+ public String toString() {
+ return "[Tag type : " + this.type + ", value : "
+ + Bytes.toStringBinary(bytes, getValueOffset(), getValueLength()) + "]";
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 0d34137..1ec6afa 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -19,11 +19,13 @@
package org.apache.hadoop.hbase;
import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
+import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import java.io.DataOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
+import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
@@ -108,8 +110,8 @@ public final class CellUtil {
/**
* Returns tag value in a new byte array. If server-side, use
- * {@link Tag#getBuffer()} with appropriate {@link Tag#getTagOffset()} and
- * {@link Tag#getTagLength()} instead to save on allocations.
+ * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()} and
+ * {@link Tag#getValueLength()} instead to save on allocations.
* @param cell
* @return tag value in a new byte array.
*/
@@ -749,7 +751,10 @@ public final class CellUtil {
* @param offset
* @param length
* @return iterator for the tags
+ * @deprecated As of 2.0.0 and will be removed in 3.0.0
+ * Instead use {@link #tagsIterator(Cell)}
*/
+ @Deprecated
public static Iterator<Tag> tagsIterator(final byte[] tags, final int offset, final int length) {
return new Iterator<Tag>() {
private int pos = offset;
@@ -764,7 +769,7 @@ public final class CellUtil {
public Tag next() {
if (hasNext()) {
int curTagLen = Bytes.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE);
- Tag tag = new Tag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE);
+ Tag tag = new ArrayBackedTag(tags, pos, curTagLen + TAG_LENGTH_SIZE);
this.pos += Bytes.SIZEOF_SHORT + curTagLen;
return tag;
}
@@ -778,6 +783,115 @@ public final class CellUtil {
};
}
+ private static Iterator<Tag> tagsIterator(final ByteBuffer tags, final int offset,
+ final int length) {
+ return new Iterator<Tag>() {
+ private int pos = offset;
+ private int endOffset = offset + length - 1;
+
+ @Override
+ public boolean hasNext() {
+ return this.pos < endOffset;
+ }
+
+ @Override
+ public Tag next() {
+ if (hasNext()) {
+ int curTagLen = ByteBufferUtils.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE);
+ Tag tag = new OffheapTag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE);
+ this.pos += Bytes.SIZEOF_SHORT + curTagLen;
+ return tag;
+ }
+ return null;
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
+ };
+ }
+
+ private static final Iterator<Tag> EMPTY_TAGS_ITR = new Iterator<Tag>() {
+ @Override
+ public boolean hasNext() {
+ return false;
+ }
+
+ @Override
+ public Tag next() {
+ return null;
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
+ };
+
+ /**
+ * Util method to iterate through the tags in the given cell.
+ *
+ * @param cell The Cell over which tags iterator is needed.
+ * @return iterator for the tags
+ */
+ public static Iterator<Tag> tagsIterator(final Cell cell) {
+ final int tagsLength = cell.getTagsLength();
+ // Save an object allocation where we can
+ if (tagsLength == 0) {
+ return EMPTY_TAGS_ITR;
+ }
+ if (cell instanceof ByteBufferedCell) {
+ return tagsIterator(((ByteBufferedCell) cell).getTagsByteBuffer(),
+ ((ByteBufferedCell) cell).getTagsPosition(), tagsLength);
+ }
+ return tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), tagsLength);
+ }
+
+ /**
+ * @param cell The Cell
+ * @return Tags in the given Cell as a List
+ */
+ public static List<Tag> getTags(Cell cell) {
+ List<Tag> tags = new ArrayList<Tag>();
+ Iterator<Tag> tagsItr = tagsIterator(cell);
+ while (tagsItr.hasNext()) {
+ tags.add(tagsItr.next());
+ }
+ return tags;
+ }
+
+ /**
+ * Retrieve Cell's first tag, matching the passed in type
+ *
+ * @param cell The Cell
+ * @param type Type of the Tag to retrieve
+ * @return null if there is no tag of the passed in tag type
+ */
+ public static Tag getTag(Cell cell, byte type){
+ boolean bufferBacked = cell instanceof ByteBufferedCell;
+ int length = cell.getTagsLength();
+ int offset = bufferBacked? ((ByteBufferedCell)cell).getTagsPosition():cell.getTagsOffset();
+ int pos = offset;
+ while (pos < offset + length) {
+ int tagLen;
+ if (bufferBacked) {
+ ByteBuffer tagsBuffer = ((ByteBufferedCell)cell).getTagsByteBuffer();
+ tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
+ if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
+ return new OffheapTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE);
+ }
+ } else {
+ tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE);
+ if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
+ return new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE);
+ }
+ }
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return null;
+ }
+
/**
* Returns true if the first range start1...end1 overlaps with the second range
* start2...end2, assuming the byte arrays represent row keys
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 933dd1d..a30a24c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -894,7 +894,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
int tagsLength = 0;
if (tags != null && tags.length > 0) {
for (Tag t: tags) {
- tagsLength += t.getLength();
+ tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
}
}
checkForTagsLength(tagsLength);
@@ -928,7 +928,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
if (tagsLength > 0) {
pos = Bytes.putAsShort(buffer, pos, tagsLength);
for (Tag t : tags) {
- pos = Bytes.putBytes(buffer, pos, t.getBuffer(), t.getOffset(), t.getLength());
+ int tlen = t.getValueLength();
+ pos = Bytes.putAsShort(buffer, pos, tlen + Tag.TYPE_LENGTH_SIZE);
+ pos = Bytes.putByte(buffer, pos, t.getType());
+ TagUtil.copyValueTo(t, buffer, pos);
+ pos += tlen;
}
}
return keyValueLength;
@@ -1013,7 +1017,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
int tagsLength = 0;
if (tags != null && !tags.isEmpty()) {
for (Tag t : tags) {
- tagsLength += t.getLength();
+ tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
}
}
checkForTagsLength(tagsLength);
@@ -1053,7 +1057,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
if (tagsLength > 0) {
pos = Bytes.putAsShort(bytes, pos, tagsLength);
for (Tag t : tags) {
- pos = Bytes.putBytes(bytes, pos, t.getBuffer(), t.getOffset(), t.getLength());
+ int tlen = t.getValueLength();
+ pos = Bytes.putAsShort(bytes, pos, tlen + Tag.TYPE_LENGTH_SIZE);
+ pos = Bytes.putByte(bytes, pos, t.getType());
+ TagUtil.copyValueTo(t, bytes, pos);
+ pos += tlen;
}
}
return bytes;
@@ -1176,7 +1184,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
if (tags != null) {
List<String> tagsString = new ArrayList<String>();
for (Tag t : tags) {
- tagsString.add((t.getType()) + ":" +Bytes.toStringBinary(t.getValue()));
+ tagsString.add((t.getType()) + ":" + TagUtil.getValueAsString(t));
}
stringMap.put("tag", tagsString);
}
@@ -1558,7 +1566,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
if (tagsLength == 0) {
return EMPTY_ARRAY_LIST;
}
- return Tag.asList(getTagsArray(), getTagsOffset(), tagsLength);
+ return TagUtil.asList(getTagsArray(), getTagsOffset(), tagsLength);
}
/**
@@ -2386,7 +2394,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
public static KeyValue cloneAndAddTags(Cell c, List<Tag> newTags) {
List<Tag> existingTags = null;
if(c.getTagsLength() > 0) {
- existingTags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+ existingTags = CellUtil.getTags(c);
existingTags.addAll(newTags);
} else {
existingTags = newTags;
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapTag.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapTag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapTag.java
new file mode 100644
index 0000000..b3d65bb
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/OffheapTag.java
@@ -0,0 +1,83 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+
+/**
+ * This is a {@link Tag} implementation in which value is backed by an off heap
+ * {@link java.nio.ByteBuffer}
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class OffheapTag implements Tag {
+
+ private ByteBuffer buffer;
+ private int offset, length;
+ private byte type;
+
+ public OffheapTag(ByteBuffer buffer, int offset, int length) {
+ this.buffer = buffer;
+ this.offset = offset;
+ this.length = length;
+ this.type = ByteBufferUtils.toByte(buffer, offset + TAG_LENGTH_SIZE);
+ }
+
+ @Override
+ public byte getType() {
+ return this.type;
+ }
+
+ @Override
+ public int getValueOffset() {
+ return this.offset + INFRASTRUCTURE_SIZE;
+ }
+
+ @Override
+ public int getValueLength() {
+ return this.length - INFRASTRUCTURE_SIZE;
+ }
+
+ @Override
+ public boolean hasArray() {
+ return false;
+ }
+
+ @Override
+ public byte[] getValueArray() {
+ throw new UnsupportedOperationException(
+ "Tag is backed by an off heap buffer. Use getValueByteBuffer()");
+ }
+
+ @Override
+ public ByteBuffer getValueByteBuffer() {
+ return this.buffer;
+ }
+
+ @Override
+ public String toString() {
+ return "[Tag type : " + this.type + ", value : "
+ + ByteBufferUtils.toStringBinary(buffer, getValueOffset(), getValueLength()) + "]";
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
index 36b87b1..1d55baa 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
@@ -19,201 +19,60 @@
*/
package org.apache.hadoop.hbase;
-import java.util.ArrayList;
-import java.util.List;
+import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
+
/**
- * Tags are part of cells and helps to add metadata about the KVs.
- * Metadata could be ACLs per cells, visibility labels, etc.
+ * Tags are part of cells and help to add metadata about them.
+ * Metadata could be ACLs, visibility labels, etc.
+ * <p>
+ * Each Tag has a type (one byte) and a value part. The max value length for a Tag is 65533.
+ * <p>
+ * See {@link TagType} for reserved tag types.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
-public class Tag {
+public interface Tag {
+
public final static int TYPE_LENGTH_SIZE = Bytes.SIZEOF_BYTE;
public final static int TAG_LENGTH_SIZE = Bytes.SIZEOF_SHORT;
public final static int INFRASTRUCTURE_SIZE = TYPE_LENGTH_SIZE + TAG_LENGTH_SIZE;
public static final int MAX_TAG_LENGTH = (2 * Short.MAX_VALUE) + 1 - TAG_LENGTH_SIZE;
- private final byte type;
- private final byte[] bytes;
- private int offset = 0;
- private int length = 0;
-
- /**
- * The special tag will write the length of each tag and that will be
- * followed by the type and then the actual tag.
- * So every time the length part is parsed we need to add + 1 byte to it to
- * get the type and then get the actual tag.
- */
- public Tag(byte tagType, String tag) {
- this(tagType, Bytes.toBytes(tag));
- }
-
- /**
- * Format for a tag :
- * {@code <length of tag - 2 bytes><type code - 1 byte><tag>} tag length is serialized
- * using 2 bytes only but as this will be unsigned, we can have max tag length of
- * (Short.MAX_SIZE * 2) +1. It includes 1 byte type length and actual tag bytes length.
- */
- public Tag(byte tagType, byte[] tag) {
- int tagLength = tag.length + TYPE_LENGTH_SIZE;
- if (tagLength > MAX_TAG_LENGTH) {
- throw new IllegalArgumentException(
- "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
- }
- length = TAG_LENGTH_SIZE + tagLength;
- bytes = new byte[length];
- int pos = Bytes.putAsShort(bytes, 0, tagLength);
- pos = Bytes.putByte(bytes, pos, tagType);
- Bytes.putBytes(bytes, pos, tag, 0, tag.length);
- this.type = tagType;
- }
-
- /**
- * Creates a Tag from the specified byte array and offset. Presumes
- * <code>bytes</code> content starting at <code>offset</code> is formatted as
- * a Tag blob.
- * The bytes to include the tag type, tag length and actual tag bytes.
- * @param offset offset to start of Tag
- */
- public Tag(byte[] bytes, int offset) {
- this(bytes, offset, getLength(bytes, offset));
- }
-
- private static int getLength(byte[] bytes, int offset) {
- return TAG_LENGTH_SIZE + Bytes.readAsInt(bytes, offset, TAG_LENGTH_SIZE);
- }
-
- /**
- * Creates a Tag from the specified byte array, starting at offset, and for length
- * <code>length</code>. Presumes <code>bytes</code> content starting at <code>offset</code> is
- * formatted as a Tag blob.
- */
- public Tag(byte[] bytes, int offset, int length) {
- if (length > MAX_TAG_LENGTH) {
- throw new IllegalArgumentException(
- "Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
- }
- this.bytes = bytes;
- this.offset = offset;
- this.length = length;
- this.type = bytes[offset + TAG_LENGTH_SIZE];
- }
-
- /**
- * @return The byte array backing this Tag.
- */
- public byte[] getBuffer() {
- return this.bytes;
- }
-
/**
* @return the tag type
*/
- public byte getType() {
- return this.type;
- }
-
- /**
- * @return Length of actual tag bytes within the backed buffer
- */
- public int getTagLength() {
- return this.length - INFRASTRUCTURE_SIZE;
- }
-
- /**
- * @return Offset of actual tag bytes within the backed buffer
- */
- public int getTagOffset() {
- return this.offset + INFRASTRUCTURE_SIZE;
- }
-
- /**
- * Returns tag value in a new byte array.
- * Primarily for use client-side. If server-side, use
- * {@link #getBuffer()} with appropriate {@link #getTagOffset()} and {@link #getTagLength()}
- * instead to save on allocations.
- * @return tag value in a new byte array.
- */
- public byte[] getValue() {
- int tagLength = getTagLength();
- byte[] tag = new byte[tagLength];
- Bytes.putBytes(tag, 0, bytes, getTagOffset(), tagLength);
- return tag;
- }
+ byte getType();
/**
- * Creates the list of tags from the byte array b. Expected that b is in the
- * expected tag format
- * @param b
- * @param offset
- * @param length
- * @return List of tags
+ * @return Offset of tag value within the backed buffer
*/
- public static List<Tag> asList(byte[] b, int offset, int length) {
- List<Tag> tags = new ArrayList<Tag>();
- int pos = offset;
- while (pos < offset + length) {
- int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
- tags.add(new Tag(b, pos, tagLen + TAG_LENGTH_SIZE));
- pos += TAG_LENGTH_SIZE + tagLen;
- }
- return tags;
- }
+ int getValueOffset();
/**
- * Write a list of tags into a byte array
- * @param tags
- * @return the serialized tag data as bytes
+ * @return Length of tag value within the backed buffer
*/
- public static byte[] fromList(List<Tag> tags) {
- int length = 0;
- for (Tag tag: tags) {
- length += tag.length;
- }
- byte[] b = new byte[length];
- int pos = 0;
- for (Tag tag: tags) {
- System.arraycopy(tag.bytes, tag.offset, b, pos, tag.length);
- pos += tag.length;
- }
- return b;
- }
+ int getValueLength();
/**
- * Retrieve the first tag from the tags byte array matching the passed in tag type
- * @param b
- * @param offset
- * @param length
- * @param type
- * @return null if there is no tag of the passed in tag type
+ * Tells whether or not this Tag is backed by a byte array.
+ * @return true when this Tag is backed by a byte array
*/
- public static Tag getTag(byte[] b, int offset, int length, byte type) {
- int pos = offset;
- while (pos < offset + length) {
- int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
- if(b[pos + TAG_LENGTH_SIZE] == type) {
- return new Tag(b, pos, tagLen + TAG_LENGTH_SIZE);
- }
- pos += TAG_LENGTH_SIZE + tagLen;
- }
- return null;
- }
+ boolean hasArray();
/**
- * Returns the total length of the entire tag entity
+ * @return The array containing the value bytes.
+ * @throws UnsupportedOperationException
+ * when {@link #hasArray()} returns false. Use {@link #getValueByteBuffer()} in such
+ * situation
*/
- int getLength() {
- return this.length;
- }
+ byte[] getValueArray();
/**
- * Returns the offset of the entire tag entity
+ * @return The {@link java.nio.ByteBuffer} containing the value bytes.
*/
- int getOffset() {
- return this.offset;
- }
+ ByteBuffer getValueByteBuffer();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
new file mode 100644
index 0000000..15ddfc8
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.io.util.StreamUtils;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
+
+@InterfaceAudience.Private
+public final class TagUtil {
+
+ /**
+ * Private constructor to keep this class from being instantiated.
+ */
+ private TagUtil(){}
+
+ /**
+ * Returns tag value in a new byte array.
+ * Primarily for use client-side. If server-side, use
+ * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()}
+ * and {@link Tag#getValueLength()} instead to save on allocations.
+ *
+ * @param tag The Tag whose value to be returned
+ * @return tag value in a new byte array.
+ */
+ public static byte[] cloneValue(Tag tag) {
+ int tagLength = tag.getValueLength();
+ byte[] tagArr = new byte[tagLength];
+ if (tag.hasArray()) {
+ Bytes.putBytes(tagArr, 0, tag.getValueArray(), tag.getValueOffset(), tagLength);
+ } else {
+ ByteBufferUtils.copyFromBufferToArray(tagArr, tag.getValueByteBuffer(), tag.getValueOffset(),
+ 0, tagLength);
+ }
+ return tagArr;
+ }
+
+ /**
+ * Creates list of tags from given byte array, expected that it is in the expected tag format.
+ *
+ * @param b The byte array
+ * @param offset The offset in array where tag bytes begin
+ * @param length Total length of all tags bytes
+ * @return List of tags
+ */
+ public static List<Tag> asList(byte[] b, int offset, int length) {
+ List<Tag> tags = new ArrayList<Tag>();
+ int pos = offset;
+ while (pos < offset + length) {
+ int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
+ tags.add(new ArrayBackedTag(b, pos, tagLen + TAG_LENGTH_SIZE));
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return tags;
+ }
+
+ /**
+ * Creates list of tags from given ByteBuffer, expected that it is in the expected tag format.
+ *
+ * @param b The ByteBuffer
+ * @param offset The offset in ByteBuffer where tag bytes begin
+ * @param length Total length of all tags bytes
+ * @return List of tags
+ */
+ public static List<Tag> asList(ByteBuffer b, int offset, int length) {
+ List<Tag> tags = new ArrayList<Tag>();
+ int pos = offset;
+ while (pos < offset + length) {
+ int tagLen = ByteBufferUtils.readAsInt(b, pos, TAG_LENGTH_SIZE);
+ tags.add(new OffheapTag(b, pos, tagLen + TAG_LENGTH_SIZE));
+ pos += TAG_LENGTH_SIZE + tagLen;
+ }
+ return tags;
+ }
+
+ /**
+ * Write a list of tags into a byte array
+ *
+ * @param tags The list of tags
+ * @return the serialized tag data as bytes
+ */
+ public static byte[] fromList(List<Tag> tags) {
+ if (tags.isEmpty()) {
+ return HConstants.EMPTY_BYTE_ARRAY;
+ }
+ int length = 0;
+ for (Tag tag : tags) {
+ length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
+ }
+ byte[] b = new byte[length];
+ int pos = 0;
+ int tlen;
+ for (Tag tag : tags) {
+ tlen = tag.getValueLength();
+ pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE);
+ pos = Bytes.putByte(b, pos, tag.getType());
+ if (tag.hasArray()) {
+ pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen);
+ } else {
+ ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(),
+ pos, tlen);
+ pos += tlen;
+ }
+ }
+ return b;
+ }
+
+ /**
+ * Converts the value bytes of the given tag into a long value
+ * @param tag The Tag
+ * @return value as long
+ */
+ public static long getValueAsLong(Tag tag) {
+ if (tag.hasArray()) {
+ return Bytes.toLong(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
+ }
+ return ByteBufferUtils.toLong(tag.getValueByteBuffer(), tag.getValueOffset());
+ }
+
+ /**
+ * Converts the value bytes of the given tag into a byte value
+ * @param tag The Tag
+ * @return value as byte
+ */
+ public static byte getValueAsByte(Tag tag) {
+ if (tag.hasArray()) {
+ return tag.getValueArray()[tag.getValueOffset()];
+ }
+ return ByteBufferUtils.toByte(tag.getValueByteBuffer(), tag.getValueOffset());
+ }
+
+ /**
+ * Converts the value bytes of the given tag into a String value
+ * @param tag The Tag
+ * @return value as String
+ */
+ public static String getValueAsString(Tag tag){
+ if(tag.hasArray()){
+ return Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
+ }
+ return Bytes.toString(cloneValue(tag));
+ }
+
+ /**
+ * Matches the value part of given tags
+ * @param t1 Tag to match the value
+ * @param t2 Tag to match the value
+ * @return True if values of both tags are same.
+ */
+ public static boolean matchingValue(Tag t1, Tag t2) {
+ if (t1.hasArray() && t2.hasArray()) {
+ return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(),
+ t2.getValueArray(), t2.getValueOffset(), t2.getValueLength());
+ }
+ if (t1.hasArray()) {
+ return ByteBufferUtils.equals(t2.getValueByteBuffer(), t2.getValueOffset(),
+ t2.getValueLength(), t1.getValueArray(), t1.getValueOffset(), t1.getValueLength());
+ }
+ if (t2.hasArray()) {
+ return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(),
+ t1.getValueLength(), t2.getValueArray(), t2.getValueOffset(), t2.getValueLength());
+ }
+ return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), t1.getValueLength(),
+ t2.getValueByteBuffer(), t2.getValueOffset(), t2.getValueLength());
+ }
+
+ /**
+ * Copies the tag's value bytes to the given byte array
+ * @param tag The Tag
+ * @param out The byte array where to copy the Tag value.
+ * @param offset The offset within 'out' array where to copy the Tag value.
+ */
+ public static void copyValueTo(Tag tag, byte[] out, int offset) {
+ if (tag.hasArray()) {
+ Bytes.putBytes(out, offset, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
+ } else {
+ ByteBufferUtils.copyFromBufferToArray(out, tag.getValueByteBuffer(), tag.getValueOffset(),
+ offset, tag.getValueLength());
+ }
+ }
+
+ /**
+ * Reads an int value stored as a VInt at tag's given offset.
+ * @param tag The Tag
+ * @param offset The offset where VInt bytes begin
+ * @return A pair of the int value and number of bytes taken to store VInt
+ * @throws IOException When varint is malformed and not able to be read correctly
+ */
+ public static Pair<Integer, Integer> readVIntValuePart(Tag tag, int offset) throws IOException {
+ if (tag.hasArray()) {
+ return StreamUtils.readRawVarint32(tag.getValueArray(), offset);
+ }
+ return StreamUtils.readRawVarint32(tag.getValueByteBuffer(), offset);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java
index 6e13b44..0e1c3ae 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.nio.ByteBuff;
@@ -127,9 +128,10 @@ public class StreamUtils {
* Offset in the input array where varInt is available
* @return A pair of integers in which first value is the actual decoded varInt value and second
* value as number of bytes taken by this varInt for it's storage in the input array.
- * @throws IOException
+ * @throws IOException When varint is malformed and not able to be read correctly
*/
- public static Pair<Integer, Integer> readRawVarint32(byte[] input, int offset) throws IOException {
+ public static Pair<Integer, Integer> readRawVarint32(byte[] input, int offset)
+ throws IOException {
int newOffset = offset;
byte tmp = input[newOffset++];
if (tmp >= 0) {
@@ -169,6 +171,47 @@ public class StreamUtils {
return new Pair<Integer, Integer>(result, newOffset - offset);
}
+ public static Pair<Integer, Integer> readRawVarint32(ByteBuffer input, int offset)
+ throws IOException {
+ int newOffset = offset;
+ byte tmp = input.get(newOffset++);
+ if (tmp >= 0) {
+ return new Pair<Integer, Integer>((int) tmp, newOffset - offset);
+ }
+ int result = tmp & 0x7f;
+ tmp = input.get(newOffset++);
+ if (tmp >= 0) {
+ result |= tmp << 7;
+ } else {
+ result |= (tmp & 0x7f) << 7;
+ tmp = input.get(newOffset++);
+ if (tmp >= 0) {
+ result |= tmp << 14;
+ } else {
+ result |= (tmp & 0x7f) << 14;
+ tmp = input.get(newOffset++);
+ if (tmp >= 0) {
+ result |= tmp << 21;
+ } else {
+ result |= (tmp & 0x7f) << 21;
+ tmp = input.get(newOffset++);
+ result |= tmp << 28;
+ if (tmp < 0) {
+ // Discard upper 32 bits.
+ for (int i = 0; i < 5; i++) {
+ tmp = input.get(newOffset++);
+ if (tmp >= 0) {
+ return new Pair<Integer, Integer>(result, newOffset - offset);
+ }
+ }
+ throw new IOException("Malformed varint");
+ }
+ }
+ }
+ }
+ return new Pair<Integer, Integer>(result, newOffset - offset);
+ }
+
public static short toShort(byte hi, byte lo) {
short s = (short) (((hi & 0xFF) << 8) | (lo & 0xFF));
Preconditions.checkArgument(s >= 0);
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
index 7bcc872..6e3fcaa 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
@@ -751,6 +751,29 @@ public final class ByteBufferUtils {
}
/**
+ * Converts a ByteBuffer to an int value
+ *
+ * @param buf The ByteBuffer
+ * @param offset Offset to int value
+ * @param length Number of bytes used to store the int value.
+ * @return the int value
+ * @throws IllegalArgumentException
+ * if there's not enough bytes left in the buffer after the given offset
+ */
+ public static int readAsInt(ByteBuffer buf, int offset, final int length) {
+ if (offset + length > buf.limit()) {
+ throw new IllegalArgumentException("offset (" + offset + ") + length (" + length
+ + ") exceed the" + " limit of the buffer: " + buf.limit());
+ }
+ int n = 0;
+ for(int i = offset; i < (offset + length); i++) {
+ n <<= 8;
+ n ^= toByte(buf, i) & 0xFF;
+ }
+ return n;
+ }
+
+ /**
* Reads a long value at the given buffer's offset.
* @param buffer
* @param offset
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
index b44a724..7dc3d5a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
@@ -24,6 +24,7 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
@@ -280,8 +281,8 @@ public class RedundantKVGenerator {
}
if (useTags) {
- result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new Tag(
- (byte) 1, "value1") }));
+ result.add(new KeyValue(row, family, qualifier, timestamp, value,
+ new Tag[] { new ArrayBackedTag((byte) 1, "value1") }));
} else {
result.add(new KeyValue(row, family, qualifier, timestamp, value));
}
@@ -365,7 +366,7 @@ public class RedundantKVGenerator {
}
if (useTags) {
KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value,
- new Tag[] { new Tag((byte) 1, "value1") });
+ new Tag[] { new ArrayBackedTag((byte) 1, "value1") });
ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(),
keyValue.getOffset(), keyValue.getLength());
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index cc1e511..e233348 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -442,7 +442,7 @@ public class TestKeyValue extends TestCase {
byte[] metaValue1 = Bytes.toBytes("metaValue1");
byte[] metaValue2 = Bytes.toBytes("metaValue2");
KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, new Tag[] {
- new Tag((byte) 1, metaValue1), new Tag((byte) 2, metaValue2) });
+ new ArrayBackedTag((byte) 1, metaValue1), new ArrayBackedTag((byte) 2, metaValue2) });
assertTrue(kv.getTagsLength() > 0);
assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0,
row.length));
@@ -458,44 +458,42 @@ public class TestKeyValue extends TestCase {
boolean meta1Ok = false, meta2Ok = false;
for (Tag tag : tags) {
if (tag.getType() == (byte) 1) {
- if (Bytes.equals(tag.getValue(), metaValue1)) {
+ if (Bytes.equals(TagUtil.cloneValue(tag), metaValue1)) {
meta1Ok = true;
}
} else {
- if (Bytes.equals(tag.getValue(), metaValue2)) {
+ if (Bytes.equals(TagUtil.cloneValue(tag), metaValue2)) {
meta2Ok = true;
}
}
}
assertTrue(meta1Ok);
assertTrue(meta2Ok);
- Iterator<Tag> tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(),
- kv.getTagsLength());
+ Iterator<Tag> tagItr = CellUtil.tagsIterator(kv);
//Iterator<Tag> tagItr = kv.tagsIterator();
assertTrue(tagItr.hasNext());
Tag next = tagItr.next();
- assertEquals(10, next.getTagLength());
+ assertEquals(10, next.getValueLength());
assertEquals((byte) 1, next.getType());
- Bytes.equals(next.getValue(), metaValue1);
+ Bytes.equals(TagUtil.cloneValue(next), metaValue1);
assertTrue(tagItr.hasNext());
next = tagItr.next();
- assertEquals(10, next.getTagLength());
+ assertEquals(10, next.getValueLength());
assertEquals((byte) 2, next.getType());
- Bytes.equals(next.getValue(), metaValue2);
+ Bytes.equals(TagUtil.cloneValue(next), metaValue2);
assertFalse(tagItr.hasNext());
- tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(),
- kv.getTagsLength());
+ tagItr = CellUtil.tagsIterator(kv);
assertTrue(tagItr.hasNext());
next = tagItr.next();
- assertEquals(10, next.getTagLength());
+ assertEquals(10, next.getValueLength());
assertEquals((byte) 1, next.getType());
- Bytes.equals(next.getValue(), metaValue1);
+ Bytes.equals(TagUtil.cloneValue(next), metaValue1);
assertTrue(tagItr.hasNext());
next = tagItr.next();
- assertEquals(10, next.getTagLength());
+ assertEquals(10, next.getValueLength());
assertEquals((byte) 2, next.getType());
- Bytes.equals(next.getValue(), metaValue2);
+ Bytes.equals(TagUtil.cloneValue(next), metaValue2);
assertFalse(tagItr.hasNext());
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java
index f021215..9e76fc5 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestOffheapKeyValue.java
@@ -43,8 +43,8 @@ public class TestOffheapKeyValue {
private static final byte[] fam2 = Bytes.toBytes(FAM2);
private static final byte[] qual1 = Bytes.toBytes(QUAL1);
private static final byte[] qual2 = Bytes.toBytes(QUAL2);
- private static final Tag t1 = new Tag((byte) 1, Bytes.toBytes("TAG1"));
- private static final Tag t2 = new Tag((byte) 2, Bytes.toBytes("TAG2"));
+ private static final Tag t1 = new ArrayBackedTag((byte) 1, Bytes.toBytes("TAG1"));
+ private static final Tag t2 = new ArrayBackedTag((byte) 2, Bytes.toBytes("TAG2"));
private static final ArrayList<Tag> tags = new ArrayList<Tag>();
static {
tags.add(t1);
@@ -158,17 +158,17 @@ public class TestOffheapKeyValue {
assertEquals(0L, offheapKV.getTimestamp());
assertEquals(Type.Put.getCode(), offheapKV.getTypeByte());
// change tags to handle both onheap and offheap stuff
- List<Tag> resTags =
- Tag.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(), offheapKV.getTagsLength());
+ List<Tag> resTags = TagUtil.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(),
+ offheapKV.getTagsLength());
Tag tag1 = resTags.get(0);
assertEquals(t1.getType(), tag1.getType());
- assertEquals(Bytes.toString(t1.getValue()), Bytes.toString(getTagValue(tag1)));
+ assertEquals(TagUtil.getValueAsString(t1), TagUtil.getValueAsString(tag1));
Tag tag2 = resTags.get(1);
assertEquals(tag2.getType(), tag2.getType());
- assertEquals(Bytes.toString(t2.getValue()), Bytes.toString(getTagValue(tag2)));
- Tag res = Tag.getTag(offheapKV.getTagsArray(), 0, offheapKV.getTagsLength(), (byte) 2);
- assertEquals(Bytes.toString(t2.getValue()), Bytes.toString(getTagValue(tag2)));
- res = Tag.getTag(offheapKV.getTagsArray(), 0, offheapKV.getTagsLength(), (byte) 3);
+ assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2));
+ Tag res = CellUtil.getTag(offheapKV, (byte) 2);
+ assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2));
+ res = CellUtil.getTag(offheapKV, (byte) 3);
assertNull(res);
}
@@ -195,11 +195,4 @@ public class TestOffheapKeyValue {
assertEquals(0L, offheapKeyOnlyKV.getTimestamp());
assertEquals(Type.Put.getCode(), offheapKeyOnlyKV.getTypeByte());
}
- // TODO : Can be moved to TagUtil
- private static byte[] getTagValue(Tag tag) {
- int tagLength = tag.getTagLength();
- byte[] tagBytes = new byte[tagLength];
- System.arraycopy(tag.getBuffer(), tag.getTagOffset(), tagBytes, 0, tagLength);
- return tagBytes;
- }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
index beff87a..cc70742 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
@@ -33,6 +33,8 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.TagUtil;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -54,16 +56,16 @@ public class TestCellCodecWithTags {
Codec.Encoder encoder = codec.getEncoder(dos);
final Cell cell1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] {
- new Tag((byte) 1, Bytes.toBytes("teststring1")),
- new Tag((byte) 2, Bytes.toBytes("teststring2")) });
+ new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")),
+ new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) });
final Cell cell2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"),
- HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new Tag((byte) 1,
+ HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1,
Bytes.toBytes("teststring3")), });
final Cell cell3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] {
- new Tag((byte) 2, Bytes.toBytes("teststring4")),
- new Tag((byte) 2, Bytes.toBytes("teststring5")),
- new Tag((byte) 1, Bytes.toBytes("teststring6")) });
+ new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")),
+ new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")),
+ new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) });
encoder.write(cell1);
encoder.write(cell2);
@@ -77,36 +79,36 @@ public class TestCellCodecWithTags {
assertTrue(decoder.advance());
Cell c = decoder.current();
assertTrue(CellUtil.equals(c, cell1));
- List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+ List<Tag> tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(2, tags.size());
Tag tag = tags.get(0);
assertEquals(1, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag)));
tag = tags.get(1);
assertEquals(2, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag)));
assertTrue(decoder.advance());
c = decoder.current();
assertTrue(CellUtil.equals(c, cell2));
- tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+ tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(1, tags.size());
tag = tags.get(0);
assertEquals(1, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag)));
assertTrue(decoder.advance());
c = decoder.current();
assertTrue(CellUtil.equals(c, cell3));
- tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+ tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(3, tags.size());
tag = tags.get(0);
assertEquals(2, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag)));
tag = tags.get(1);
assertEquals(2, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag)));
tag = tags.get(2);
assertEquals(1, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag)));
assertFalse(decoder.advance());
dis.close();
assertEquals(offset, cis.getCount());
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
index 04fb9a9..238d0a6 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
@@ -33,6 +33,8 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.TagUtil;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -54,16 +56,16 @@ public class TestKeyValueCodecWithTags {
Codec.Encoder encoder = codec.getEncoder(dos);
final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] {
- new Tag((byte) 1, Bytes.toBytes("teststring1")),
- new Tag((byte) 2, Bytes.toBytes("teststring2")) });
+ new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")),
+ new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) });
final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"),
- HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new Tag((byte) 1,
+ HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1,
Bytes.toBytes("teststring3")), });
final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] {
- new Tag((byte) 2, Bytes.toBytes("teststring4")),
- new Tag((byte) 2, Bytes.toBytes("teststring5")),
- new Tag((byte) 1, Bytes.toBytes("teststring6")) });
+ new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")),
+ new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")),
+ new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) });
encoder.write(kv1);
encoder.write(kv2);
@@ -77,36 +79,36 @@ public class TestKeyValueCodecWithTags {
assertTrue(decoder.advance());
Cell c = decoder.current();
assertTrue(CellUtil.equals(c, kv1));
- List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+ List<Tag> tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(2, tags.size());
Tag tag = tags.get(0);
assertEquals(1, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag)));
tag = tags.get(1);
assertEquals(2, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag)));
assertTrue(decoder.advance());
c = decoder.current();
assertTrue(CellUtil.equals(c, kv2));
- tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+ tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(1, tags.size());
tag = tags.get(0);
assertEquals(1, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag)));
assertTrue(decoder.advance());
c = decoder.current();
assertTrue(CellUtil.equals(c, kv3));
- tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+ tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(3, tags.size());
tag = tags.get(0);
assertEquals(2, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag)));
tag = tags.get(1);
assertEquals(2, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag)));
tag = tags.get(2);
assertEquals(1, tag.getType());
- assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), tag.getValue()));
+ assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag)));
assertFalse(decoder.advance());
dis.close();
assertEquals(offset, cis.getCount());
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
index f4c4afe..6c46cf2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/TestTagCompressionContext.java
@@ -28,6 +28,7 @@ import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
import org.apache.hadoop.hbase.nio.SingleByteBuff;
import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -97,7 +98,7 @@ public class TestTagCompressionContext {
private KeyValue createKVWithTags(int noOfTags) {
List<Tag> tags = new ArrayList<Tag>();
for (int i = 0; i < noOfTags; i++) {
- tags.add(new Tag((byte) i, "tagValue" + i));
+ tags.add(new ArrayBackedTag((byte) i, "tagValue" + i));
}
KeyValue kv = new KeyValue(ROW, CF, Q, 1234L, V, tags);
return kv;
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
index bd2a29d..717e24c 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
@@ -22,6 +22,7 @@ import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert;
@@ -65,7 +66,7 @@ public class TestByteRangeWithKVSerialization {
int kvCount = 1000000;
List<KeyValue> kvs = new ArrayList<KeyValue>(kvCount);
int totalSize = 0;
- Tag[] tags = new Tag[] { new Tag((byte) 1, "tag1") };
+ Tag[] tags = new Tag[] { new ArrayBackedTag((byte) 1, "tag1") };
for (int i = 0; i < kvCount; i++) {
KeyValue kv = new KeyValue(Bytes.toBytes(i), FAMILY, QUALIFIER, i, VALUE, tags);
kv.setSequenceId(i);
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
----------------------------------------------------------------------
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
index 3c3699b..a615155 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java
@@ -23,6 +23,7 @@ import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
@@ -46,9 +47,9 @@ public class TestRowDataTrivialWithTags extends BaseTestRowData{
static List<KeyValue> d = Lists.newArrayList();
static {
List<Tag> tagList = new ArrayList<Tag>();
- Tag t = new Tag((byte) 1, "visisbility");
+ Tag t = new ArrayBackedTag((byte) 1, "visisbility");
tagList.add(t);
- t = new Tag((byte) 2, "ACL");
+ t = new ArrayBackedTag((byte) 2, "ACL");
tagList.add(t);
d.add(new KeyValue(rA, cf, cq0, ts, v0, tagList));
d.add(new KeyValue(rB, cf, cq0, ts, v0, tagList));
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
index 8424bf9..dcd5b0a 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -1124,7 +1125,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
byte[] tag = generateData(this.rand, TAG_LENGTH);
Tag[] tags = new Tag[noOfTags];
for (int n = 0; n < noOfTags; n++) {
- Tag t = new Tag((byte) n, tag);
+ Tag t = new ArrayBackedTag((byte) n, tag);
tags[n] = t;
}
KeyValue kv = new KeyValue(row, FAMILY_NAME, QUALIFIER_NAME, HConstants.LATEST_TIMESTAMP,
@@ -1195,7 +1196,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
byte[] tag = generateData(this.rand, TAG_LENGTH);
Tag[] tags = new Tag[noOfTags];
for (int n = 0; n < noOfTags; n++) {
- Tag t = new Tag((byte) n, tag);
+ Tag t = new ArrayBackedTag((byte) n, tag);
tags[n] = t;
}
KeyValue kv = new KeyValue(row, FAMILY_NAME, QUALIFIER_NAME, HConstants.LATEST_TIMESTAMP,
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
index 86d183b..cc202d4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
@@ -59,10 +59,11 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.mob.MobUtils;
@@ -367,11 +368,10 @@ public class HFilePrettyPrinter extends Configured implements Tool {
+ Bytes.toStringBinary(cell.getValueArray(), cell.getValueOffset(),
cell.getValueLength()));
int i = 0;
- List<Tag> tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
+ List<Tag> tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
for (Tag tag : tags) {
- System.out.print(String.format(" T[%d]: %s", i++,
- Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength())));
+ System.out.print(String.format(" T[%d]: %s", i++, TagUtil.getValueAsString(tag)));
}
}
System.out.println();
@@ -411,7 +411,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
System.err.println("ERROR, wrong value format in mob reference cell "
+ CellUtil.getCellKeyAsString(cell));
} else {
- TableName tn = TableName.valueOf(tnTag.getValue());
+ TableName tn = TableName.valueOf(TagUtil.cloneValue(tnTag));
String mobFileName = MobUtils.getMobFileName(cell);
boolean exist = mobFileExists(fs, tn, mobFileName,
Bytes.toString(CellUtil.cloneFamily(cell)), foundMobFiles, missingMobFiles);
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
index c201eb7..d2adbd4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
@@ -25,6 +25,7 @@ import java.util.Set;
import java.util.TreeSet;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
@@ -169,7 +170,7 @@ public class TextSortReducer extends
// Add TTL directly to the KV so we can vary them when packing more than one KV
// into puts
if (ttl > 0) {
- tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
+ tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
}
for (int i = 0; i < parsed.getColumnCount(); i++) {
if (i == parser.getRowKeyColumnIndex() || i == parser.getTimestampKeyColumnIndex()
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java
index 98dc25e..e14874b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
@@ -170,7 +171,7 @@ extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put>
// Add TTL directly to the KV so we can vary them when packing more than one KV
// into puts
if (ttl > 0) {
- tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
+ tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
}
}
Put put = new Put(rowKey.copyBytes());
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
index f48bb94..b5f412d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
@@ -26,6 +26,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
@@ -167,7 +168,8 @@ public class DefaultMobStoreCompactor extends DefaultCompactor {
byte[] fileName = null;
StoreFile.Writer mobFileWriter = null, delFileWriter = null;
long mobCells = 0, deleteMarkersCount = 0;
- Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName().getName());
+ Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE,
+ store.getTableName().getName());
long cellsCountCompactedToMob = 0, cellsCountCompactedFromMob = 0;
long cellsSizeCompactedToMob = 0, cellsSizeCompactedFromMob = 0;
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
index ff350bf..999d25c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
@@ -27,6 +27,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -166,8 +167,8 @@ public class DefaultMobStoreFlusher extends DefaultStoreFlusher {
// the relative path is mobFiles
byte[] fileName = Bytes.toBytes(mobFileWriter.getPath().getName());
try {
- Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName()
- .getName());
+ Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE,
+ store.getTableName().getName());
List<Cell> cells = new ArrayList<Cell>();
boolean hasMore;
ScannerContext scannerContext =
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java
index 4bdfe97..82fc9cf 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java
@@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.mob;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
@@ -66,7 +67,7 @@ public final class MobConstants {
public static final String MOB_CACHE_EVICT_PERIOD = "hbase.mob.cache.evict.period";
public static final String MOB_CACHE_EVICT_REMAIN_RATIO = "hbase.mob.cache.evict.remain.ratio";
- public static final Tag MOB_REF_TAG = new Tag(TagType.MOB_REFERENCE_TAG_TYPE,
+ public static final Tag MOB_REF_TAG = new ArrayBackedTag(TagType.MOB_REFERENCE_TAG_TYPE,
HConstants.EMPTY_BYTE_ARRAY);
public static final float DEFAULT_EVICT_REMAIN_RATIO = 0.5f;
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
index d654788..52a19f5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobUtils.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -122,8 +123,7 @@ public final class MobUtils {
*/
public static boolean isMobReferenceCell(Cell cell) {
if (cell.getTagsLength() > 0) {
- Tag tag = Tag.getTag(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(),
- TagType.MOB_REFERENCE_TAG_TYPE);
+ Tag tag = CellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE);
return tag != null;
}
return false;
@@ -136,9 +136,7 @@ public final class MobUtils {
*/
public static Tag getTableNameTag(Cell cell) {
if (cell.getTagsLength() > 0) {
- Tag tag = Tag.getTag(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(),
- TagType.MOB_TABLE_NAME_TAG_TYPE);
- return tag;
+ return CellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE);
}
return null;
}
@@ -438,7 +436,7 @@ public final class MobUtils {
// snapshot for mob files.
tags.add(tableNameTag);
// Add the existing tags.
- tags.addAll(Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()));
+ tags.addAll(CellUtil.getTags(cell));
int valueLength = cell.getValueLength();
byte[] refValue = Bytes.add(Bytes.toBytes(valueLength), fileName);
KeyValue reference = new KeyValue(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
index ab9ee7e..6c6f115 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -113,7 +114,7 @@ public class PartitionedMobCompactor extends MobCompactor {
Configuration copyOfConf = new Configuration(conf);
copyOfConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0f);
compactionCacheConfig = new CacheConfig(copyOfConf);
- tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName());
+ tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName());
cryptoContext = EncryptionUtil.createEncryptionContext(copyOfConf, column);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a9b671b3/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java
index 3daef7e..5955cc2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/MemStoreWrapper.java
@@ -25,6 +25,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -156,8 +157,8 @@ public class MemStoreWrapper {
scanner = snapshot.getScanner();
scanner.seek(KeyValueUtil.createFirstOnRow(HConstants.EMPTY_START_ROW));
cell = null;
- Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, Bytes.toBytes(this.table.getName()
- .toString()));
+ Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE,
+ Bytes.toBytes(this.table.getName().toString()));
long updatedCount = 0;
while (null != (cell = scanner.next())) {
KeyValue reference = MobUtils.createMobRefKeyValue(cell, referenceValue, tableNameTag);