You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by jm...@apache.org on 2013/02/14 13:58:21 UTC
svn commit: r1446147 [6/35] - in /hbase/branches/hbase-7290v2: ./ bin/ conf/
dev-support/ hbase-client/ hbase-common/
hbase-common/src/main/java/org/apache/hadoop/hbase/
hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/
hbase-common/src/m...
Modified: hbase/branches/hbase-7290v2/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java (original)
+++ hbase/branches/hbase-7290v2/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java Thu Feb 14 12:58:12 2013
@@ -601,11 +601,1150 @@ public final class HFileProtos {
// @@protoc_insertion_point(class_scope:FileInfoProto)
}
// Protoc-generated code (note the @@protoc_insertion_point markers elsewhere
// in this file) -- do not hand-edit; regenerate from the .proto definition.
/**
 * Accessor contract shared by {@code FileTrailerProto} and its Builder.
 * Every field is optional: each {@code getX()} is paired with a {@code hasX()}
 * that reports whether the field was explicitly set.
 */
public interface FileTrailerProtoOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // optional uint64 fileInfoOffset = 1;
  boolean hasFileInfoOffset();
  long getFileInfoOffset();

  // optional uint64 loadOnOpenDataOffset = 2;
  boolean hasLoadOnOpenDataOffset();
  long getLoadOnOpenDataOffset();

  // optional uint64 uncompressedDataIndexSize = 3;
  boolean hasUncompressedDataIndexSize();
  long getUncompressedDataIndexSize();

  // optional uint64 totalUncompressedBytes = 4;
  boolean hasTotalUncompressedBytes();
  long getTotalUncompressedBytes();

  // optional uint32 dataIndexCount = 5;
  boolean hasDataIndexCount();
  int getDataIndexCount();

  // optional uint32 metaIndexCount = 6;
  boolean hasMetaIndexCount();
  int getMetaIndexCount();

  // optional uint64 entryCount = 7;
  boolean hasEntryCount();
  long getEntryCount();

  // optional uint32 numDataIndexLevels = 8;
  boolean hasNumDataIndexLevels();
  int getNumDataIndexLevels();

  // optional uint64 firstDataBlockOffset = 9;
  boolean hasFirstDataBlockOffset();
  long getFirstDataBlockOffset();

  // optional uint64 lastDataBlockOffset = 10;
  boolean hasLastDataBlockOffset();
  long getLastDataBlockOffset();

  // optional string comparatorClassName = 11;
  boolean hasComparatorClassName();
  String getComparatorClassName();

  // optional uint32 compressionCodec = 12;
  boolean hasCompressionCodec();
  int getCompressionCodec();
}
// Protoc-generated message class -- do not hand-edit; regenerate from the
// .proto definition instead.
public static final class FileTrailerProto extends
    com.google.protobuf.GeneratedMessage
    implements FileTrailerProtoOrBuilder {
  // Use FileTrailerProto.newBuilder() to construct.
  private FileTrailerProto(Builder builder) {
    super(builder);
  }
  // Used only to create the shared default (empty) instance.
  private FileTrailerProto(boolean noInit) {}

  private static final FileTrailerProto defaultInstance;
  public static FileTrailerProto getDefaultInstance() {
    return defaultInstance;
  }

  public FileTrailerProto getDefaultInstanceForType() {
    return defaultInstance;
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_descriptor;
  }

  // Wires up the reflective field accessors generated for this message type.
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_fieldAccessorTable;
  }
+
// One presence bit per optional field: field N uses bit (1 << (N-1)).
private int bitField0_;
// optional uint64 fileInfoOffset = 1;
public static final int FILEINFOOFFSET_FIELD_NUMBER = 1;
private long fileInfoOffset_;
public boolean hasFileInfoOffset() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
public long getFileInfoOffset() {
  return fileInfoOffset_;
}

// optional uint64 loadOnOpenDataOffset = 2;
public static final int LOADONOPENDATAOFFSET_FIELD_NUMBER = 2;
private long loadOnOpenDataOffset_;
public boolean hasLoadOnOpenDataOffset() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
public long getLoadOnOpenDataOffset() {
  return loadOnOpenDataOffset_;
}

// optional uint64 uncompressedDataIndexSize = 3;
public static final int UNCOMPRESSEDDATAINDEXSIZE_FIELD_NUMBER = 3;
private long uncompressedDataIndexSize_;
public boolean hasUncompressedDataIndexSize() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
public long getUncompressedDataIndexSize() {
  return uncompressedDataIndexSize_;
}

// optional uint64 totalUncompressedBytes = 4;
public static final int TOTALUNCOMPRESSEDBYTES_FIELD_NUMBER = 4;
private long totalUncompressedBytes_;
public boolean hasTotalUncompressedBytes() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
public long getTotalUncompressedBytes() {
  return totalUncompressedBytes_;
}

// optional uint32 dataIndexCount = 5;
public static final int DATAINDEXCOUNT_FIELD_NUMBER = 5;
private int dataIndexCount_;
public boolean hasDataIndexCount() {
  return ((bitField0_ & 0x00000010) == 0x00000010);
}
public int getDataIndexCount() {
  return dataIndexCount_;
}

// optional uint32 metaIndexCount = 6;
public static final int METAINDEXCOUNT_FIELD_NUMBER = 6;
private int metaIndexCount_;
public boolean hasMetaIndexCount() {
  return ((bitField0_ & 0x00000020) == 0x00000020);
}
public int getMetaIndexCount() {
  return metaIndexCount_;
}

// optional uint64 entryCount = 7;
public static final int ENTRYCOUNT_FIELD_NUMBER = 7;
private long entryCount_;
public boolean hasEntryCount() {
  return ((bitField0_ & 0x00000040) == 0x00000040);
}
public long getEntryCount() {
  return entryCount_;
}

// optional uint32 numDataIndexLevels = 8;
public static final int NUMDATAINDEXLEVELS_FIELD_NUMBER = 8;
private int numDataIndexLevels_;
public boolean hasNumDataIndexLevels() {
  return ((bitField0_ & 0x00000080) == 0x00000080);
}
public int getNumDataIndexLevels() {
  return numDataIndexLevels_;
}

// optional uint64 firstDataBlockOffset = 9;
public static final int FIRSTDATABLOCKOFFSET_FIELD_NUMBER = 9;
private long firstDataBlockOffset_;
public boolean hasFirstDataBlockOffset() {
  return ((bitField0_ & 0x00000100) == 0x00000100);
}
public long getFirstDataBlockOffset() {
  return firstDataBlockOffset_;
}

// optional uint64 lastDataBlockOffset = 10;
public static final int LASTDATABLOCKOFFSET_FIELD_NUMBER = 10;
private long lastDataBlockOffset_;
public boolean hasLastDataBlockOffset() {
  return ((bitField0_ & 0x00000200) == 0x00000200);
}
public long getLastDataBlockOffset() {
  return lastDataBlockOffset_;
}

// optional string comparatorClassName = 11;
// Stored as either a String or a ByteString; lazily converted on access.
public static final int COMPARATORCLASSNAME_FIELD_NUMBER = 11;
private java.lang.Object comparatorClassName_;
public boolean hasComparatorClassName() {
  return ((bitField0_ & 0x00000400) == 0x00000400);
}
public String getComparatorClassName() {
  java.lang.Object ref = comparatorClassName_;
  if (ref instanceof String) {
    return (String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    String s = bs.toStringUtf8();
    // Cache the decoded form only when the bytes are valid UTF-8.
    if (com.google.protobuf.Internal.isValidUtf8(bs)) {
      comparatorClassName_ = s;
    }
    return s;
  }
}
private com.google.protobuf.ByteString getComparatorClassNameBytes() {
  java.lang.Object ref = comparatorClassName_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((String) ref);
    // Cache the encoded form for subsequent serializations.
    comparatorClassName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

// optional uint32 compressionCodec = 12;
public static final int COMPRESSIONCODEC_FIELD_NUMBER = 12;
private int compressionCodec_;
public boolean hasCompressionCodec() {
  return ((bitField0_ & 0x00000800) == 0x00000800);
}
public int getCompressionCodec() {
  return compressionCodec_;
}
+
// Resets every field to its proto default (0 for numerics, "" for the string).
private void initFields() {
  fileInfoOffset_ = 0L;
  loadOnOpenDataOffset_ = 0L;
  uncompressedDataIndexSize_ = 0L;
  totalUncompressedBytes_ = 0L;
  dataIndexCount_ = 0;
  metaIndexCount_ = 0;
  entryCount_ = 0L;
  numDataIndexLevels_ = 0;
  firstDataBlockOffset_ = 0L;
  lastDataBlockOffset_ = 0L;
  comparatorClassName_ = "";
  compressionCodec_ = 0;
}
// Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  // This message declares no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
+
/**
 * Serializes every field whose presence bit is set, in field-number order,
 * followed by any unknown fields.  getSerializedSize() is called first so
 * the memoized size is populated before writing.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeUInt64(1, fileInfoOffset_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeUInt64(2, loadOnOpenDataOffset_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeUInt64(3, uncompressedDataIndexSize_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeUInt64(4, totalUncompressedBytes_);
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    output.writeUInt32(5, dataIndexCount_);
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    output.writeUInt32(6, metaIndexCount_);
  }
  if (((bitField0_ & 0x00000040) == 0x00000040)) {
    output.writeUInt64(7, entryCount_);
  }
  if (((bitField0_ & 0x00000080) == 0x00000080)) {
    output.writeUInt32(8, numDataIndexLevels_);
  }
  if (((bitField0_ & 0x00000100) == 0x00000100)) {
    output.writeUInt64(9, firstDataBlockOffset_);
  }
  if (((bitField0_ & 0x00000200) == 0x00000200)) {
    output.writeUInt64(10, lastDataBlockOffset_);
  }
  if (((bitField0_ & 0x00000400) == 0x00000400)) {
    output.writeBytes(11, getComparatorClassNameBytes());
  }
  if (((bitField0_ & 0x00000800) == 0x00000800)) {
    output.writeUInt32(12, compressionCodec_);
  }
  getUnknownFields().writeTo(output);
}
+
// Memoized wire size of this message; -1 until first computed.  Safe to cache
// because the message is immutable once built.
private int memoizedSerializedSize = -1;
/** Returns the exact number of bytes writeTo() will emit (set fields only). */
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(1, fileInfoOffset_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(2, loadOnOpenDataOffset_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(3, uncompressedDataIndexSize_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(4, totalUncompressedBytes_);
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt32Size(5, dataIndexCount_);
  }
  if (((bitField0_ & 0x00000020) == 0x00000020)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt32Size(6, metaIndexCount_);
  }
  if (((bitField0_ & 0x00000040) == 0x00000040)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(7, entryCount_);
  }
  if (((bitField0_ & 0x00000080) == 0x00000080)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt32Size(8, numDataIndexLevels_);
  }
  if (((bitField0_ & 0x00000100) == 0x00000100)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(9, firstDataBlockOffset_);
  }
  if (((bitField0_ & 0x00000200) == 0x00000200)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt64Size(10, lastDataBlockOffset_);
  }
  if (((bitField0_ & 0x00000400) == 0x00000400)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(11, getComparatorClassNameBytes());
  }
  if (((bitField0_ & 0x00000800) == 0x00000800)) {
    size += com.google.protobuf.CodedOutputStream
      .computeUInt32Size(12, compressionCodec_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
+
private static final long serialVersionUID = 0L;
// Java-serialization hook: delegates to GeneratedMessage, which substitutes
// a serialization proxy so protobuf wire format is used instead of field state.
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}
+
/**
 * Field-by-field equality: two messages are equal when they have the same
 * set of present fields with equal values, and equal unknown-field sets.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto other = (org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto) obj;

  boolean result = true;
  result = result && (hasFileInfoOffset() == other.hasFileInfoOffset());
  if (hasFileInfoOffset()) {
    result = result && (getFileInfoOffset()
        == other.getFileInfoOffset());
  }
  result = result && (hasLoadOnOpenDataOffset() == other.hasLoadOnOpenDataOffset());
  if (hasLoadOnOpenDataOffset()) {
    result = result && (getLoadOnOpenDataOffset()
        == other.getLoadOnOpenDataOffset());
  }
  result = result && (hasUncompressedDataIndexSize() == other.hasUncompressedDataIndexSize());
  if (hasUncompressedDataIndexSize()) {
    result = result && (getUncompressedDataIndexSize()
        == other.getUncompressedDataIndexSize());
  }
  result = result && (hasTotalUncompressedBytes() == other.hasTotalUncompressedBytes());
  if (hasTotalUncompressedBytes()) {
    result = result && (getTotalUncompressedBytes()
        == other.getTotalUncompressedBytes());
  }
  result = result && (hasDataIndexCount() == other.hasDataIndexCount());
  if (hasDataIndexCount()) {
    result = result && (getDataIndexCount()
        == other.getDataIndexCount());
  }
  result = result && (hasMetaIndexCount() == other.hasMetaIndexCount());
  if (hasMetaIndexCount()) {
    result = result && (getMetaIndexCount()
        == other.getMetaIndexCount());
  }
  result = result && (hasEntryCount() == other.hasEntryCount());
  if (hasEntryCount()) {
    result = result && (getEntryCount()
        == other.getEntryCount());
  }
  result = result && (hasNumDataIndexLevels() == other.hasNumDataIndexLevels());
  if (hasNumDataIndexLevels()) {
    result = result && (getNumDataIndexLevels()
        == other.getNumDataIndexLevels());
  }
  result = result && (hasFirstDataBlockOffset() == other.hasFirstDataBlockOffset());
  if (hasFirstDataBlockOffset()) {
    result = result && (getFirstDataBlockOffset()
        == other.getFirstDataBlockOffset());
  }
  result = result && (hasLastDataBlockOffset() == other.hasLastDataBlockOffset());
  if (hasLastDataBlockOffset()) {
    result = result && (getLastDataBlockOffset()
        == other.getLastDataBlockOffset());
  }
  result = result && (hasComparatorClassName() == other.hasComparatorClassName());
  if (hasComparatorClassName()) {
    result = result && getComparatorClassName()
        .equals(other.getComparatorClassName());
  }
  result = result && (hasCompressionCodec() == other.hasCompressionCodec());
  if (hasCompressionCodec()) {
    result = result && (getCompressionCodec()
        == other.getCompressionCodec());
  }
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}
+
/**
 * Hash consistent with equals(): mixes the descriptor, each present field
 * (tagged with its field number), and the unknown-field set.
 */
@java.lang.Override
public int hashCode() {
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasFileInfoOffset()) {
    hash = (37 * hash) + FILEINFOOFFSET_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getFileInfoOffset());
  }
  if (hasLoadOnOpenDataOffset()) {
    hash = (37 * hash) + LOADONOPENDATAOFFSET_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getLoadOnOpenDataOffset());
  }
  if (hasUncompressedDataIndexSize()) {
    hash = (37 * hash) + UNCOMPRESSEDDATAINDEXSIZE_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getUncompressedDataIndexSize());
  }
  if (hasTotalUncompressedBytes()) {
    hash = (37 * hash) + TOTALUNCOMPRESSEDBYTES_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getTotalUncompressedBytes());
  }
  if (hasDataIndexCount()) {
    hash = (37 * hash) + DATAINDEXCOUNT_FIELD_NUMBER;
    hash = (53 * hash) + getDataIndexCount();
  }
  if (hasMetaIndexCount()) {
    hash = (37 * hash) + METAINDEXCOUNT_FIELD_NUMBER;
    hash = (53 * hash) + getMetaIndexCount();
  }
  if (hasEntryCount()) {
    hash = (37 * hash) + ENTRYCOUNT_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getEntryCount());
  }
  if (hasNumDataIndexLevels()) {
    hash = (37 * hash) + NUMDATAINDEXLEVELS_FIELD_NUMBER;
    hash = (53 * hash) + getNumDataIndexLevels();
  }
  if (hasFirstDataBlockOffset()) {
    hash = (37 * hash) + FIRSTDATABLOCKOFFSET_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getFirstDataBlockOffset());
  }
  if (hasLastDataBlockOffset()) {
    hash = (37 * hash) + LASTDATABLOCKOFFSET_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getLastDataBlockOffset());
  }
  if (hasComparatorClassName()) {
    hash = (37 * hash) + COMPARATORCLASSNAME_FIELD_NUMBER;
    hash = (53 * hash) + getComparatorClassName().hashCode();
  }
  if (hasCompressionCodec()) {
    hash = (37 * hash) + COMPRESSIONCODEC_FIELD_NUMBER;
    hash = (53 * hash) + getCompressionCodec();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  return hash;
}
+
// ---- Static parse entry points.  Each routes through a fresh Builder and
// ---- buildParsed(), which reports missing required fields as
// ---- InvalidProtocolBufferException rather than an unchecked exception.
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
           .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
           .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
           .buildParsed();
}
// Delimited variants read a varint length prefix first; they return null on
// clean end-of-stream instead of throwing.
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  Builder builder = newBuilder();
  if (builder.mergeDelimitedFrom(input)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  Builder builder = newBuilder();
  if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
           .buildParsed();
}
+
// ---- Builder factory methods.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a new builder pre-populated with the given message's fields.
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

// Creates a builder attached to a parent (used for nested-builder change
// notification inside the protobuf runtime).
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
// Mutable builder for FileTrailerProto.  Protoc-generated; not thread-safe.
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder>
   implements org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProtoOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_fieldAccessorTable;
  }
+
// Construct using org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// Eagerly initializes nested field builders when the runtime requires it;
// this message has no sub-message fields, so the body is empty.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
  }
}
private static Builder create() {
  return new Builder();
}
+
/** Resets every field to its default value and clears all presence bits. */
public Builder clear() {
  super.clear();
  fileInfoOffset_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000001);
  loadOnOpenDataOffset_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000002);
  uncompressedDataIndexSize_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000004);
  totalUncompressedBytes_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000008);
  dataIndexCount_ = 0;
  bitField0_ = (bitField0_ & ~0x00000010);
  metaIndexCount_ = 0;
  bitField0_ = (bitField0_ & ~0x00000020);
  entryCount_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000040);
  numDataIndexLevels_ = 0;
  bitField0_ = (bitField0_ & ~0x00000080);
  firstDataBlockOffset_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000100);
  lastDataBlockOffset_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000200);
  comparatorClassName_ = "";
  bitField0_ = (bitField0_ & ~0x00000400);
  compressionCodec_ = 0;
  bitField0_ = (bitField0_ & ~0x00000800);
  return this;
}

// Deep copy via an intermediate partially-built message.
public Builder clone() {
  return create().mergeFrom(buildPartial());
}
+
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.getDescriptor();
}

public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.getDefaultInstance();
}
+
/** Builds the message; throws UninitializedMessageException if incomplete. */
public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto build() {
  org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Like build(), but used on the parse path: converts an uninitialized-message
// failure into the checked InvalidProtocolBufferException callers expect.
private org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto buildParsed()
    throws com.google.protobuf.InvalidProtocolBufferException {
  org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(
      result).asInvalidProtocolBufferException();
  }
  return result;
}
+
/**
 * Builds the message without checking initialization.  Field values are
 * copied unconditionally (unset ones are defaults); only the presence bits
 * are transferred conditionally into the result's bitField0_.
 */
public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto buildPartial() {
  org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto result = new org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.fileInfoOffset_ = fileInfoOffset_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.loadOnOpenDataOffset_ = loadOnOpenDataOffset_;
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
    to_bitField0_ |= 0x00000004;
  }
  result.uncompressedDataIndexSize_ = uncompressedDataIndexSize_;
  if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
    to_bitField0_ |= 0x00000008;
  }
  result.totalUncompressedBytes_ = totalUncompressedBytes_;
  if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
    to_bitField0_ |= 0x00000010;
  }
  result.dataIndexCount_ = dataIndexCount_;
  if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
    to_bitField0_ |= 0x00000020;
  }
  result.metaIndexCount_ = metaIndexCount_;
  if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
    to_bitField0_ |= 0x00000040;
  }
  result.entryCount_ = entryCount_;
  if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
    to_bitField0_ |= 0x00000080;
  }
  result.numDataIndexLevels_ = numDataIndexLevels_;
  if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
    to_bitField0_ |= 0x00000100;
  }
  result.firstDataBlockOffset_ = firstDataBlockOffset_;
  if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
    to_bitField0_ |= 0x00000200;
  }
  result.lastDataBlockOffset_ = lastDataBlockOffset_;
  if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
    to_bitField0_ |= 0x00000400;
  }
  result.comparatorClassName_ = comparatorClassName_;
  if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
    to_bitField0_ |= 0x00000800;
  }
  result.compressionCodec_ = compressionCodec_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
+
// Dynamic dispatch: use the fast typed merge when possible, otherwise fall
// back to the reflective merge in the superclass.
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto) {
    return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

/** Copies every field that is set on {@code other} into this builder. */
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto other) {
  if (other == org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.getDefaultInstance()) return this;
  if (other.hasFileInfoOffset()) {
    setFileInfoOffset(other.getFileInfoOffset());
  }
  if (other.hasLoadOnOpenDataOffset()) {
    setLoadOnOpenDataOffset(other.getLoadOnOpenDataOffset());
  }
  if (other.hasUncompressedDataIndexSize()) {
    setUncompressedDataIndexSize(other.getUncompressedDataIndexSize());
  }
  if (other.hasTotalUncompressedBytes()) {
    setTotalUncompressedBytes(other.getTotalUncompressedBytes());
  }
  if (other.hasDataIndexCount()) {
    setDataIndexCount(other.getDataIndexCount());
  }
  if (other.hasMetaIndexCount()) {
    setMetaIndexCount(other.getMetaIndexCount());
  }
  if (other.hasEntryCount()) {
    setEntryCount(other.getEntryCount());
  }
  if (other.hasNumDataIndexLevels()) {
    setNumDataIndexLevels(other.getNumDataIndexLevels());
  }
  if (other.hasFirstDataBlockOffset()) {
    setFirstDataBlockOffset(other.getFirstDataBlockOffset());
  }
  if (other.hasLastDataBlockOffset()) {
    setLastDataBlockOffset(other.getLastDataBlockOffset());
  }
  if (other.hasComparatorClassName()) {
    setComparatorClassName(other.getComparatorClassName());
  }
  if (other.hasCompressionCodec()) {
    setCompressionCodec(other.getCompressionCodec());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}

// No required fields, so a builder is always initialized.
public final boolean isInitialized() {
  return true;
}
+
/**
 * Parses fields from the wire into this builder.  Each case value is a raw
 * tag: (fieldNumber << 3) | wireType -- e.g. 8 = field 1 varint, 90 = field
 * 11 length-delimited.  Tag 0 means end of message; unrecognized tags are
 * preserved in the unknown-field set.  (Protoc emits the default case before
 * the numbered cases; case order in a switch is irrelevant.)
 */
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
    com.google.protobuf.UnknownFieldSet.newBuilder(
      this.getUnknownFields());
  while (true) {
    int tag = input.readTag();
    switch (tag) {
      case 0:
        this.setUnknownFields(unknownFields.build());
        onChanged();
        return this;
      default: {
        if (!parseUnknownField(input, unknownFields,
                               extensionRegistry, tag)) {
          this.setUnknownFields(unknownFields.build());
          onChanged();
          return this;
        }
        break;
      }
      case 8: {
        bitField0_ |= 0x00000001;
        fileInfoOffset_ = input.readUInt64();
        break;
      }
      case 16: {
        bitField0_ |= 0x00000002;
        loadOnOpenDataOffset_ = input.readUInt64();
        break;
      }
      case 24: {
        bitField0_ |= 0x00000004;
        uncompressedDataIndexSize_ = input.readUInt64();
        break;
      }
      case 32: {
        bitField0_ |= 0x00000008;
        totalUncompressedBytes_ = input.readUInt64();
        break;
      }
      case 40: {
        bitField0_ |= 0x00000010;
        dataIndexCount_ = input.readUInt32();
        break;
      }
      case 48: {
        bitField0_ |= 0x00000020;
        metaIndexCount_ = input.readUInt32();
        break;
      }
      case 56: {
        bitField0_ |= 0x00000040;
        entryCount_ = input.readUInt64();
        break;
      }
      case 64: {
        bitField0_ |= 0x00000080;
        numDataIndexLevels_ = input.readUInt32();
        break;
      }
      case 72: {
        bitField0_ |= 0x00000100;
        firstDataBlockOffset_ = input.readUInt64();
        break;
      }
      case 80: {
        bitField0_ |= 0x00000200;
        lastDataBlockOffset_ = input.readUInt64();
        break;
      }
      case 90: {
        bitField0_ |= 0x00000400;
        comparatorClassName_ = input.readBytes();
        break;
      }
      case 96: {
        bitField0_ |= 0x00000800;
        compressionCodec_ = input.readUInt32();
        break;
      }
    }
  }
}
+
// Presence bits mirroring the message's bitField0_ (field N -> 1 << (N-1)).
private int bitField0_;

// ---- Per-field builder state and accessors.  Each setter records the value,
// ---- sets the presence bit, and fires onChanged() so parent builders see
// ---- the modification; each clear* resets both value and bit.

// optional uint64 fileInfoOffset = 1;
private long fileInfoOffset_ ;
public boolean hasFileInfoOffset() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
public long getFileInfoOffset() {
  return fileInfoOffset_;
}
public Builder setFileInfoOffset(long value) {
  bitField0_ |= 0x00000001;
  fileInfoOffset_ = value;
  onChanged();
  return this;
}
public Builder clearFileInfoOffset() {
  bitField0_ = (bitField0_ & ~0x00000001);
  fileInfoOffset_ = 0L;
  onChanged();
  return this;
}

// optional uint64 loadOnOpenDataOffset = 2;
private long loadOnOpenDataOffset_ ;
public boolean hasLoadOnOpenDataOffset() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
public long getLoadOnOpenDataOffset() {
  return loadOnOpenDataOffset_;
}
public Builder setLoadOnOpenDataOffset(long value) {
  bitField0_ |= 0x00000002;
  loadOnOpenDataOffset_ = value;
  onChanged();
  return this;
}
public Builder clearLoadOnOpenDataOffset() {
  bitField0_ = (bitField0_ & ~0x00000002);
  loadOnOpenDataOffset_ = 0L;
  onChanged();
  return this;
}

// optional uint64 uncompressedDataIndexSize = 3;
private long uncompressedDataIndexSize_ ;
public boolean hasUncompressedDataIndexSize() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
public long getUncompressedDataIndexSize() {
  return uncompressedDataIndexSize_;
}
public Builder setUncompressedDataIndexSize(long value) {
  bitField0_ |= 0x00000004;
  uncompressedDataIndexSize_ = value;
  onChanged();
  return this;
}
public Builder clearUncompressedDataIndexSize() {
  bitField0_ = (bitField0_ & ~0x00000004);
  uncompressedDataIndexSize_ = 0L;
  onChanged();
  return this;
}

// optional uint64 totalUncompressedBytes = 4;
private long totalUncompressedBytes_ ;
public boolean hasTotalUncompressedBytes() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
public long getTotalUncompressedBytes() {
  return totalUncompressedBytes_;
}
public Builder setTotalUncompressedBytes(long value) {
  bitField0_ |= 0x00000008;
  totalUncompressedBytes_ = value;
  onChanged();
  return this;
}
public Builder clearTotalUncompressedBytes() {
  bitField0_ = (bitField0_ & ~0x00000008);
  totalUncompressedBytes_ = 0L;
  onChanged();
  return this;
}

// optional uint32 dataIndexCount = 5;
private int dataIndexCount_ ;
public boolean hasDataIndexCount() {
  return ((bitField0_ & 0x00000010) == 0x00000010);
}
public int getDataIndexCount() {
  return dataIndexCount_;
}
public Builder setDataIndexCount(int value) {
  bitField0_ |= 0x00000010;
  dataIndexCount_ = value;
  onChanged();
  return this;
}
public Builder clearDataIndexCount() {
  bitField0_ = (bitField0_ & ~0x00000010);
  dataIndexCount_ = 0;
  onChanged();
  return this;
}

// optional uint32 metaIndexCount = 6;
private int metaIndexCount_ ;
public boolean hasMetaIndexCount() {
  return ((bitField0_ & 0x00000020) == 0x00000020);
}
public int getMetaIndexCount() {
  return metaIndexCount_;
}
public Builder setMetaIndexCount(int value) {
  bitField0_ |= 0x00000020;
  metaIndexCount_ = value;
  onChanged();
  return this;
}
public Builder clearMetaIndexCount() {
  bitField0_ = (bitField0_ & ~0x00000020);
  metaIndexCount_ = 0;
  onChanged();
  return this;
}

// optional uint64 entryCount = 7;
private long entryCount_ ;
public boolean hasEntryCount() {
  return ((bitField0_ & 0x00000040) == 0x00000040);
}
public long getEntryCount() {
  return entryCount_;
}
public Builder setEntryCount(long value) {
  bitField0_ |= 0x00000040;
  entryCount_ = value;
  onChanged();
  return this;
}
public Builder clearEntryCount() {
  bitField0_ = (bitField0_ & ~0x00000040);
  entryCount_ = 0L;
  onChanged();
  return this;
}

// optional uint32 numDataIndexLevels = 8;
private int numDataIndexLevels_ ;
public boolean hasNumDataIndexLevels() {
  return ((bitField0_ & 0x00000080) == 0x00000080);
}
public int getNumDataIndexLevels() {
  return numDataIndexLevels_;
}
public Builder setNumDataIndexLevels(int value) {
  bitField0_ |= 0x00000080;
  numDataIndexLevels_ = value;
  onChanged();
  return this;
}
+ public Builder clearNumDataIndexLevels() {
+ bitField0_ = (bitField0_ & ~0x00000080);
+ numDataIndexLevels_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 firstDataBlockOffset = 9;
+ private long firstDataBlockOffset_ ;
+ public boolean hasFirstDataBlockOffset() {
+ return ((bitField0_ & 0x00000100) == 0x00000100);
+ }
+ public long getFirstDataBlockOffset() {
+ return firstDataBlockOffset_;
+ }
+ public Builder setFirstDataBlockOffset(long value) {
+ bitField0_ |= 0x00000100;
+ firstDataBlockOffset_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearFirstDataBlockOffset() {
+ bitField0_ = (bitField0_ & ~0x00000100);
+ firstDataBlockOffset_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 lastDataBlockOffset = 10;
+ private long lastDataBlockOffset_ ;
+ public boolean hasLastDataBlockOffset() {
+ return ((bitField0_ & 0x00000200) == 0x00000200);
+ }
+ public long getLastDataBlockOffset() {
+ return lastDataBlockOffset_;
+ }
+ public Builder setLastDataBlockOffset(long value) {
+ bitField0_ |= 0x00000200;
+ lastDataBlockOffset_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearLastDataBlockOffset() {
+ bitField0_ = (bitField0_ & ~0x00000200);
+ lastDataBlockOffset_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional string comparatorClassName = 11;
+ private java.lang.Object comparatorClassName_ = "";
+ public boolean hasComparatorClassName() {
+ return ((bitField0_ & 0x00000400) == 0x00000400);
+ }
+ public String getComparatorClassName() {
+ java.lang.Object ref = comparatorClassName_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ comparatorClassName_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setComparatorClassName(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000400;
+ comparatorClassName_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearComparatorClassName() {
+ bitField0_ = (bitField0_ & ~0x00000400);
+ comparatorClassName_ = getDefaultInstance().getComparatorClassName();
+ onChanged();
+ return this;
+ }
+ void setComparatorClassName(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000400;
+ comparatorClassName_ = value;
+ onChanged();
+ }
+
+ // optional uint32 compressionCodec = 12;
+ private int compressionCodec_ ;
+ public boolean hasCompressionCodec() {
+ return ((bitField0_ & 0x00000800) == 0x00000800);
+ }
+ public int getCompressionCodec() {
+ return compressionCodec_;
+ }
+ public Builder setCompressionCodec(int value) {
+ bitField0_ |= 0x00000800;
+ compressionCodec_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearCompressionCodec() {
+ bitField0_ = (bitField0_ & ~0x00000800);
+ compressionCodec_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:FileTrailerProto)
+ }
+
+ static {
+ defaultInstance = new FileTrailerProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:FileTrailerProto)
+ }
+
private static com.google.protobuf.Descriptors.Descriptor
internal_static_FileInfoProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_FileInfoProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_FileTrailerProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_FileTrailerProto_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@@ -616,9 +1755,18 @@ public final class HFileProtos {
static {
java.lang.String[] descriptorData = {
"\n\013HFile.proto\032\013hbase.proto\"2\n\rFileInfoPr" +
- "oto\022!\n\010mapEntry\030\001 \003(\0132\017.BytesBytesPairBA" +
- "\n*org.apache.hadoop.hbase.protobuf.gener" +
- "atedB\013HFileProtosH\001\210\001\001\240\001\001"
+ "oto\022!\n\010mapEntry\030\001 \003(\0132\017.BytesBytesPair\"\335" +
+ "\002\n\020FileTrailerProto\022\026\n\016fileInfoOffset\030\001 " +
+ "\001(\004\022\034\n\024loadOnOpenDataOffset\030\002 \001(\004\022!\n\031unc" +
+ "ompressedDataIndexSize\030\003 \001(\004\022\036\n\026totalUnc" +
+ "ompressedBytes\030\004 \001(\004\022\026\n\016dataIndexCount\030\005" +
+ " \001(\r\022\026\n\016metaIndexCount\030\006 \001(\r\022\022\n\nentryCou" +
+ "nt\030\007 \001(\004\022\032\n\022numDataIndexLevels\030\010 \001(\r\022\034\n\024" +
+ "firstDataBlockOffset\030\t \001(\004\022\033\n\023lastDataBl" +
+ "ockOffset\030\n \001(\004\022\033\n\023comparatorClassName\030\013",
+ " \001(\t\022\030\n\020compressionCodec\030\014 \001(\rBA\n*org.ap" +
+ "ache.hadoop.hbase.protobuf.generatedB\013HF" +
+ "ileProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -633,6 +1781,14 @@ public final class HFileProtos {
new java.lang.String[] { "MapEntry", },
org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.class,
org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.Builder.class);
+ internal_static_FileTrailerProto_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_FileTrailerProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_FileTrailerProto_descriptor,
+ new java.lang.String[] { "FileInfoOffset", "LoadOnOpenDataOffset", "UncompressedDataIndexSize", "TotalUncompressedBytes", "DataIndexCount", "MetaIndexCount", "EntryCount", "NumDataIndexLevels", "FirstDataBlockOffset", "LastDataBlockOffset", "ComparatorClassName", "CompressionCodec", },
+ org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.class,
+ org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class);
return null;
}
};
Modified: hbase/branches/hbase-7290v2/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java (original)
+++ hbase/branches/hbase-7290v2/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java Thu Feb 14 12:58:12 2013
@@ -1637,11 +1637,7 @@ public final class RPCProtos {
boolean hasMethodName();
String getMethodName();
- // optional uint64 clientProtocolVersion = 2;
- boolean hasClientProtocolVersion();
- long getClientProtocolVersion();
-
- // optional bytes request = 3;
+ // optional bytes request = 2;
boolean hasRequest();
com.google.protobuf.ByteString getRequest();
@@ -1710,21 +1706,11 @@ public final class RPCProtos {
}
}
- // optional uint64 clientProtocolVersion = 2;
- public static final int CLIENTPROTOCOLVERSION_FIELD_NUMBER = 2;
- private long clientProtocolVersion_;
- public boolean hasClientProtocolVersion() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public long getClientProtocolVersion() {
- return clientProtocolVersion_;
- }
-
- // optional bytes request = 3;
- public static final int REQUEST_FIELD_NUMBER = 3;
+ // optional bytes request = 2;
+ public static final int REQUEST_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString request_;
public boolean hasRequest() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
public com.google.protobuf.ByteString getRequest() {
return request_;
@@ -1734,7 +1720,7 @@ public final class RPCProtos {
public static final int REQUESTCLASSNAME_FIELD_NUMBER = 4;
private java.lang.Object requestClassName_;
public boolean hasRequestClassName() {
- return ((bitField0_ & 0x00000008) == 0x00000008);
+ return ((bitField0_ & 0x00000004) == 0x00000004);
}
public String getRequestClassName() {
java.lang.Object ref = requestClassName_;
@@ -1764,7 +1750,6 @@ public final class RPCProtos {
private void initFields() {
methodName_ = "";
- clientProtocolVersion_ = 0L;
request_ = com.google.protobuf.ByteString.EMPTY;
requestClassName_ = "";
}
@@ -1788,12 +1773,9 @@ public final class RPCProtos {
output.writeBytes(1, getMethodNameBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeUInt64(2, clientProtocolVersion_);
+ output.writeBytes(2, request_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- output.writeBytes(3, request_);
- }
- if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeBytes(4, getRequestClassNameBytes());
}
getUnknownFields().writeTo(output);
@@ -1811,14 +1793,10 @@ public final class RPCProtos {
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
- .computeUInt64Size(2, clientProtocolVersion_);
+ .computeBytesSize(2, request_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(3, request_);
- }
- if (((bitField0_ & 0x00000008) == 0x00000008)) {
- size += com.google.protobuf.CodedOutputStream
.computeBytesSize(4, getRequestClassNameBytes());
}
size += getUnknownFields().getSerializedSize();
@@ -1849,11 +1827,6 @@ public final class RPCProtos {
result = result && getMethodName()
.equals(other.getMethodName());
}
- result = result && (hasClientProtocolVersion() == other.hasClientProtocolVersion());
- if (hasClientProtocolVersion()) {
- result = result && (getClientProtocolVersion()
- == other.getClientProtocolVersion());
- }
result = result && (hasRequest() == other.hasRequest());
if (hasRequest()) {
result = result && getRequest()
@@ -1877,10 +1850,6 @@ public final class RPCProtos {
hash = (37 * hash) + METHODNAME_FIELD_NUMBER;
hash = (53 * hash) + getMethodName().hashCode();
}
- if (hasClientProtocolVersion()) {
- hash = (37 * hash) + CLIENTPROTOCOLVERSION_FIELD_NUMBER;
- hash = (53 * hash) + hashLong(getClientProtocolVersion());
- }
if (hasRequest()) {
hash = (37 * hash) + REQUEST_FIELD_NUMBER;
hash = (53 * hash) + getRequest().hashCode();
@@ -2007,12 +1976,10 @@ public final class RPCProtos {
super.clear();
methodName_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
- clientProtocolVersion_ = 0L;
- bitField0_ = (bitField0_ & ~0x00000002);
request_ = com.google.protobuf.ByteString.EMPTY;
- bitField0_ = (bitField0_ & ~0x00000004);
+ bitField0_ = (bitField0_ & ~0x00000002);
requestClassName_ = "";
- bitField0_ = (bitField0_ & ~0x00000008);
+ bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
@@ -2058,14 +2025,10 @@ public final class RPCProtos {
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
- result.clientProtocolVersion_ = clientProtocolVersion_;
+ result.request_ = request_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
- result.request_ = request_;
- if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
- to_bitField0_ |= 0x00000008;
- }
result.requestClassName_ = requestClassName_;
result.bitField0_ = to_bitField0_;
onBuilt();
@@ -2086,9 +2049,6 @@ public final class RPCProtos {
if (other.hasMethodName()) {
setMethodName(other.getMethodName());
}
- if (other.hasClientProtocolVersion()) {
- setClientProtocolVersion(other.getClientProtocolVersion());
- }
if (other.hasRequest()) {
setRequest(other.getRequest());
}
@@ -2135,18 +2095,13 @@ public final class RPCProtos {
methodName_ = input.readBytes();
break;
}
- case 16: {
+ case 18: {
bitField0_ |= 0x00000002;
- clientProtocolVersion_ = input.readUInt64();
- break;
- }
- case 26: {
- bitField0_ |= 0x00000004;
request_ = input.readBytes();
break;
}
case 34: {
- bitField0_ |= 0x00000008;
+ bitField0_ |= 0x00000004;
requestClassName_ = input.readBytes();
break;
}
@@ -2192,31 +2147,10 @@ public final class RPCProtos {
onChanged();
}
- // optional uint64 clientProtocolVersion = 2;
- private long clientProtocolVersion_ ;
- public boolean hasClientProtocolVersion() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public long getClientProtocolVersion() {
- return clientProtocolVersion_;
- }
- public Builder setClientProtocolVersion(long value) {
- bitField0_ |= 0x00000002;
- clientProtocolVersion_ = value;
- onChanged();
- return this;
- }
- public Builder clearClientProtocolVersion() {
- bitField0_ = (bitField0_ & ~0x00000002);
- clientProtocolVersion_ = 0L;
- onChanged();
- return this;
- }
-
- // optional bytes request = 3;
+ // optional bytes request = 2;
private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY;
public boolean hasRequest() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
public com.google.protobuf.ByteString getRequest() {
return request_;
@@ -2225,13 +2159,13 @@ public final class RPCProtos {
if (value == null) {
throw new NullPointerException();
}
- bitField0_ |= 0x00000004;
+ bitField0_ |= 0x00000002;
request_ = value;
onChanged();
return this;
}
public Builder clearRequest() {
- bitField0_ = (bitField0_ & ~0x00000004);
+ bitField0_ = (bitField0_ & ~0x00000002);
request_ = getDefaultInstance().getRequest();
onChanged();
return this;
@@ -2240,7 +2174,7 @@ public final class RPCProtos {
// optional string requestClassName = 4;
private java.lang.Object requestClassName_ = "";
public boolean hasRequestClassName() {
- return ((bitField0_ & 0x00000008) == 0x00000008);
+ return ((bitField0_ & 0x00000004) == 0x00000004);
}
public String getRequestClassName() {
java.lang.Object ref = requestClassName_;
@@ -2256,19 +2190,19 @@ public final class RPCProtos {
if (value == null) {
throw new NullPointerException();
}
- bitField0_ |= 0x00000008;
+ bitField0_ |= 0x00000004;
requestClassName_ = value;
onChanged();
return this;
}
public Builder clearRequestClassName() {
- bitField0_ = (bitField0_ & ~0x00000008);
+ bitField0_ = (bitField0_ & ~0x00000004);
requestClassName_ = getDefaultInstance().getRequestClassName();
onChanged();
return this;
}
void setRequestClassName(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000008;
+ bitField0_ |= 0x00000004;
requestClassName_ = value;
onChanged();
}
@@ -3761,17 +3695,16 @@ public final class RPCProtos {
" \001(\0132\020.UserInformation\022?\n\010protocol\030\002 \001(\t" +
":-org.apache.hadoop.hbase.client.ClientP" +
"rotocol\"<\n\020RpcRequestHeader\022\016\n\006callId\030\001 " +
- "\002(\r\022\030\n\005tinfo\030\002 \001(\0132\t.RPCTInfo\"n\n\016RpcRequ" +
- "estBody\022\022\n\nmethodName\030\001 \002(\t\022\035\n\025clientPro" +
- "tocolVersion\030\002 \001(\004\022\017\n\007request\030\003 \001(\014\022\030\n\020r" +
- "equestClassName\030\004 \001(\t\"{\n\021RpcResponseHead",
- "er\022\016\n\006callId\030\001 \002(\r\022)\n\006status\030\002 \002(\0162\031.Rpc" +
- "ResponseHeader.Status\"+\n\006Status\022\013\n\007SUCCE" +
- "SS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"#\n\017RpcRespons" +
- "eBody\022\020\n\010response\030\001 \001(\014\"9\n\014RpcException\022" +
- "\025\n\rexceptionName\030\001 \002(\t\022\022\n\nstackTrace\030\002 \001" +
- "(\tB<\n*org.apache.hadoop.hbase.protobuf.g" +
- "eneratedB\tRPCProtosH\001\240\001\001"
+ "\002(\r\022\030\n\005tinfo\030\002 \001(\0132\t.RPCTInfo\"O\n\016RpcRequ" +
+ "estBody\022\022\n\nmethodName\030\001 \002(\t\022\017\n\007request\030\002" +
+ " \001(\014\022\030\n\020requestClassName\030\004 \001(\t\"{\n\021RpcRes" +
+ "ponseHeader\022\016\n\006callId\030\001 \002(\r\022)\n\006status\030\002 ",
+ "\002(\0162\031.RpcResponseHeader.Status\"+\n\006Status" +
+ "\022\013\n\007SUCCESS\020\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002\"#\n\017R" +
+ "pcResponseBody\022\020\n\010response\030\001 \001(\014\"9\n\014RpcE" +
+ "xception\022\025\n\rexceptionName\030\001 \002(\t\022\022\n\nstack" +
+ "Trace\030\002 \001(\tB<\n*org.apache.hadoop.hbase.p" +
+ "rotobuf.generatedB\tRPCProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -3807,7 +3740,7 @@ public final class RPCProtos {
internal_static_RpcRequestBody_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_RpcRequestBody_descriptor,
- new java.lang.String[] { "MethodName", "ClientProtocolVersion", "Request", "RequestClassName", },
+ new java.lang.String[] { "MethodName", "Request", "RequestClassName", },
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.class,
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody.Builder.class);
internal_static_RpcResponseHeader_descriptor =
Modified: hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/AccessControl.proto
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/AccessControl.proto?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/AccessControl.proto (original)
+++ hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/AccessControl.proto Thu Feb 14 12:58:12 2013
@@ -70,7 +70,7 @@ message RevokeResponse {
message UserPermissionsRequest {
- required bytes table = 1;
+ optional bytes table = 1;
}
message UserPermissionsResponse {
Modified: hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/Client.proto
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/Client.proto?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/Client.proto (original)
+++ hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/Client.proto Thu Feb 14 12:58:12 2013
@@ -42,13 +42,12 @@ message Get {
required bytes row = 1;
repeated Column column = 2;
repeated NameBytesPair attribute = 3;
- optional uint64 lockId = 4;
- optional Filter filter = 5;
- optional TimeRange timeRange = 6;
- optional uint32 maxVersions = 7 [default = 1];
- optional bool cacheBlocks = 8 [default = true];
- optional uint32 storeLimit = 9;
- optional uint32 storeOffset = 10;
+ optional Filter filter = 4;
+ optional TimeRange timeRange = 5;
+ optional uint32 maxVersions = 6 [default = 1];
+ optional bool cacheBlocks = 7 [default = true];
+ optional uint32 storeLimit = 8;
+ optional uint32 storeOffset = 9;
}
message Result {
@@ -72,8 +71,22 @@ message GetRequest {
optional bool closestRowBefore = 3;
// The result isn't asked for, just check for
- // the existence. If specified, closestRowBefore
- // will be ignored
+ // the existence. If closestRowBefore specified,
+ // this will be ignored
+ optional bool existenceOnly = 4;
+}
+
+message MultiGetRequest {
+ required RegionSpecifier region = 1;
+ repeated Get get = 2;
+
+ // If the row to get doesn't exist, return the
+ // closest row before.
+ optional bool closestRowBefore = 3;
+
+ // The result isn't asked for, just check for
+ // the existence. If closestRowBefore specified,
+ // this will be ignored
optional bool existenceOnly = 4;
}
@@ -84,6 +97,13 @@ message GetResponse {
optional bool exists = 2;
}
+message MultiGetResponse {
+ repeated Result result = 1;
+
+ // used for Get to check existence only
+ repeated bool exists = 2;
+}
+
/**
* Condition to check if the value of a given cell (row,
* family, qualifier) matches a value via a given comparator.
@@ -109,8 +129,7 @@ message Mutate {
repeated ColumnValue columnValue = 3;
repeated NameBytesPair attribute = 4;
optional uint64 timestamp = 5;
- optional uint64 lockId = 6;
- optional bool writeToWAL = 7 [default = true];
+ optional bool writeToWAL = 6 [default = true];
// For some mutate, result may be returned, in which case,
// time range can be specified for potential performance gain
@@ -186,6 +205,7 @@ message Scan {
optional uint64 maxResultSize = 10;
optional uint32 storeLimit = 11;
optional uint32 storeOffset = 12;
+ optional bool loadColumnFamiliesOnDemand = 13; /* DO NOT add defaults to loadColumnFamiliesOnDemand. */
}
/**
@@ -217,24 +237,7 @@ message ScanResponse {
optional uint64 scannerId = 2;
optional bool moreResults = 3;
optional uint32 ttl = 4;
-}
-
-message LockRowRequest {
- required RegionSpecifier region = 1;
- repeated bytes row = 2;
-}
-
-message LockRowResponse {
- required uint64 lockId = 1;
- optional uint32 ttl = 2;
-}
-
-message UnlockRowRequest {
- required RegionSpecifier region = 1;
- required uint64 lockId = 2;
-}
-
-message UnlockRowResponse {
+ optional uint64 resultSizeBytes = 5;
}
/**
@@ -256,43 +259,6 @@ message BulkLoadHFileResponse {
required bool loaded = 1;
}
-/**
- * An individual coprocessor call. You must specify the protocol,
- * the method, and the row to which the call will be executed.
- *
- * You can specify the configuration settings in the property list.
- *
- * The parameter list has the parameters used for the method.
- * A parameter is a pair of parameter name and the binary parameter
- * value. The name is the parameter class name. The value is the
- * binary format of the parameter, for example, protocol buffer
- * encoded value.
- */
-message Exec {
- required bytes row = 1;
- required string protocolName = 2;
- required string methodName = 3;
- repeated NameStringPair property = 4;
- repeated NameBytesPair parameter = 5;
-}
-
- /**
- * Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol}
- * method using the registered protocol handlers.
- * {@link CoprocessorProtocol} implementations must be registered via the
- * {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(
- * Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)}
- * method before they are available.
- */
-message ExecCoprocessorRequest {
- required RegionSpecifier region = 1;
- required Exec call = 2;
-}
-
-message ExecCoprocessorResponse {
- required NameBytesPair value = 1;
-}
-
message CoprocessorServiceCall {
required bytes row = 1;
required string serviceName = 2;
@@ -317,7 +283,6 @@ message CoprocessorServiceResponse {
message MultiAction {
optional Mutate mutate = 1;
optional Get get = 2;
- optional Exec exec = 3;
}
/**
@@ -329,7 +294,7 @@ message MultiAction {
* is returned as a stringified parameter.
*/
message ActionResult {
- optional NameBytesPair value = 1;
+ optional Result value = 1;
optional NameBytesPair exception = 2;
}
@@ -355,24 +320,18 @@ service ClientService {
rpc get(GetRequest)
returns(GetResponse);
+ rpc multiGet(MultiGetRequest)
+ returns(MultiGetResponse);
+
rpc mutate(MutateRequest)
returns(MutateResponse);
rpc scan(ScanRequest)
returns(ScanResponse);
- rpc lockRow(LockRowRequest)
- returns(LockRowResponse);
-
- rpc unlockRow(UnlockRowRequest)
- returns(UnlockRowResponse);
-
rpc bulkLoadHFile(BulkLoadHFileRequest)
returns(BulkLoadHFileResponse);
- rpc execCoprocessor(ExecCoprocessorRequest)
- returns(ExecCoprocessorResponse);
-
rpc execService(CoprocessorServiceRequest)
returns(CoprocessorServiceResponse);
Modified: hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/Filter.proto
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/Filter.proto?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/Filter.proto (original)
+++ hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/Filter.proto Thu Feb 14 12:58:12 2013
@@ -129,10 +129,8 @@ message SingleColumnValueFilter {
optional bytes columnQualifier = 2;
required CompareType compareOp = 3;
required Comparator comparator = 4;
- optional bool foundColumn = 5;
- optional bool matchedColumn = 6;
- optional bool filterIfMissing = 7;
- optional bool latestVersionOnly = 8;
+ optional bool filterIfMissing = 5;
+ optional bool latestVersionOnly = 6;
}
message SkipFilter {
Modified: hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/HFile.proto
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/HFile.proto?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/HFile.proto (original)
+++ hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/HFile.proto Thu Feb 14 12:58:12 2013
@@ -27,3 +27,19 @@ import "hbase.proto";
message FileInfoProto {
repeated BytesBytesPair mapEntry = 1;
}
+
+// HFile file trailer
+message FileTrailerProto {
+ optional uint64 fileInfoOffset = 1;
+ optional uint64 loadOnOpenDataOffset = 2;
+ optional uint64 uncompressedDataIndexSize = 3;
+ optional uint64 totalUncompressedBytes = 4;
+ optional uint32 dataIndexCount = 5;
+ optional uint32 metaIndexCount = 6;
+ optional uint64 entryCount = 7;
+ optional uint32 numDataIndexLevels = 8;
+ optional uint64 firstDataBlockOffset = 9;
+ optional uint64 lastDataBlockOffset = 10;
+ optional string comparatorClassName = 11;
+ optional uint32 compressionCodec = 12;
+}
Modified: hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/RPC.proto
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/RPC.proto?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/RPC.proto (original)
+++ hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/RPC.proto Thu Feb 14 12:58:12 2013
@@ -86,13 +86,10 @@ message RpcRequestBody {
/** Name of the RPC method */
required string methodName = 1;
- /** protocol version of class declaring the called method */
- optional uint64 clientProtocolVersion = 2;
-
/** Bytes corresponding to the client protobuf request. This is the actual
* bytes corresponding to the RPC request argument.
*/
- optional bytes request = 3;
+ optional bytes request = 2;
/** Some metainfo about the request. Helps us to treat RPCs with
* different priorities. For now this is just the classname of the request
Modified: hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/ZooKeeper.proto
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/ZooKeeper.proto?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/ZooKeeper.proto (original)
+++ hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/ZooKeeper.proto Thu Feb 14 12:58:12 2013
@@ -44,7 +44,7 @@ message Master {
}
/**
- * Content of the '/hbase/shutdown', cluster state, znode.
+ * Content of the '/hbase/running', cluster state, znode.
*/
message ClusterUp {
// If this znode is present, cluster is up. Currently
Modified: hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/hbase.proto
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/hbase.proto?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/hbase.proto (original)
+++ hbase/branches/hbase-7290v2/hbase-protocol/src/main/protobuf/hbase.proto Thu Feb 14 12:58:12 2013
@@ -29,12 +29,9 @@ option optimize_for = SPEED;
*/
message TableSchema {
optional bytes name = 1;
- message Attribute {
- required bytes name = 1;
- required bytes value = 2;
- }
- repeated Attribute attributes = 2;
+ repeated BytesBytesPair attributes = 2;
repeated ColumnFamilySchema columnFamilies = 3;
+ repeated NameStringPair configuration = 4;
}
/**
@@ -43,11 +40,8 @@ message TableSchema {
*/
message ColumnFamilySchema {
required bytes name = 1;
- message Attribute {
- required bytes name = 1;
- required bytes value = 2;
- }
- repeated Attribute attributes = 2;
+ repeated BytesBytesPair attributes = 2;
+ repeated NameStringPair configuration = 3;
}
/**
@@ -283,3 +277,11 @@ message SnapshotDescription {
optional Type type = 4 [default = FLUSH];
optional int32 version = 5;
}
+
+message EmptyMsg {
+}
+
+message LongMsg {
+ required int64 longMsg = 1;
+
+}
Propchange: hbase/branches/hbase-7290v2/hbase-server/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Thu Feb 14 12:58:12 2013
@@ -3,5 +3,6 @@
build
logs
target
-
-
+*.iws
+*.iml
+*.ipr
Modified: hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon Thu Feb 14 12:58:12 2013
@@ -56,14 +56,14 @@ boolean first = true;
[<%for MonitoredTask task : tasks%><%if first%><%java>first = false;</%java><%else>,</%if><% task.toJSON() %></%for>]
<%else>
<h2>Tasks</h2>
- <div id="tasks_menu">
- <a href="?filter=all">Show All Monitored Tasks</a>
- <a href="?filter=general">Show non-RPC Tasks</a>
- <a href="?filter=handler">Show All RPC Handler Tasks</a>
- <a href="?filter=rpc">Show Active RPC Calls</a>
- <a href="?filter=operation">Show Client Operations</a>
- <a href="?format=json&filter=<% filter %>">View as JSON</a>
- </div>
+ <ul class="nav nav-pills">
+ <li <%if filter.equals("all")%>class="active"</%if>><a href="?filter=all">Show All Monitored Tasks</a></li>
+ <li <%if filter.equals("general")%>class="active"</%if>><a href="?filter=general">Show non-RPC Tasks</a></li>
+ <li <%if filter.equals("handler")%>class="active"</%if>><a href="?filter=handler">Show All RPC Handler Tasks</a></li>
+ <li <%if filter.equals("rpc")%>class="active"</%if>><a href="?filter=rpc">Show Active RPC Calls</a></li>
+ <li <%if filter.equals("operation")%>class="active"</%if>><a href="?filter=operation">Show Client Operations</a></li>
+ <li><a href="?format=json&filter=<% filter %>">View as JSON</a></li>
+ </ul>
<%if tasks.isEmpty()%>
<p>No tasks currently running on this node.</p>
<%else>
Modified: hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/AssignmentManagerStatusTmpl.jamon
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/AssignmentManagerStatusTmpl.jamon?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/AssignmentManagerStatusTmpl.jamon (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/AssignmentManagerStatusTmpl.jamon Thu Feb 14 12:58:12 2013
@@ -76,6 +76,7 @@ if (toRemove > 0) {
<%if !rit.isEmpty() %>
+ <section>
<h2>Regions in Transition</h2>
<table class="table table-striped">
<tr><th>Region</th><th>State</th><th>RIT time (ms)</th></tr>
@@ -95,5 +96,6 @@ if (toRemove > 0) {
<%if removed > 0 %>
(<% removed %> more regions in transition not shown)
</%if>
+ </section>
</%if>
Modified: hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon?rev=1446147&r1=1446146&r2=1446147&view=diff
==============================================================================
--- hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon (original)
+++ hbase/branches/hbase-7290v2/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon Thu Feb 14 12:58:12 2013
@@ -74,7 +74,7 @@ org.apache.hadoop.hbase.protobuf.generat
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</a>
- <a class="brand" href="/master-status">HBase Master</a>
+ <a class="brand" href="/master-status"><img src="/static/hbase_logo_small.png" alt="HBase Logo"/></a>
<div class="nav-collapse">
<ul class="nav">
<li class="active"><a href="/">Home</a></li>
@@ -94,15 +94,12 @@ org.apache.hadoop.hbase.protobuf.generat
<div class="container">
<div class="row inner_header">
- <div class="span8">
- <h1>Master: <% master.getServerName().getHostname() %> </h1>
- </div>
- <div class="span4 logo">
- <img src="/static/hbase_logo.png" height="66" width="266" alt="HBase logo"/>
+ <div class="page-header">
+ <h1>Master <small><% master.getServerName().getHostname() %> </small></h1>
</div>
</div>
-
+ <div class="row">
<!-- Various warnings that cluster admins should be aware of -->
<%if JvmVersion.isBadJvmVersion() %>
<div class="alert alert-error">
@@ -127,121 +124,130 @@ org.apache.hadoop.hbase.protobuf.generat
</div>
</%if>
+ <section>
+ <h2>Region Servers</h2>
+ <& RegionServerListTmpl; master= master; servers = servers &>
- <h2>Region Servers</h2>
- <& RegionServerListTmpl; master= master; servers = servers &>
-
- <%if (deadServers != null) %>
- <& deadRegionServers &>
- </%if>
-
- <h2>Backup Masters</h2>
- <& BackupMasterListTmpl; master = master &>
+ <%if (deadServers != null) %>
+ <& deadRegionServers &>
+ </%if>
+ </section>
- <h2>Tables</h2>
- <div class="tabbable">
- <ul class="nav nav-tabs">
- <li class="active">
- <a href="#userTables" data-toggle="tab">User Tables</a>
- </li>
- <li class="">
- <a href="#catalogTables" data-toggle="tab">Catalog Tables</a>
- </li>
- <li class="">
- <a href="#userSnapshots" data-toggle="tab">Snapshots</a>
- </li>
- </ul>
- <div class="tab-content" style="padding-bottom: 9px; border-bottom: 1px solid #ddd;">
- <div class="tab-pane active" id="userTables">
- <%if (metaLocation != null) %>
- <& userTables &>
- </%if>
- </div>
- <div class="tab-pane" id="catalogTables">
- <%if (rootLocation != null) %>
- <& catalogTables &>
- </%if>
- </div>
- <div class="tab-pane" id="userSnapshots">
- <& userSnapshots &>
+ <section>
+ <h2>Backup Masters</h2>
+ <& BackupMasterListTmpl; master = master &>
+ </section>
+
+ <section>
+ <h2>Tables</h2>
+ <div class="tabbable">
+ <ul class="nav nav-pills">
+ <li class="active">
+ <a href="#tab_userTables" data-toggle="tab">User Tables</a>
+ </li>
+ <li class="">
+ <a href="#tab_catalogTables" data-toggle="tab">Catalog Tables</a>
+ </li>
+ <li class="">
+ <a href="#tab_userSnapshots" data-toggle="tab">Snapshots</a>
+ </li>
+ </ul>
+ <div class="tab-content" style="padding-bottom: 9px; border-bottom: 1px solid #ddd;">
+ <div class="tab-pane active" id="tab_userTables">
+ <%if (metaLocation != null) %>
+ <& userTables &>
+ </%if>
+ </div>
+ <div class="tab-pane" id="tab_catalogTables">
+ <%if (rootLocation != null) %>
+ <& catalogTables &>
+ </%if>
+ </div>
+ <div class="tab-pane" id="userSnapshots">
+ <& userSnapshots &>
+ </div>
</div>
</div>
- </div>
-
+ </section>
<& AssignmentManagerStatusTmpl; assignmentManager=master.getAssignmentManager()&>
- <& ../common/TaskMonitorTmpl; filter = filter &>
-
- <h2>Software Attributes</h2>
- <table id="attributes_table" class="table table-striped">
- <tr>
- <th>Attribute Name</th>
- <th>Value</th>
- <th>Description</th>
- </tr>
- <tr>
- <td>HBase Version</td>
- <td><% org.apache.hadoop.hbase.util.VersionInfo.getVersion() %>, r<% org.apache.hadoop.hbase.util.VersionInfo.getRevision() %></td><td>HBase version and revision</td>
- </tr>
- <tr>
- <td>HBase Compiled</td>
- <td><% org.apache.hadoop.hbase.util.VersionInfo.getDate() %>, <% org.apache.hadoop.hbase.util.VersionInfo.getUser() %></td>
- <td>When HBase version was compiled and by whom</td>
- </tr>
- <tr>
- <td>Hadoop Version</td>
- <td><% org.apache.hadoop.util.VersionInfo.getVersion() %>, r<% org.apache.hadoop.util.VersionInfo.getRevision() %></td>
- <td>Hadoop version and revision</td>
- </tr>
- <tr>
- <td>Hadoop Compiled</td>
- <td><% org.apache.hadoop.util.VersionInfo.getDate() %>, <% org.apache.hadoop.util.VersionInfo.getUser() %></td>
- <td>When Hadoop version was compiled and by whom</td>
- </tr>
- <tr>
- <td>HBase Root Directory</td>
- <td><% FSUtils.getRootDir(master.getConfiguration()).toString() %></td>
- <td>Location of HBase home directory</td>
- </tr>
- <tr>
- <td>HBase Cluster ID</td>
- <td><% master.getClusterId() != null ? master.getClusterId() : "Not set" %></td>
- <td>Unique identifier generated for each HBase cluster</td>
- </tr>
- <tr>
- <td>Load average</td>
- <td><% StringUtils.limitDecimalTo2(master.getServerManager().getAverageLoad()) %></td>
- <td>Average number of regions per regionserver. Naive computation.</td>
- </tr>
- <%if frags != null %>
- <tr>
- <td>Fragmentation</td>
- <td><% frags.get("-TOTAL-") != null ? frags.get("-TOTAL-").intValue() + "%" : "n/a" %></td>
- <td>Overall fragmentation of all tables, including .META. and -ROOT-.</td>
- </tr>
- </%if>
- <tr>
- <td>Zookeeper Quorum</td>
- <td><% master.getZooKeeperWatcher().getQuorum() %></td>
- <td>Addresses of all registered ZK servers. For more, see <a href="/zk.jsp">zk dump</a>.</td>
- </tr>
- <tr>
- <td>Coprocessors</td>
- <td><% java.util.Arrays.toString(master.getCoprocessors()) %></td>
- <td>Coprocessors currently loaded by the master</td>
- </tr>
- <tr>
- <td>HMaster Start Time</td>
- <td><% new Date(master.getMasterStartTime()) %></td>
- <td>Date stamp of when this HMaster was started</td>
- </tr>
- <tr>
- <td>HMaster Active Time</td>
- <td><% new Date(master.getMasterActiveTime()) %></td>
- <td>Date stamp of when this HMaster became active</td>
- </tr>
- </table>
+ <section>
+ <& ../common/TaskMonitorTmpl; filter = filter &>
+ </section>
+
+ <section>
+ <h2>Software Attributes</h2>
+ <table id="attributes_table" class="table table-striped">
+ <tr>
+ <th>Attribute Name</th>
+ <th>Value</th>
+ <th>Description</th>
+ </tr>
+ <tr>
+ <td>HBase Version</td>
+ <td><% org.apache.hadoop.hbase.util.VersionInfo.getVersion() %>, r<% org.apache.hadoop.hbase.util.VersionInfo.getRevision() %></td><td>HBase version and revision</td>
+ </tr>
+ <tr>
+ <td>HBase Compiled</td>
+ <td><% org.apache.hadoop.hbase.util.VersionInfo.getDate() %>, <% org.apache.hadoop.hbase.util.VersionInfo.getUser() %></td>
+ <td>When HBase version was compiled and by whom</td>
+ </tr>
+ <tr>
+ <td>Hadoop Version</td>
+ <td><% org.apache.hadoop.util.VersionInfo.getVersion() %>, r<% org.apache.hadoop.util.VersionInfo.getRevision() %></td>
+ <td>Hadoop version and revision</td>
+ </tr>
+ <tr>
+ <td>Hadoop Compiled</td>
+ <td><% org.apache.hadoop.util.VersionInfo.getDate() %>, <% org.apache.hadoop.util.VersionInfo.getUser() %></td>
+ <td>When Hadoop version was compiled and by whom</td>
+ </tr>
+ <tr>
+ <td>HBase Root Directory</td>
+ <td><% FSUtils.getRootDir(master.getConfiguration()).toString() %></td>
+ <td>Location of HBase home directory</td>
+ </tr>
+ <tr>
+ <td>HBase Cluster ID</td>
+ <td><% master.getClusterId() != null ? master.getClusterId() : "Not set" %></td>
+ <td>Unique identifier generated for each HBase cluster</td>
+ </tr>
+ <tr>
+ <td>Load average</td>
+ <td><% StringUtils.limitDecimalTo2(master.getServerManager().getAverageLoad()) %></td>
+ <td>Average number of regions per regionserver. Naive computation.</td>
+ </tr>
+ <%if frags != null %>
+ <tr>
+ <td>Fragmentation</td>
+ <td><% frags.get("-TOTAL-") != null ? frags.get("-TOTAL-").intValue() + "%" : "n/a" %></td>
+ <td>Overall fragmentation of all tables, including .META. and -ROOT-.</td>
+ </tr>
+ </%if>
+ <tr>
+ <td>Zookeeper Quorum</td>
+ <td><% master.getZooKeeperWatcher().getQuorum() %></td>
+ <td>Addresses of all registered ZK servers. For more, see <a href="/zk.jsp">zk dump</a>.</td>
+ </tr>
+ <tr>
+ <td>Coprocessors</td>
+ <td><% java.util.Arrays.toString(master.getCoprocessors()) %></td>
+ <td>Coprocessors currently loaded by the master</td>
+ </tr>
+ <tr>
+ <td>HMaster Start Time</td>
+ <td><% new Date(master.getMasterStartTime()) %></td>
+ <td>Date stamp of when this HMaster was started</td>
+ </tr>
+ <tr>
+ <td>HMaster Active Time</td>
+ <td><% new Date(master.getMasterActiveTime()) %></td>
+ <td>Date stamp of when this HMaster became active</td>
+ </tr>
+ </table>
+ </section>
+ </div>
</div> <!-- /container -->
<script src="/static/js/jquery.min.js" type="text/javascript"></script>
@@ -284,6 +290,7 @@ org.apache.hadoop.hbase.protobuf.generat
<%def userTables>
<%java>
HTableDescriptor[] tables = admin.listTables();
+ HConnectionManager.deleteConnection(admin.getConfiguration());
</%java>
<%if (tables != null && tables.length > 0)%>
<table class="table table-striped">
@@ -360,7 +367,3 @@ org.apache.hadoop.hbase.protobuf.generat
</table>
</%if>
</%def>
-
-<%java>
- HConnectionManager.deleteConnection(admin.getConfiguration(), false);
-</%java>
\ No newline at end of file