You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by to...@apache.org on 2011/06/11 02:36:13 UTC
svn commit: r1134492 [3/4] - in /hadoop/hdfs/trunk: ./
src/java/org/apache/hadoop/hdfs/ src/java/org/apache/hadoop/hdfs/protocol/
src/java/org/apache/hadoop/hdfs/protocol/proto/
src/java/org/apache/hadoop/hdfs/server/balancer/ src/java/org/apache/hadoo...
Added: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/proto/HdfsProtos.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/proto/HdfsProtos.java?rev=1134492&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/proto/HdfsProtos.java (added)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/protocol/proto/HdfsProtos.java Sat Jun 11 00:36:12 2011
@@ -0,0 +1,3247 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: hdfs.proto
+
+package org.apache.hadoop.hdfs.protocol.proto;
+
+public final class HdfsProtos {
+ private HdfsProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface ExtendedBlockProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string poolId = 1;
+ boolean hasPoolId();
+ String getPoolId();
+
+ // required uint64 blockId = 2;
+ boolean hasBlockId();
+ long getBlockId();
+
+ // required uint64 numBytes = 3;
+ boolean hasNumBytes();
+ long getNumBytes();
+
+ // required uint64 generationStamp = 4;
+ boolean hasGenerationStamp();
+ long getGenerationStamp();
+ }
+ public static final class ExtendedBlockProto extends
+ com.google.protobuf.GeneratedMessage
+ implements ExtendedBlockProtoOrBuilder {
+ // Use ExtendedBlockProto.newBuilder() to construct.
+ private ExtendedBlockProto(Builder builder) {
+ super(builder);
+ }
+ private ExtendedBlockProto(boolean noInit) {}
+
+ private static final ExtendedBlockProto defaultInstance;
+ public static ExtendedBlockProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ExtendedBlockProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required string poolId = 1;
+ public static final int POOLID_FIELD_NUMBER = 1;
+ private java.lang.Object poolId_;
+ public boolean hasPoolId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getPoolId() {
+ java.lang.Object ref = poolId_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ poolId_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getPoolIdBytes() {
+ java.lang.Object ref = poolId_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ poolId_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // required uint64 blockId = 2;
+ public static final int BLOCKID_FIELD_NUMBER = 2;
+ private long blockId_;
+ public boolean hasBlockId() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public long getBlockId() {
+ return blockId_;
+ }
+
+ // required uint64 numBytes = 3;
+ public static final int NUMBYTES_FIELD_NUMBER = 3;
+ private long numBytes_;
+ public boolean hasNumBytes() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public long getNumBytes() {
+ return numBytes_;
+ }
+
+ // required uint64 generationStamp = 4;
+ public static final int GENERATIONSTAMP_FIELD_NUMBER = 4;
+ private long generationStamp_;
+ public boolean hasGenerationStamp() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public long getGenerationStamp() {
+ return generationStamp_;
+ }
+
+ private void initFields() {
+ poolId_ = "";
+ blockId_ = 0L;
+ numBytes_ = 0L;
+ generationStamp_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasPoolId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasBlockId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasNumBytes()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasGenerationStamp()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getPoolIdBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt64(2, blockId_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeUInt64(3, numBytes_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeUInt64(4, generationStamp_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getPoolIdBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(2, blockId_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(3, numBytes_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(4, generationStamp_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto) obj;
+
+ boolean result = true;
+ result = result && (hasPoolId() == other.hasPoolId());
+ if (hasPoolId()) {
+ result = result && getPoolId()
+ .equals(other.getPoolId());
+ }
+ result = result && (hasBlockId() == other.hasBlockId());
+ if (hasBlockId()) {
+ result = result && (getBlockId()
+ == other.getBlockId());
+ }
+ result = result && (hasNumBytes() == other.hasNumBytes());
+ if (hasNumBytes()) {
+ result = result && (getNumBytes()
+ == other.getNumBytes());
+ }
+ result = result && (hasGenerationStamp() == other.hasGenerationStamp());
+ if (hasGenerationStamp()) {
+ result = result && (getGenerationStamp()
+ == other.getGenerationStamp());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasPoolId()) {
+ hash = (37 * hash) + POOLID_FIELD_NUMBER;
+ hash = (53 * hash) + getPoolId().hashCode();
+ }
+ if (hasBlockId()) {
+ hash = (37 * hash) + BLOCKID_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getBlockId());
+ }
+ if (hasNumBytes()) {
+ hash = (37 * hash) + NUMBYTES_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getNumBytes());
+ }
+ if (hasGenerationStamp()) {
+ hash = (37 * hash) + GENERATIONSTAMP_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getGenerationStamp());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_ExtendedBlockProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ poolId_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ blockId_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ numBytes_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ generationStamp_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto getDefaultInstanceForType() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto build() {
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto buildPartial() {
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.poolId_ = poolId_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.blockId_ = blockId_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.numBytes_ = numBytes_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.generationStamp_ = generationStamp_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto) {
+ return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto other) {
+ if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto.getDefaultInstance()) return this;
+ if (other.hasPoolId()) {
+ setPoolId(other.getPoolId());
+ }
+ if (other.hasBlockId()) {
+ setBlockId(other.getBlockId());
+ }
+ if (other.hasNumBytes()) {
+ setNumBytes(other.getNumBytes());
+ }
+ if (other.hasGenerationStamp()) {
+ setGenerationStamp(other.getGenerationStamp());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasPoolId()) {
+
+ return false;
+ }
+ if (!hasBlockId()) {
+
+ return false;
+ }
+ if (!hasNumBytes()) {
+
+ return false;
+ }
+ if (!hasGenerationStamp()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ poolId_ = input.readBytes();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ blockId_ = input.readUInt64();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ numBytes_ = input.readUInt64();
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000008;
+ generationStamp_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required string poolId = 1;
+ private java.lang.Object poolId_ = "";
+ public boolean hasPoolId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getPoolId() {
+ java.lang.Object ref = poolId_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ poolId_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setPoolId(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ poolId_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearPoolId() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ poolId_ = getDefaultInstance().getPoolId();
+ onChanged();
+ return this;
+ }
+ void setPoolId(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ poolId_ = value;
+ onChanged();
+ }
+
+ // required uint64 blockId = 2;
+ private long blockId_ ;
+ public boolean hasBlockId() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public long getBlockId() {
+ return blockId_;
+ }
+ public Builder setBlockId(long value) {
+ bitField0_ |= 0x00000002;
+ blockId_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearBlockId() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ blockId_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // required uint64 numBytes = 3;
+ private long numBytes_ ;
+ public boolean hasNumBytes() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public long getNumBytes() {
+ return numBytes_;
+ }
+ public Builder setNumBytes(long value) {
+ bitField0_ |= 0x00000004;
+ numBytes_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearNumBytes() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ numBytes_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // required uint64 generationStamp = 4;
+ private long generationStamp_ ;
+ public boolean hasGenerationStamp() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public long getGenerationStamp() {
+ return generationStamp_;
+ }
+ public Builder setGenerationStamp(long value) {
+ bitField0_ |= 0x00000008;
+ generationStamp_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearGenerationStamp() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ generationStamp_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ExtendedBlockProto)
+ }
+
+ static {
+ defaultInstance = new ExtendedBlockProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ExtendedBlockProto)
+ }
+
+ public interface BlockTokenIdentifierProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes identifier = 1;
+ boolean hasIdentifier();
+ com.google.protobuf.ByteString getIdentifier();
+
+ // required bytes password = 2;
+ boolean hasPassword();
+ com.google.protobuf.ByteString getPassword();
+
+ // required string kind = 3;
+ boolean hasKind();
+ String getKind();
+
+ // required string service = 4;
+ boolean hasService();
+ String getService();
+ }
+ public static final class BlockTokenIdentifierProto extends
+ com.google.protobuf.GeneratedMessage
+ implements BlockTokenIdentifierProtoOrBuilder {
+ // Use BlockTokenIdentifierProto.newBuilder() to construct.
+ private BlockTokenIdentifierProto(Builder builder) {
+ super(builder);
+ }
+ private BlockTokenIdentifierProto(boolean noInit) {}
+
+ private static final BlockTokenIdentifierProto defaultInstance;
+ public static BlockTokenIdentifierProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public BlockTokenIdentifierProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required bytes identifier = 1;
+ public static final int IDENTIFIER_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString identifier_;
+ public boolean hasIdentifier() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getIdentifier() {
+ return identifier_;
+ }
+
+ // required bytes password = 2;
+ public static final int PASSWORD_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString password_;
+ public boolean hasPassword() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getPassword() {
+ return password_;
+ }
+
+ // required string kind = 3;
+ public static final int KIND_FIELD_NUMBER = 3;
+ private java.lang.Object kind_;
+ public boolean hasKind() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public String getKind() {
+ java.lang.Object ref = kind_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ kind_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getKindBytes() {
+ java.lang.Object ref = kind_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ kind_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // required string service = 4;
+ public static final int SERVICE_FIELD_NUMBER = 4;
+ private java.lang.Object service_;
+ public boolean hasService() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public String getService() {
+ java.lang.Object ref = service_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ service_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getServiceBytes() {
+ java.lang.Object ref = service_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ service_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ identifier_ = com.google.protobuf.ByteString.EMPTY;
+ password_ = com.google.protobuf.ByteString.EMPTY;
+ kind_ = "";
+ service_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasIdentifier()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasPassword()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasKind()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasService()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, identifier_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, password_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBytes(3, getKindBytes());
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeBytes(4, getServiceBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, identifier_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, password_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(3, getKindBytes());
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(4, getServiceBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto) obj;
+
+ boolean result = true;
+ result = result && (hasIdentifier() == other.hasIdentifier());
+ if (hasIdentifier()) {
+ result = result && getIdentifier()
+ .equals(other.getIdentifier());
+ }
+ result = result && (hasPassword() == other.hasPassword());
+ if (hasPassword()) {
+ result = result && getPassword()
+ .equals(other.getPassword());
+ }
+ result = result && (hasKind() == other.hasKind());
+ if (hasKind()) {
+ result = result && getKind()
+ .equals(other.getKind());
+ }
+ result = result && (hasService() == other.hasService());
+ if (hasService()) {
+ result = result && getService()
+ .equals(other.getService());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasIdentifier()) {
+ hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER;
+ hash = (53 * hash) + getIdentifier().hashCode();
+ }
+ if (hasPassword()) {
+ hash = (37 * hash) + PASSWORD_FIELD_NUMBER;
+ hash = (53 * hash) + getPassword().hashCode();
+ }
+ if (hasKind()) {
+ hash = (37 * hash) + KIND_FIELD_NUMBER;
+ hash = (53 * hash) + getKind().hashCode();
+ }
+ if (hasService()) {
+ hash = (37 * hash) + SERVICE_FIELD_NUMBER;
+ hash = (53 * hash) + getService().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_BlockTokenIdentifierProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ identifier_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ password_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ kind_ = "";
+ bitField0_ = (bitField0_ & ~0x00000004);
+ service_ = "";
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto getDefaultInstanceForType() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto build() {
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto buildPartial() {
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.identifier_ = identifier_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.password_ = password_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.kind_ = kind_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.service_ = service_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto) {
+ return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto other) {
+ if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto.getDefaultInstance()) return this;
+ if (other.hasIdentifier()) {
+ setIdentifier(other.getIdentifier());
+ }
+ if (other.hasPassword()) {
+ setPassword(other.getPassword());
+ }
+ if (other.hasKind()) {
+ setKind(other.getKind());
+ }
+ if (other.hasService()) {
+ setService(other.getService());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasIdentifier()) {
+
+ return false;
+ }
+ if (!hasPassword()) {
+
+ return false;
+ }
+ if (!hasKind()) {
+
+ return false;
+ }
+ if (!hasService()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ identifier_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ password_ = input.readBytes();
+ break;
+ }
+ case 26: {
+ bitField0_ |= 0x00000004;
+ kind_ = input.readBytes();
+ break;
+ }
+ case 34: {
+ bitField0_ |= 0x00000008;
+ service_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required bytes identifier = 1;
+ private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasIdentifier() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public com.google.protobuf.ByteString getIdentifier() {
+ return identifier_;
+ }
+ public Builder setIdentifier(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ identifier_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearIdentifier() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ identifier_ = getDefaultInstance().getIdentifier();
+ onChanged();
+ return this;
+ }
+
+ // required bytes password = 2;
+ private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasPassword() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getPassword() {
+ return password_;
+ }
+ public Builder setPassword(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ password_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearPassword() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ password_ = getDefaultInstance().getPassword();
+ onChanged();
+ return this;
+ }
+
+ // required string kind = 3;
+ private java.lang.Object kind_ = "";
+ public boolean hasKind() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public String getKind() {
+ java.lang.Object ref = kind_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ kind_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setKind(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ kind_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearKind() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ kind_ = getDefaultInstance().getKind();
+ onChanged();
+ return this;
+ }
+ void setKind(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000004;
+ kind_ = value;
+ onChanged();
+ }
+
+ // required string service = 4;
+ private java.lang.Object service_ = "";
+ public boolean hasService() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public String getService() {
+ java.lang.Object ref = service_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ service_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setService(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ service_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearService() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ service_ = getDefaultInstance().getService();
+ onChanged();
+ return this;
+ }
+ void setService(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000008;
+ service_ = value;
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:BlockTokenIdentifierProto)
+ }
+
+ static {
+ defaultInstance = new BlockTokenIdentifierProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:BlockTokenIdentifierProto)
+ }
+
+ public interface DatanodeIDProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string name = 1;
+ boolean hasName();
+ String getName();
+
+ // required string storageID = 2;
+ boolean hasStorageID();
+ String getStorageID();
+
+ // required uint32 infoPort = 3;
+ boolean hasInfoPort();
+ int getInfoPort();
+ }
+ public static final class DatanodeIDProto extends
+ com.google.protobuf.GeneratedMessage
+ implements DatanodeIDProtoOrBuilder {
+ // Use DatanodeIDProto.newBuilder() to construct.
+ private DatanodeIDProto(Builder builder) {
+ super(builder);
+ }
+ private DatanodeIDProto(boolean noInit) {}
+
+ private static final DatanodeIDProto defaultInstance;
+ public static DatanodeIDProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public DatanodeIDProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required string name = 1;
+ public static final int NAME_FIELD_NUMBER = 1;
+ private java.lang.Object name_;
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getName() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ name_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // required string storageID = 2;
+ public static final int STORAGEID_FIELD_NUMBER = 2;
+ private java.lang.Object storageID_;
+ public boolean hasStorageID() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public String getStorageID() {
+ java.lang.Object ref = storageID_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ storageID_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getStorageIDBytes() {
+ java.lang.Object ref = storageID_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ storageID_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // required uint32 infoPort = 3;
+ public static final int INFOPORT_FIELD_NUMBER = 3;
+ private int infoPort_;
+ public boolean hasInfoPort() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public int getInfoPort() {
+ return infoPort_;
+ }
+
+ private void initFields() {
+ name_ = "";
+ storageID_ = "";
+ infoPort_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasStorageID()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasInfoPort()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getStorageIDBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeUInt32(3, infoPort_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getStorageIDBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(3, infoPort_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto) obj;
+
+ boolean result = true;
+ result = result && (hasName() == other.hasName());
+ if (hasName()) {
+ result = result && getName()
+ .equals(other.getName());
+ }
+ result = result && (hasStorageID() == other.hasStorageID());
+ if (hasStorageID()) {
+ result = result && getStorageID()
+ .equals(other.getStorageID());
+ }
+ result = result && (hasInfoPort() == other.hasInfoPort());
+ if (hasInfoPort()) {
+ result = result && (getInfoPort()
+ == other.getInfoPort());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasName()) {
+ hash = (37 * hash) + NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getName().hashCode();
+ }
+ if (hasStorageID()) {
+ hash = (37 * hash) + STORAGEID_FIELD_NUMBER;
+ hash = (53 * hash) + getStorageID().hashCode();
+ }
+ if (hasInfoPort()) {
+ hash = (37 * hash) + INFOPORT_FIELD_NUMBER;
+ hash = (53 * hash) + getInfoPort();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeIDProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ name_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ storageID_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ infoPort_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getDefaultInstanceForType() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto build() {
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto buildPartial() {
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto result = new org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.name_ = name_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.storageID_ = storageID_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.infoPort_ = infoPort_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto) {
+ return mergeFrom((org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto other) {
+ if (other == org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance()) return this;
+ if (other.hasName()) {
+ setName(other.getName());
+ }
+ if (other.hasStorageID()) {
+ setStorageID(other.getStorageID());
+ }
+ if (other.hasInfoPort()) {
+ setInfoPort(other.getInfoPort());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasName()) {
+
+ return false;
+ }
+ if (!hasStorageID()) {
+
+ return false;
+ }
+ if (!hasInfoPort()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ name_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ storageID_ = input.readBytes();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ infoPort_ = input.readUInt32();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required string name = 1;
+ private java.lang.Object name_ = "";
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getName() {
+ java.lang.Object ref = name_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ name_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setName(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ name_ = getDefaultInstance().getName();
+ onChanged();
+ return this;
+ }
+ void setName(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ }
+
+ // required string storageID = 2;
+ private java.lang.Object storageID_ = "";
+ public boolean hasStorageID() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public String getStorageID() {
+ java.lang.Object ref = storageID_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ storageID_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setStorageID(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ storageID_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearStorageID() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ storageID_ = getDefaultInstance().getStorageID();
+ onChanged();
+ return this;
+ }
+ void setStorageID(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000002;
+ storageID_ = value;
+ onChanged();
+ }
+
+ // required uint32 infoPort = 3;
+ private int infoPort_ ;
+ public boolean hasInfoPort() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public int getInfoPort() {
+ return infoPort_;
+ }
+ public Builder setInfoPort(int value) {
+ bitField0_ |= 0x00000004;
+ infoPort_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearInfoPort() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ infoPort_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:DatanodeIDProto)
+ }
+
+ static {
+ defaultInstance = new DatanodeIDProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:DatanodeIDProto)
+ }
+
+ public interface DatanodeInfoProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .DatanodeIDProto id = 1;
+ boolean hasId();
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getId();
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder getIdOrBuilder();
+
+ // optional uint64 capacity = 2;
+ boolean hasCapacity();
+ long getCapacity();
+
+ // optional uint64 dfsUsed = 3;
+ boolean hasDfsUsed();
+ long getDfsUsed();
+
+ // optional uint64 remaining = 4;
+ boolean hasRemaining();
+ long getRemaining();
+
+ // optional uint64 blockPoolUsed = 5;
+ boolean hasBlockPoolUsed();
+ long getBlockPoolUsed();
+
+ // optional uint64 lastUpdate = 6;
+ boolean hasLastUpdate();
+ long getLastUpdate();
+
+ // optional uint32 xceiverCount = 7;
+ boolean hasXceiverCount();
+ int getXceiverCount();
+
+ // optional string location = 8;
+ boolean hasLocation();
+ String getLocation();
+
+ // optional string hostName = 9;
+ boolean hasHostName();
+ String getHostName();
+
+ // optional .DatanodeInfoProto.AdminState adminState = 10;
+ boolean hasAdminState();
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState getAdminState();
+ }
+ public static final class DatanodeInfoProto extends
+ com.google.protobuf.GeneratedMessage
+ implements DatanodeInfoProtoOrBuilder {
+ // Use DatanodeInfoProto.newBuilder() to construct.
+ private DatanodeInfoProto(Builder builder) {
+ super(builder);
+ }
+ private DatanodeInfoProto(boolean noInit) {}
+
+ private static final DatanodeInfoProto defaultInstance;
+ public static DatanodeInfoProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public DatanodeInfoProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.internal_static_DatanodeInfoProto_fieldAccessorTable;
+ }
+
+ public enum AdminState
+ implements com.google.protobuf.ProtocolMessageEnum {
+ NORMAL(0, 0),
+ DECOMMISSION_INPROGRESS(1, 1),
+ DECOMMISSIONED(2, 2),
+ ;
+
+ public static final int NORMAL_VALUE = 0;
+ public static final int DECOMMISSION_INPROGRESS_VALUE = 1;
+ public static final int DECOMMISSIONED_VALUE = 2;
+
+
+ public final int getNumber() { return value; }
+
+ public static AdminState valueOf(int value) {
+ switch (value) {
+ case 0: return NORMAL;
+ case 1: return DECOMMISSION_INPROGRESS;
+ case 2: return DECOMMISSIONED;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<AdminState>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<AdminState>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<AdminState>() {
+ public AdminState findValueByNumber(int number) {
+ return AdminState.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final AdminState[] VALUES = {
+ NORMAL, DECOMMISSION_INPROGRESS, DECOMMISSIONED,
+ };
+
+ public static AdminState valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private AdminState(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:DatanodeInfoProto.AdminState)
+ }
+
+ private int bitField0_;
+ // required .DatanodeIDProto id = 1;
+ public static final int ID_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto id_;
+ public boolean hasId() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto getId() {
+ return id_;
+ }
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProtoOrBuilder getIdOrBuilder() {
+ return id_;
+ }
+
+ // Generated accessors for the optional numeric fields (tags 2-7). Each field
+ // follows the same pattern: a FIELD_NUMBER constant, the backing primitive,
+ // a hasX() that tests its dedicated bit in bitField0_, and a plain getter.
+ // optional uint64 capacity = 2;
+ public static final int CAPACITY_FIELD_NUMBER = 2;
+ private long capacity_;
+ public boolean hasCapacity() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public long getCapacity() {
+ return capacity_;
+ }
+
+ // optional uint64 dfsUsed = 3;
+ public static final int DFSUSED_FIELD_NUMBER = 3;
+ private long dfsUsed_;
+ public boolean hasDfsUsed() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public long getDfsUsed() {
+ return dfsUsed_;
+ }
+
+ // optional uint64 remaining = 4;
+ public static final int REMAINING_FIELD_NUMBER = 4;
+ private long remaining_;
+ public boolean hasRemaining() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ public long getRemaining() {
+ return remaining_;
+ }
+
+ // optional uint64 blockPoolUsed = 5;
+ public static final int BLOCKPOOLUSED_FIELD_NUMBER = 5;
+ private long blockPoolUsed_;
+ public boolean hasBlockPoolUsed() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ public long getBlockPoolUsed() {
+ return blockPoolUsed_;
+ }
+
+ // optional uint64 lastUpdate = 6;
+ public static final int LASTUPDATE_FIELD_NUMBER = 6;
+ private long lastUpdate_;
+ public boolean hasLastUpdate() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ public long getLastUpdate() {
+ return lastUpdate_;
+ }
+
+ // optional uint32 xceiverCount = 7;
+ public static final int XCEIVERCOUNT_FIELD_NUMBER = 7;
+ private int xceiverCount_;
+ public boolean hasXceiverCount() {
+ return ((bitField0_ & 0x00000040) == 0x00000040);
+ }
+ public int getXceiverCount() {
+ return xceiverCount_;
+ }
+
+ // optional string location = 8;
+ public static final int LOCATION_FIELD_NUMBER = 8;
+ // Holds either a String or a ByteString; converted lazily in each direction
+ // and cached back into the field to avoid repeated UTF-8 (de)coding.
+ private java.lang.Object location_;
+ public boolean hasLocation() {
+ return ((bitField0_ & 0x00000080) == 0x00000080);
+ }
+ public String getLocation() {
+ java.lang.Object ref = location_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ // Cache the decoded String only when the bytes were valid UTF-8;
+ // otherwise keep the raw ByteString so the original bytes survive.
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ location_ = s;
+ }
+ return s;
+ }
+ }
+ // Encoding counterpart used by writeTo()/getSerializedSize(); caches the
+ // UTF-8 ByteString form after the first conversion.
+ private com.google.protobuf.ByteString getLocationBytes() {
+ java.lang.Object ref = location_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ location_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string hostName = 9;
+ public static final int HOSTNAME_FIELD_NUMBER = 9;
+ // String/ByteString dual representation with lazy conversion, identical in
+ // structure to the 'location' field accessors above.
+ private java.lang.Object hostName_;
+ public boolean hasHostName() {
+ return ((bitField0_ & 0x00000100) == 0x00000100);
+ }
+ public String getHostName() {
+ java.lang.Object ref = hostName_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ // Only cache the decoded form when the bytes are valid UTF-8.
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ hostName_ = s;
+ }
+ return s;
+ }
+ }
+ // Serialization-side accessor; caches the UTF-8 encoded ByteString.
+ private com.google.protobuf.ByteString getHostNameBytes() {
+ java.lang.Object ref = hostName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ hostName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional .DatanodeInfoProto.AdminState adminState = 10;
+ public static final int ADMINSTATE_FIELD_NUMBER = 10;
+ private org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState adminState_;
+ public boolean hasAdminState() {
+ return ((bitField0_ & 0x00000200) == 0x00000200);
+ }
+ // Defaults to AdminState.NORMAL when unset (see initFields()).
+ public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState getAdminState() {
+ return adminState_;
+ }
+
+ // Resets every field to its proto2 default value (0 for numerics, "" for
+ // strings, default instance for the 'id' submessage, NORMAL for adminState).
+ private void initFields() {
+ id_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto.getDefaultInstance();
+ capacity_ = 0L;
+ dfsUsed_ = 0L;
+ remaining_ = 0L;
+ blockPoolUsed_ = 0L;
+ lastUpdate_ = 0L;
+ xceiverCount_ = 0;
+ location_ = "";
+ hostName_ = "";
+ adminState_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto.AdminState.NORMAL;
+ }
+ // -1 = not yet computed, 0 = not initialized, 1 = initialized.
+ private byte memoizedIsInitialized = -1;
+ // A DatanodeInfoProto is initialized iff the required 'id' field is present
+ // and is itself initialized; the answer is memoized after the first call.
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasId()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getId().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ // Serializes the message to the protobuf wire format: each field is written
+ // with its tag number only when its presence bit in bitField0_ is set, in
+ // ascending field-number order, followed by any preserved unknown fields.
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ // Forces the memoized size computation so nested-message length prefixes
+ // are available during writing.
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, id_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt64(2, capacity_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeUInt64(3, dfsUsed_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeUInt64(4, remaining_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeUInt64(5, blockPoolUsed_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ output.writeUInt64(6, lastUpdate_);
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ output.writeUInt32(7, xceiverCount_);
+ }
+ if (((bitField0_ & 0x00000080) == 0x00000080)) {
+ output.writeBytes(8, getLocationBytes());
+ }
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ output.writeBytes(9, getHostNameBytes());
+ }
+ if (((bitField0_ & 0x00000200) == 0x00000200)) {
+ output.writeEnum(10, adminState_.getNumber());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ // -1 means the size has not been computed yet.
+ private int memoizedSerializedSize = -1;
+ // Returns the exact wire-format byte size of this message, summing the
+ // tagged size of each present field plus the unknown-field set; the result
+ // is memoized since the message is immutable once built.
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, id_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(2, capacity_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(3, dfsUsed_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(4, remaining_);
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(5, blockPoolUsed_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(6, lastUpdate_);
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(7, xceiverCount_);
+ }
+ if (((bitField0_ & 0x00000080) == 0x00000080)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(8, getLocationBytes());
+ }
+ if (((bitField0_ & 0x00000100) == 0x00000100)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(9, getHostNameBytes());
+ }
+ if (((bitField0_ & 0x00000200) == 0x00000200)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(10, adminState_.getNumber());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ // Java serialization hook; delegates to GeneratedMessage.writeReplace()
+ // (presumably substituting a serialization proxy -- behavior defined by the
+ // protobuf runtime superclass, not visible here).
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ // Structural equality: two DatanodeInfoProto messages are equal when every
+ // field has the same presence bit and, if present, the same value, and their
+ // unknown-field sets match. Non-DatanodeInfoProto objects fall back to
+ // super.equals().
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto other = (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto) obj;
+
+ boolean result = true;
+ result = result && (hasId() == other.hasId());
+ if (hasId()) {
+ result = result && getId()
+ .equals(other.getId());
+ }
+ result = result && (hasCapacity() == other.hasCapacity());
+ if (hasCapacity()) {
+ result = result && (getCapacity()
+ == other.getCapacity());
+ }
+ result = result && (hasDfsUsed() == other.hasDfsUsed());
+ if (hasDfsUsed()) {
+ result = result && (getDfsUsed()
+ == other.getDfsUsed());
+ }
+ result = result && (hasRemaining() == other.hasRemaining());
+ if (hasRemaining()) {
+ result = result && (getRemaining()
+ == other.getRemaining());
+ }
+ result = result && (hasBlockPoolUsed() == other.hasBlockPoolUsed());
+ if (hasBlockPoolUsed()) {
+ result = result && (getBlockPoolUsed()
+ == other.getBlockPoolUsed());
+ }
+ result = result && (hasLastUpdate() == other.hasLastUpdate());
+ if (hasLastUpdate()) {
+ result = result && (getLastUpdate()
+ == other.getLastUpdate());
+ }
+ result = result && (hasXceiverCount() == other.hasXceiverCount());
+ if (hasXceiverCount()) {
+ result = result && (getXceiverCount()
+ == other.getXceiverCount());
+ }
+ result = result && (hasLocation() == other.hasLocation());
+ if (hasLocation()) {
+ result = result && getLocation()
+ .equals(other.getLocation());
+ }
+ result = result && (hasHostName() == other.hasHostName());
+ if (hasHostName()) {
+ result = result && getHostName()
+ .equals(other.getHostName());
+ }
+ result = result && (hasAdminState() == other.hasAdminState());
+ if (hasAdminState()) {
+ result = result &&
+ (getAdminState() == other.getAdminState());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ // Hash code consistent with equals(): mixes the descriptor, each present
+ // field (keyed by its FIELD_NUMBER constant) and the unknown-field set.
+ // Not memoized in this generated version -- recomputed on every call.
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasId()) {
+ hash = (37 * hash) + ID_FIELD_NUMBER;
+ hash = (53 * hash) + getId().hashCode();
+ }
+ if (hasCapacity()) {
+ hash = (37 * hash) + CAPACITY_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getCapacity());
+ }
+ if (hasDfsUsed()) {
+ hash = (37 * hash) + DFSUSED_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getDfsUsed());
+ }
+ if (hasRemaining()) {
+ hash = (37 * hash) + REMAINING_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getRemaining());
+ }
+ if (hasBlockPoolUsed()) {
+ hash = (37 * hash) + BLOCKPOOLUSED_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getBlockPoolUsed());
+ }
+ if (hasLastUpdate()) {
+ hash = (37 * hash) + LASTUPDATE_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getLastUpdate());
+ }
+ if (hasXceiverCount()) {
+ hash = (37 * hash) + XCEIVERCOUNT_FIELD_NUMBER;
+ hash = (53 * hash) + getXceiverCount();
+ }
+ if (hasLocation()) {
+ hash = (37 * hash) + LOCATION_FIELD_NUMBER;
+ hash = (53 * hash) + getLocation().hashCode();
+ }
+ if (hasHostName()) {
+ hash = (37 * hash) + HOSTNAME_FIELD_NUMBER;
+ hash = (53 * hash) + getHostName().hashCode();
+ }
+ if (hasAdminState()) {
+ hash = (37 * hash) + ADMINSTATE_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnum(getAdminState());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ // Static parse entry points: each overload accepts a different input form
+ // (ByteString, byte[], InputStream, optionally with an ExtensionRegistry)
+ // and delegates to a fresh Builder's mergeFrom(...) followed by
+ // buildParsed(), which converts missing-required-field failures into
+ // InvalidProtocolBufferException.
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
[... 752 lines stripped ...]