You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2014/02/07 07:15:08 UTC
svn commit: r1565546 - in /hbase/branches/0.96:
hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/
hbase-protocol/src/main/protobuf/
hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/
hbase-server/src/main/jamon/org/...
Author: stack
Date: Fri Feb 7 06:15:08 2014
New Revision: 1565546
URL: http://svn.apache.org/r1565546
Log:
HBASE-10340 [BACKPORT] HBASE-9892 Add info port to ServerName to support multi instances in a node
Modified:
hbase/branches/0.96/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
hbase/branches/0.96/hbase-protocol/src/main/protobuf/HBase.proto
hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSStatusServlet.java
hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
hbase/branches/0.96/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
Modified: hbase/branches/0.96/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java (original)
+++ hbase/branches/0.96/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java Fri Feb 7 06:15:08 2014
@@ -13953,6 +13953,450 @@ public final class HBaseProtos {
// @@protoc_insertion_point(class_scope:NamespaceDescriptor)
}
+ public interface RegionServerInfoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional int32 infoPort = 1;
+ /**
+ * <code>optional int32 infoPort = 1;</code>
+ */
+ boolean hasInfoPort();
+ /**
+ * <code>optional int32 infoPort = 1;</code>
+ */
+ int getInfoPort();
+ }
+ /**
+ * Protobuf type {@code RegionServerInfo}
+ *
+ * <pre>
+ **
+ * Description of the region server info
+ * </pre>
+ */
+ public static final class RegionServerInfo extends
+ com.google.protobuf.GeneratedMessage
+ implements RegionServerInfoOrBuilder {
+ // Use RegionServerInfo.newBuilder() to construct.
+ private RegionServerInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private RegionServerInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final RegionServerInfo defaultInstance;
+ public static RegionServerInfo getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RegionServerInfo getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private RegionServerInfo(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ infoPort_ = input.readInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<RegionServerInfo> PARSER =
+ new com.google.protobuf.AbstractParser<RegionServerInfo>() {
+ public RegionServerInfo parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new RegionServerInfo(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<RegionServerInfo> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // optional int32 infoPort = 1;
+ public static final int INFOPORT_FIELD_NUMBER = 1;
+ private int infoPort_;
+ /**
+ * <code>optional int32 infoPort = 1;</code>
+ */
+ public boolean hasInfoPort() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional int32 infoPort = 1;</code>
+ */
+ public int getInfoPort() {
+ return infoPort_;
+ }
+
+ private void initFields() {
+ infoPort_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeInt32(1, infoPort_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(1, infoPort_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) obj;
+
+ boolean result = true;
+ result = result && (hasInfoPort() == other.hasInfoPort());
+ if (hasInfoPort()) {
+ result = result && (getInfoPort()
+ == other.getInfoPort());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasInfoPort()) {
+ hash = (37 * hash) + INFOPORT_FIELD_NUMBER;
+ hash = (53 * hash) + getInfoPort();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code RegionServerInfo}
+ *
+ * <pre>
+ **
+ * Description of the region server info
+ * </pre>
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ infoPort_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo build() {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.infoPort_ = infoPort_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance()) return this;
+ if (other.hasInfoPort()) {
+ setInfoPort(other.getInfoPort());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional int32 infoPort = 1;
+ private int infoPort_ ;
+ /**
+ * <code>optional int32 infoPort = 1;</code>
+ */
+ public boolean hasInfoPort() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional int32 infoPort = 1;</code>
+ */
+ public int getInfoPort() {
+ return infoPort_;
+ }
+ /**
+ * <code>optional int32 infoPort = 1;</code>
+ */
+ public Builder setInfoPort(int value) {
+ bitField0_ |= 0x00000001;
+ infoPort_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int32 infoPort = 1;</code>
+ */
+ public Builder clearInfoPort() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ infoPort_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:RegionServerInfo)
+ }
+
+ static {
+ defaultInstance = new RegionServerInfo(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:RegionServerInfo)
+ }
+
private static com.google.protobuf.Descriptors.Descriptor
internal_static_TableName_descriptor;
private static
@@ -14048,6 +14492,11 @@ public final class HBaseProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_NamespaceDescriptor_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_RegionServerInfo_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_RegionServerInfo_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@@ -14092,11 +14541,12 @@ public final class HBaseProtos {
"5\n\004UUID\022\026\n\016least_sig_bits\030\001 \002(\004\022\025\n\rmost_" +
"sig_bits\030\002 \002(\004\"K\n\023NamespaceDescriptor\022\014\n" +
"\004name\030\001 \002(\014\022&\n\rconfiguration\030\002 \003(\0132\017.Nam" +
- "eStringPair*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n\r" +
- "LESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020" +
- "\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005" +
- "NO_OP\020\006B>\n*org.apache.hadoop.hbase.proto" +
- "buf.generatedB\013HBaseProtosH\001\240\001\001"
+ "eStringPair\"$\n\020RegionServerInfo\022\020\n\010infoP" +
+ "ort\030\001 \001(\005*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n\rLE" +
+ "SS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022" +
+ "\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO" +
+ "_OP\020\006B>\n*org.apache.hadoop.hbase.protobu",
+ "f.generatedB\013HBaseProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -14217,6 +14667,12 @@ public final class HBaseProtos {
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_NamespaceDescriptor_descriptor,
new java.lang.String[] { "Name", "Configuration", });
+ internal_static_RegionServerInfo_descriptor =
+ getDescriptor().getMessageTypes().get(19);
+ internal_static_RegionServerInfo_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_RegionServerInfo_descriptor,
+ new java.lang.String[] { "InfoPort", });
return null;
}
};
Modified: hbase/branches/0.96/hbase-protocol/src/main/protobuf/HBase.proto
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-protocol/src/main/protobuf/HBase.proto?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-protocol/src/main/protobuf/HBase.proto (original)
+++ hbase/branches/0.96/hbase-protocol/src/main/protobuf/HBase.proto Fri Feb 7 06:15:08 2014
@@ -185,3 +185,10 @@ message NamespaceDescriptor {
required bytes name = 1;
repeated NameStringPair configuration = 2;
}
+
+/**
+ * Description of the region server info
+ */
+message RegionServerInfo {
+ optional int32 infoPort = 1;
+}
Modified: hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon (original)
+++ hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon Fri Feb 7 06:15:08 2014
@@ -401,7 +401,6 @@ AssignmentManager assignmentManager = ma
ServerName [] deadServerNames = deadServers.toArray(new ServerName[deadServers.size()]);
Arrays.sort(deadServerNames);
for (ServerName deadServerName: deadServerNames) {
- int infoPort = master.getConfiguration().getInt("hbase.regionserver.info.port", 60030);
</%java>
<tr>
<th></th>
Modified: hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon (original)
+++ hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon Fri Feb 7 06:15:08 2014
@@ -280,9 +280,7 @@ if (sl.getTotalCompactingKVs() > 0) {
ServerLoad serverLoad;
</%args>
<%java>
- boolean useDefault = (serverLoad == null);
- int defaultPort = master.getConfiguration().getInt("hbase.regionserver.info.port", 60030);
- int infoPort = useDefault?defaultPort:serverLoad.getInfoServerPort();
+ int infoPort = master.getRegionServerInfoPort(serverName);
String url = "//" + serverName.getHostname() + ":" + infoPort + "/";
</%java>
Modified: hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon (original)
+++ hbase/branches/0.96/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon Fri Feb 7 06:15:08 2014
@@ -89,7 +89,7 @@ org.apache.hadoop.hbase.protobuf.generat
<div class="container">
<div class="row inner_header">
<div class="page-header">
- <h1>RegionServer <small><% serverName.getHostname() %></small></h1>
+ <h1>RegionServer <small><% serverName %></small></h1>
</div>
</div>
<div class="row">
Modified: hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java (original)
+++ hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java Fri Feb 7 06:15:08 2014
@@ -143,6 +143,8 @@ public class LocalHBaseCluster {
// clash over default ports.
conf.set(HConstants.MASTER_PORT, "0");
conf.set(HConstants.REGIONSERVER_PORT, "0");
+ conf.set(HConstants.REGIONSERVER_INFO_PORT, "0");
+
this.masterClass = (Class<? extends HMaster>)
conf.getClass(HConstants.MASTER_IMPL, masterClass);
// Start the HMasters.
Modified: hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java (original)
+++ hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java Fri Feb 7 06:15:08 2014
@@ -112,6 +112,7 @@ import org.apache.hadoop.hbase.protobuf.
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
@@ -2166,6 +2167,15 @@ MasterServices, Server {
return masterActiveTime;
}
+ public int getRegionServerInfoPort(final ServerName sn) {
+ RegionServerInfo info = this.regionServerTracker.getRegionServerInfo(sn);
+ if (info == null || info.getInfoPort() == 0) {
+ return conf.getInt(HConstants.REGIONSERVER_INFO_PORT,
+ HConstants.DEFAULT_REGIONSERVER_INFOPORT);
+ }
+ return info.getInfoPort();
+ }
+
/**
* @return array of coprocessor SimpleNames.
*/
Modified: hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Fri Feb 7 06:15:08 2014
@@ -173,6 +173,7 @@ import org.apache.hadoop.hbase.protobuf.
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest;
@@ -336,8 +337,8 @@ public class HRegionServer implements Cl
// debugging and unit tests.
protected volatile boolean abortRequested;
- // Port we put up the webui on.
- protected int webuiport = -1;
+ // region server static info like info port
+ private RegionServerInfo.Builder rsInfo;
ConcurrentMap<String, Integer> rowlocks = new ConcurrentHashMap<String, Integer>();
@@ -577,6 +578,11 @@ public class HRegionServer implements Cl
this.distributedLogReplay = this.conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY,
HConstants.DEFAULT_DISTRIBUTED_LOG_REPLAY_CONFIG);
+
+ this.rsInfo = RegionServerInfo.newBuilder();
+ // Put up the webui. Webui may come up on port other than configured if
+ // that port is occupied. Adjust serverInfo if this is the case.
+ this.rsInfo.setInfoPort(putUpWebUI());
}
/**
@@ -1202,9 +1208,10 @@ public class HRegionServer implements Cl
}
}
- private void createMyEphemeralNode() throws KeeperException {
- ZKUtil.createEphemeralNodeAndWatch(this.zooKeeper, getMyEphemeralNodePath(),
- HConstants.EMPTY_BYTE_ARRAY);
+ private void createMyEphemeralNode() throws KeeperException, IOException {
+ byte[] data = ProtobufUtil.prependPBMagic(rsInfo.build().toByteArray());
+ ZKUtil.createEphemeralNodeAndWatch(this.zooKeeper,
+ getMyEphemeralNodePath(), data);
}
private void deleteMyEphemeralNode() throws KeeperException {
@@ -1540,10 +1547,6 @@ public class HRegionServer implements Cl
this.leases.setName(n + ".leaseChecker");
this.leases.start();
- // Put up the webui. Webui may come up on port other than configured if
- // that port is occupied. Adjust serverInfo if this is the case.
- this.webuiport = putUpWebUI();
-
if (this.replicationSourceHandler == this.replicationSinkHandler &&
this.replicationSourceHandler != null) {
this.replicationSourceHandler.startReplicationService();
@@ -1607,7 +1610,7 @@ public class HRegionServer implements Cl
port++;
}
}
- return port;
+ return this.infoServer.getPort();
}
/*
@@ -3941,7 +3944,7 @@ public class HRegionServer implements Cl
final GetServerInfoRequest request) throws ServiceException {
ServerName serverName = getServerName();
requestCount.increment();
- return ResponseConverter.buildGetServerInfoResponse(serverName, webuiport);
+ return ResponseConverter.buildGetServerInfoResponse(serverName, rsInfo.getInfoPort());
}
// End Admin methods
Modified: hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java (original)
+++ hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java Fri Feb 7 06:15:08 2014
@@ -51,6 +51,13 @@ public class RSDumpServlet extends State
assert hrsconf != null : "No RS conf in context";
response.setContentType("text/plain");
+
+ if (!hrs.isOnline()) {
+ response.getWriter().write("The RegionServer is initializing!");
+ response.getWriter().close();
+ return;
+ }
+
OutputStream os = response.getOutputStream();
PrintWriter out = new PrintWriter(os);
Modified: hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSStatusServlet.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSStatusServlet.java?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSStatusServlet.java (original)
+++ hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSStatusServlet.java Fri Feb 7 06:15:08 2014
@@ -41,6 +41,13 @@ public class RSStatusServlet extends Htt
assert hrs != null : "No RS in context!";
resp.setContentType("text/html");
+
+ if (!hrs.isOnline()) {
+ resp.getWriter().write("The RegionServer is initializing!");
+ resp.getWriter().close();
+ return;
+ }
+
RSStatusTmpl tmpl = new RSStatusTmpl();
if (req.getParameter("format") != null)
tmpl.setFormat(req.getParameter("format"));
Modified: hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java (original)
+++ hbase/branches/0.96/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java Fri Feb 7 06:15:08 2014
@@ -21,7 +21,9 @@ package org.apache.hadoop.hbase.zookeepe
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.NavigableMap;
import java.util.NavigableSet;
+import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.logging.Log;
@@ -30,6 +32,9 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.ServerManager;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo;
+import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
/**
@@ -45,7 +50,8 @@ import org.apache.zookeeper.KeeperExcept
@InterfaceAudience.Private
public class RegionServerTracker extends ZooKeeperListener {
private static final Log LOG = LogFactory.getLog(RegionServerTracker.class);
- private NavigableSet<ServerName> regionServers = new TreeSet<ServerName>();
+ private NavigableMap<ServerName, RegionServerInfo> regionServers =
+ new TreeMap<ServerName, RegionServerInfo>();
private ServerManager serverManager;
private Abortable abortable;
@@ -76,7 +82,25 @@ public class RegionServerTracker extends
this.regionServers.clear();
for (String n: servers) {
ServerName sn = ServerName.parseServerName(ZKUtil.getNodeName(n));
- this.regionServers.add(sn);
+ if (regionServers.get(sn) == null) {
+ RegionServerInfo.Builder rsInfoBuilder = RegionServerInfo.newBuilder();
+ try {
+ String nodePath = ZKUtil.joinZNode(watcher.rsZNode, n);
+ byte[] data = ZKUtil.getData(watcher, nodePath);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("RS node: " + nodePath + " data: " + Bytes.toString(data));
+ }
+ if (data != null && data.length > 0 && ProtobufUtil.isPBMagicPrefix(data)) {
+ int magicLen = ProtobufUtil.lengthOfPBMagic();
+ rsInfoBuilder.mergeFrom(data, magicLen, data.length - magicLen);
+ }
+ } catch (KeeperException e) {
+ LOG.warn("Failed to read RegionServer info from ephemeral node", e);
+ } catch (IOException e) {
+ LOG.warn("Malformed RegionServer info data in ephemeral node", e);
+ }
+ this.regionServers.put(sn, rsInfoBuilder.build());
+ }
}
}
}
@@ -119,13 +143,17 @@ public class RegionServerTracker extends
}
}
+ public RegionServerInfo getRegionServerInfo(final ServerName sn) {
+ return regionServers.get(sn);
+ }
+
/**
* Gets the online servers.
* @return list of online servers
*/
public List<ServerName> getOnlineServers() {
synchronized (this.regionServers) {
- return new ArrayList<ServerName>(this.regionServers);
+ return new ArrayList<ServerName>(this.regionServers.keySet());
}
}
}
Modified: hbase/branches/0.96/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
URL: http://svn.apache.org/viewvc/hbase/branches/0.96/hbase-server/src/main/resources/hbase-webapps/master/table.jsp?rev=1565546&r1=1565545&r2=1565546&view=diff
==============================================================================
--- hbase/branches/0.96/hbase-server/src/main/resources/hbase-webapps/master/table.jsp (original)
+++ hbase/branches/0.96/hbase-server/src/main/resources/hbase-webapps/master/table.jsp Fri Feb 7 06:15:08 2014
@@ -19,7 +19,7 @@
--%>
<%@ page contentType="text/html;charset=UTF-8"
import="static org.apache.commons.lang.StringEscapeUtils.escapeXml"
- import="java.util.HashMap"
+ import="java.util.TreeMap"
import="java.util.Map"
import="org.apache.hadoop.conf.Configuration"
import="org.apache.hadoop.hbase.client.HTable"
@@ -50,9 +50,6 @@
if (showFragmentation) {
frags = FSUtils.getTableFragmentation(master);
}
- // HARDCODED FOR NOW TODO: FIX GET FROM ZK
- // This port might be wrong if RS actually ended up using something else.
- int infoPort = conf.getInt("hbase.regionserver.info.port", 60030);
%>
<!--[if IE]>
<!DOCTYPE html>
@@ -200,11 +197,11 @@
HRegionInfo meta = HRegionInfo.FIRST_META_REGIONINFO;
ServerName metaLocation = master.getCatalogTracker().waitForMeta(1);
for (int i = 0; i < 1; i++) {
- String url = "//" + metaLocation.getHostname() + ":" + infoPort + "/";
+ String url = "//" + metaLocation.getHostname() + ":" + master.getRegionServerInfoPort(metaLocation) + "/";
%>
<tr>
<td><%= escapeXml(meta.getRegionNameAsString()) %></td>
- <td><a href="<%= url %>"><%= metaLocation.getHostname().toString() + ":" + infoPort %></a></td>
+ <td><a href="<%= url %>"><%= metaLocation.getHostname().toString() + ":" + master.getRegionServerInfoPort(metaLocation) %></a></td>
<td>-</td>
<td><%= escapeXml(Bytes.toString(meta.getStartKey())) %></td>
<td><%= escapeXml(Bytes.toString(meta.getEndKey())) %></td>
@@ -252,7 +249,7 @@
<% } %>
</table>
<%
- Map<String, Integer> regDistribution = new HashMap<String, Integer>();
+ Map<ServerName, Integer> regDistribution = new TreeMap<ServerName, Integer>();
Map<HRegionInfo, ServerName> regions = table.getRegionLocations();
if(regions != null && regions.size() > 0) { %>
<%= tableHeader %>
@@ -271,22 +268,20 @@
if (map.containsKey(regionInfo.getRegionName())) {
req = map.get(regionInfo.getRegionName()).getRequestsCount();
}
- // This port might be wrong if RS actually ended up using something else.
- urlRegionServer =
- addr.getHostname().toString() + ":" + infoPort;
- Integer i = regDistribution.get(urlRegionServer);
+ Integer i = regDistribution.get(addr);
if (null == i) i = Integer.valueOf(0);
- regDistribution.put(urlRegionServer, i+1);
+ regDistribution.put(addr, i + 1);
}
}
%>
<tr>
<td><%= escapeXml(Bytes.toStringBinary(regionInfo.getRegionName())) %></td>
<%
- if (urlRegionServer != null) {
+ if (addr != null) {
+ String url = "//" + addr.getHostname() + ":" + master.getRegionServerInfoPort(addr) + "/";
%>
<td>
- <a href="<%= "//" + urlRegionServer + "/" %>"><%= urlRegionServer %></a>
+ <a href="<%= url %>"><%= addr.getHostname().toString() + ":" + addr.getPort() %></a>
</td>
<%
} else {
@@ -304,10 +299,12 @@
<h2>Regions by Region Server</h2>
<table class="table table-striped"><tr><th>Region Server</th><th>Region Count</th></tr>
<%
- for (Map.Entry<String, Integer> rdEntry : regDistribution.entrySet()) {
+ for (Map.Entry<ServerName, Integer> rdEntry : regDistribution.entrySet()) {
+ ServerName addr = rdEntry.getKey();
+ String url = "//" + addr.getHostname() + ":" + master.getRegionServerInfoPort(addr) + "/";
%>
<tr>
- <td><a href="<%= "//" + rdEntry.getKey() + "/" %>"><%= rdEntry.getKey()%></a></td>
+ <td><a href="<%= url %>"><%= addr.getHostname().toString() + ":" + addr.getPort() %></a></td>
<td><%= rdEntry.getValue()%></td>
</tr>
<% } %>